Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ 6e0d8677

History | View | Annotate | Download (221.7 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
/* Instruction prefix bits accumulated into DisasContext.prefix while
   decoding. */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

/* Helpers that compile away on 32-bit-only builds so that 64-bit-only
   table entries and predicates cost nothing there. */
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1
59

    
60
/* global register indexes: TCG values shared by all generator helpers
   below (cpu_T[] are the two virtual temporaries of the old micro-op
   scheme, cpu_A0 the computed effective address, cpu_cc_* the lazy
   condition-code state). */
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
static TCGv cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#ifdef TARGET_X86_64
/* When set (a REX prefix was seen), byte operations on regs 4-7 address
   their low byte (SPL/BPL/SIL/DIL) instead of AH/CH/DH/BH — see the
   OT_BYTE cases of gen_op_mov_reg_TN/gen_op_mov_TN_reg. */
static int x86_64_hregs;
#endif
70

    
71
/* Per-translation state carried across one translation block. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index; -1 if no override */
    int prefix;   /* accumulated PREFIX_* bits of the current insn */
    int aflag, dflag; /* address/operand size: 2 = 64 bit, 1 = 32 bit, 0 = 16 bit */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX.X / REX.B prefix bits (extend index/base regs) */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation (CC_OP_DYNAMIC when unknown) */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb; /* TB being translated */
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;      /* cached CPUID feature words used to gate decoding */
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
107

    
108
static void gen_eob(DisasContext *s);
109
static void gen_jmp(DisasContext *s, target_ulong eip);
110
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
111

    
112
/* i386 arith/logic operations — order matches the /r field encoding of
   the 0x80..0x83 immediate group. */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};
123

    
124
/* i386 shift ops — order matches the /r field encoding of the 0xc0/0xd0
   shift group. */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};
135

    
136
/* operand size: 8/16/32/64 bit respectively */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};
143

    
144
enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    /* pseudo-operands outside the architectural register file */
    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
159

    
160
/* T0 = 0 */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}
164

    
165
/* T0 = val, signed 32-bit immediate (sign-extends on 64-bit targets) */
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
169

    
170
/* T0 = val, unsigned 32-bit immediate (zero-extends on 64-bit targets) */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
174

    
175
/* T1 = val, signed 32-bit immediate */
static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
179

    
180
/* T1 = val, unsigned 32-bit immediate */
static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
184

    
185
/* A0 = val, 32-bit immediate address */
static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
189

    
190
#ifdef TARGET_X86_64
191
static inline void gen_op_movq_A0_im(int64_t val)
192
{
193
    tcg_gen_movi_tl(cpu_A0, val);
194
}
195
#endif
196

    
197
/* T0 = val, full target-width immediate */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
201

    
202
/* T1 = val, full target-width immediate */
static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
206

    
207
/* T0 &= 0xffff (truncate to a 16-bit value) */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}
211

    
212
/* T0 &= val */
static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}
216

    
217
/* T0 = T1 */
static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}
221

    
222
/* A0 &= 0xffff (wrap the address for 16-bit addressing modes) */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
226

    
227
#ifdef TARGET_X86_64

/* number of entries in per-size op tables: byte/word/long/quad */
#define NB_OP_SIZES 4

/* Expand one table entry per architectural integer register
   (16 on x86-64, 8 on i386). */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */

/* Byte offsets of the 8/16/32-bit sub-registers within a target_ulong
   register slot, accounting for host endianness (B = low byte,
   H = high byte of AX-style pairs, W = 16-bit word, L = low 32 bits,
   LH = upper 32 bits). */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
278

    
279
/* Store cpu_T[t_index] into guest register 'reg' with operand size 'ot'.
   On x86-64, a 32-bit store also clears the upper half of the register,
   matching hardware behaviour for 32-bit destination writes. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            /* low byte of the register */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            /* regs 4-7 without REX: AH/CH/DH/BH = byte 1 of regs 0-3 */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
311

    
312
/* regs[reg] = T0, with operand size ot */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}
316

    
317
/* regs[reg] = T1, with operand size ot */
static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
321

    
322
/* Store A0 into guest register 'reg'; size is 0/1/2 for 16/32/64 bit.
   As with gen_op_mov_reg_TN, a 32-bit store on x86-64 clears the high
   half of the register. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
347

    
348
/* Load guest register 'reg' into cpu_T[t_index].  Only the AH/CH/DH/BH
   byte case needs special handling; every other size reads the whole
   slot (callers truncate/extend as required). */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            /* AH/CH/DH/BH: byte 1 of regs 0-3, zero-extended */
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
364

    
365
/* A0 = low 32 bits of regs[reg], zero-extended */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
369

    
370
/* A0 += val, keeping A0 a 32-bit address (masked on 64-bit builds) */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
377

    
378
#ifdef TARGET_X86_64
379
static inline void gen_op_addq_A0_im(int64_t val)
380
{
381
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
382
}
383
#endif
384
    
385
/* A0 += val, using 64-bit arithmetic in 64-bit code segments and
   32-bit (wrapping) arithmetic otherwise */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
394

    
395
/* T0 += T1 */
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
399

    
400
/* env->eip = T0 (indirect jump target) */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
404

    
405
/* regs[reg] += val with size 0/1/2 = 16/32/64 bit: 16-bit adds only
   touch the low word, 32-bit adds clear the high half on x86-64. */
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
430

    
431
/* regs[reg] += T0, same size semantics as gen_op_add_reg_im.  Used by
   the string ops to advance ESI/EDI by the Dshift value in T0. */
static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
456

    
457
/* record the pending lazy condition-code operation */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}
461

    
462
/* A0 += regs[reg] << shift, then truncate to 32 bits on x86-64
   (32-bit address computation for SIB-style scaled index) */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
472

    
473
/* A0 = low 32 bits of segment base of segs[reg] */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}
477

    
478
/* A0 += segment base of segs[reg], truncated to 32 bits on x86-64 */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
486

    
487
#ifdef TARGET_X86_64
/* A0 = full 64-bit segment base of segs[reg] */
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}
492

    
493
/* A0 += full 64-bit segment base of segs[reg] */
static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
498

    
499
/* A0 = full 64-bit regs[reg] */
static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}
503

    
504
/* A0 += regs[reg] << shift, full 64-bit scaled-index addition */
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
512

    
513
/* CMOV micro-op table indexed by [operand size - 1][destination reg];
   no byte row since CMOV has no 8-bit form. */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
526

    
527
/* T0 = sign-extended load from [A0].  'idx' packs the operand size in
   its low 2 bits and (mem_index + 1) << 2 above — see callers passing
   ot + s->mem_index. */
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
543

    
544
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
/* T0 = zero-extended load from [A0]; same idx encoding as
   gen_op_lds_T0_A0, with size 3 = 64-bit. */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
564

    
565
/* unsigned load alias — gen_op_ld_T0_A0 already zero-extends */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}
569

    
570
/* T1 = zero-extended load from [A0]; same idx encoding as above */
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
589

    
590
/* store T0 to [A0]; same idx encoding as the loads above */
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
609

    
610
/* store T1 to [A0]; same idx encoding as the loads above */
static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
629

    
630
/* env->eip = pc (used to materialize the current EIP before helper
   calls that may raise exceptions) */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
635

    
636
/* A0 = source address for a string op: segment base (default DS,
   honouring segment overrides) + ESI, with ESI wrapped to 16 bits in
   16-bit addressing mode. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address: segment base only applies with an override */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
670

    
671
/* A0 = destination address for a string op: always ES:EDI (the ES
   segment cannot be overridden for string destinations). */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
691

    
692
static inline void gen_op_movl_T0_Dshift(int ot) 
693
{
694
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
695
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
696
};
697

    
698
/* Zero-extend 'reg' in place from operand size 'ot' to full target
   width.  OT_QUAD (and any other size) needs no work. */
static void gen_extu(int ot, TCGv reg)
{
    if (ot == OT_BYTE) {
        tcg_gen_ext8u_tl(reg, reg);
    } else if (ot == OT_WORD) {
        tcg_gen_ext16u_tl(reg, reg);
    } else if (ot == OT_LONG) {
        tcg_gen_ext32u_tl(reg, reg);
    }
}
714

    
715
/* Sign-extend 'reg' in place from operand size 'ot' to full target
   width.  OT_QUAD (and any other size) needs no work. */
static void gen_exts(int ot, TCGv reg)
{
    if (ot == OT_BYTE) {
        tcg_gen_ext8s_tl(reg, reg);
    } else if (ot == OT_WORD) {
        tcg_gen_ext16s_tl(reg, reg);
    } else if (ot == OT_LONG) {
        tcg_gen_ext32s_tl(reg, reg);
    }
}
731

    
732
/* branch to label1 if ECX (truncated to the current address size,
   size 0/1/2 = 16/32/64 bit) is non-zero */
static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_tmp0, tcg_const_tl(0), label1);
}
738

    
739
/* branch to label1 if ECX (truncated to the current address size) is
   zero */
static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
}
745

    
746
/* ZF-test branch ops for REPZ/REPNZ termination, indexed by
   [nz][operand size]: row 0 branches while ZF clear (jnz), row 1 while
   ZF set (jz), based on the last subtraction result. */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
760

    
761
/* I/O port read helpers, indexed by operand size (byte/word/long) */
static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};
766

    
767
/* I/O port write helpers, indexed by operand size (byte/word/long) */
static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};
772

    
773
/* TSS I/O permission bitmap check helpers, indexed by operand size */
static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
778

    
779
/* Emit the permission checks required before an I/O instruction whose
   port number is in T0: the TSS bitmap check when CPL > IOPL or in
   vm86 mode, and the SVM IOIO intercept check when that intercept is
   enabled.  cc state and EIP are flushed first so the helpers can
   raise a fault. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        /* flush state only once even if both checks run */
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));   /* encode access size for the intercept */
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
811

    
812
/* MOVS: copy one element from DS:ESI to ES:EDI, then advance both
   index registers by the direction-flag step. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
822

    
823
/* flush the lazy cc_op to the CPU state and mark it dynamic */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
830

    
831
/* lazy flags from a one-operand result: cc_dst = T0, cc_src unused */
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
836

    
837
/* lazy flags from a two-operand op: cc_src = T1, cc_dst = T0 */
static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
842

    
843
/* CMP flags: cc_src = T1 (subtrahend), cc_dst = T0 - T1 */
static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}
848

    
849
/* TEST flags: cc_dst = T0 & T1, cc_src unused */
static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}
854

    
855
/* NEG flags: cc_src = -T0 (the original operand), cc_dst = T0 (result) */
static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
860

    
861
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the REP-loop entry test: if ECX != 0 fall through to the string
   op (label l1), otherwise jump to next_eip via label l2.  Returns l2
   so the caller can branch back to the loop-exit path. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
875

    
876
/* STOS: store EAX (size ot) to ES:EDI, then advance EDI */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
884

    
885
/* LODS: load from DS:ESI into EAX (size ot), then advance ESI */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
893

    
894
/* SCAS: compare EAX with [ES:EDI] (sets lazy flags), then advance EDI */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
903

    
904
/* CMPS: compare [DS:ESI] with [ES:EDI] (sets lazy flags), then advance
   both index registers */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
915

    
916
/* INS: read from port DX into [ES:EDI], then advance EDI */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);   /* port is 16 bits */
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
931

    
932
/* OUTS: write [DS:ESI] to port DX, then advance ESI */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);   /* port is 16 bits */
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
946

    
947
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* REP prefix wrapper: run one iteration of the string op, decrement
   ECX, and loop back to cur_eip; gen_jz_ecx_string supplies the
   ECX == 0 exit to next_eip. */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
964

    
965
/* REPZ/REPNZ wrapper for SCAS/CMPS: like GEN_REPZ but also exits the
   loop on the ZF condition selected by 'nz' after each comparison. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
982

    
983
/* instantiate the REP'd string operations */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
990

    
991
/* Jcc condition codes (the x86 encoding's bits 3..1; bit 0 selects the
   negated form). */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
1001

    
1002
/* Fast conditional-jump ops for a pending subtraction, indexed by
   [operand size][JCC_* condition].  NULL entries (JCC_O, JCC_P, and
   the BUGGY_64 quad cases) must fall back to the slow generic path. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
1046

    
1047
/* Slow SETcc generators, indexed by jcc_op: compute the condition into
   T0 from the full lazily-evaluated condition-code state.  Works for
   any cc_op, unlike gen_setcc_sub below. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1057

    
1058
/* Fast SETcc generators indexed by [operand size][jcc_op]; valid only
   when cc_op is a SUB variant, mirroring gen_jcc_sub.  NULL entries
   (JCC_O, JCC_P) fall back to gen_setcc_slow. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1102

    
1103
/* x87 arithmetic helpers operating on ST0 and FT0, indexed by the 3-bit
   opcode extension of the D8/DC group (add/mul/com/comp/sub/subr/div/divr).
   FCOM appears twice because FCOM and FCOMP share the arithmetic helper;
   the pop is handled by the caller. */
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};
1113

    
1114
/* NOTE the exception in "r" op ordering: for the STN,ST0 encodings the
   x86 ISA swaps the plain and reversed sub/div slots relative to the
   ST0,FT0 table above.  NULL slots correspond to the compare encodings,
   which have no STN,ST0 arithmetic form. */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
1125

    
1126
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
    /* The carry is computed lazily: cc_table[cpu_cc_op].compute_c is a
       host function returning CF for the current condition-code state.
       The function pointer is fetched at translation-output runtime by
       indexing cc_table with cpu_cc_op and called indirectly.  The
       entry stride is 8 bytes on 32-bit hosts (shift by 3) and 16 on
       64-bit hosts (shift by 4) — presumably two function pointers per
       CCTable entry; verify against the CCTable definition. */
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    /* Widen the 32-bit helper result to target width into 'reg'. */
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1147

    
1148
/* Compute the full eflags value into 'reg' (callers typically pass
   cpu_cc_src).  Same indirect-call scheme as gen_compute_eflags_c,
   but using the compute_all entry of the CCTable. */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1169

    
1170
/* Emit code for a two-operand ALU op (ADD/ADC/SBB/SUB/AND/OR/XOR/CMP).
   Operands: T1 is the source; the destination is register 'd', or the
   memory at A0 if d == OR_TMP0.  Updates the lazy condition-code state
   (cc_src/cc_dst/cc_op) and s1->cc_op accordingly. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    /* load destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        /* add with carry-in: the current CF must be materialized first */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        /* select the cc_op at runtime: CC_OP_ADDB + ot when carry-in was
           0, shifted by 4 entries (carry << 2) when it was 1 — presumably
           CC_OP_ADCB == CC_OP_ADDB + 4; verify against the CC_OP enum. */
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        /* subtract with borrow-in: same scheme as OP_ADCL */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        /* cc_src = T1, cc_dst = result */
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
        /* unknown ops deliberately fall through to AND */
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        /* logic ops only need the result for flag reconstruction */
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        /* compare: flags only, no result written back */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
1265

    
1266
/* Emit code for INC (c > 0) or DEC (c <= 0) of operand 'd'
   (a register, or the memory at A0 if d == OR_TMP0).
   INC/DEC preserve CF: the old carry is captured into cc_src (while
   cpu_cc_op still describes the previous operation) so the INC/DEC
   cc_op variants can recover it later. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    /* flush the previous cc_op so gen_compute_eflags_c below sees it */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    /* preserved CF goes to cc_src; result to cc_dst */
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1289

    
1290
/* XXX: add faster immediate case */
/* Emit code for SHL/SHR/SAR with a variable count in T1.  Operand is
   register op1, or memory at A0 if op1 == OR_TMP0.  is_right selects
   the shift direction; is_arith selects SAR over SHR.  Flags are only
   updated when the masked count is non-zero, as the ISA requires. */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1, 
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    
    /* the hardware masks the count to 6 bits for 64-bit ops, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* tmp5 = count - 1, used to compute the value whose low/high bit
       becomes CF (the operand shifted by one less than the count) */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);
        
    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* skip the flag update entirely when count == 0 */
    shift_label = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
        
    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1350

    
1351
/* Signed-count immediate shift: a non-negative count shifts left by
   that amount, a negative count shifts right by its magnitude. */
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 < 0)
        tcg_gen_shri_tl(ret, arg1, -arg2);
    else
        tcg_gen_shli_tl(ret, arg1, arg2);
}
1358

    
1359
/* XXX: add faster immediate case */
/* Emit code for ROL/ROR with a variable count in T1.  Operand is
   register op1, or memory at A0 if op1 == OR_TMP0.  The rotate is
   built from two opposite shifts OR-ed together; flags (CF, OF) are
   only updated when the masked count is non-zero. */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1, 
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);
    
    /* tmp0 = count reduced modulo the operand width */
    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);
    
    gen_extu(ot, cpu_T[0]);
    /* keep the original value in T3 for the OF computation below */
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    }
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);
    
    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* count == 0: leave all flags untouched */
    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);

    /* start from the full current eflags, clear CF and OF, then
       recompute: OF from (old ^ new) at the top bit position, CF from
       the bit that was rotated around. */
    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
    }
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
    
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
        
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1438

    
1439
/* Rotate-through-carry helpers, indexed as ot + 4 * is_right:
   entries 0-3 are RCL (byte/word/long/quad), 4-7 are RCR.  The 64-bit
   variants are only available on TARGET_X86_64 builds (X86_64_ONLY
   expands to NULL otherwise). */
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};
1449

    
1450
/* XXX: add faster immediate = 1 case */
/* Emit code for RCL/RCR (rotate through carry) with count in T1, via
   the out-of-line helper_rotc helpers.  Operand is register op1, or
   memory at A0 if op1 == OR_TMP0. */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1, 
                           int is_right)
{
    int label1;

    /* the helper consumes the current CF, so flush cc_op first */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);
    
    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    /* NOTE(review): the helper appears to leave the resulting eflags in
       cpu_T3, or -1 when the count was zero so flags must stay
       unchanged — confirm against the helper_rcl*/rcr* implementations. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
        
    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1484

    
1485
/* XXX: add faster immediate case */
/* Emit code for SHLD/SHRD (double-precision shift): T0 is the
   destination operand (register op1, or memory at A0 if
   op1 == OR_TMP0), T1 supplies the bits shifted in, and T3 holds the
   shift count.  Flags are only updated when the masked count is
   non-zero. */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1, 
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
    
    /* tmp5 = count - 1, for the CF bit (as in gen_shift_rm_T1) */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            /* build a 32-bit value T1:T0 so a count up to 31 pulls
               source bits into the low word */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            
            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);

            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
            
            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            /* tmp4 = value shifted by count-1, low bit becomes CF */
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            
        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            
            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    }
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);
    
    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* count == 0: leave flags untouched */
    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1595

    
1596
/* Dispatch a shift/rotate op: destination is 'd', count comes from
   register 's' (loaded into T1) or is already in T1 when s == OR_TMP1. */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    /* make sure the count is in T1 */
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);

    switch(op) {
    /* plain shifts */
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    /* rotates */
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    /* rotates through carry */
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}
1625

    
1626
/* Shift/rotate by an immediate count: simply loads the count into T1
   and reuses the variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1632

    
1633
/* Decode the memory form of a modrm byte (mod != 3 is assumed) and emit
   code computing the effective address into A0, including any SIB byte,
   displacement, and segment base.  Advances s->pc past the consumed
   bytes.  *reg_ptr/*offset_ptr are always set to OR_A0/0 on return. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    /* an explicit segment override forces the segment-base add even
       when s->addseg is clear */
    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* SIB byte follows */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: disp32 only (or RIP-relative in
                   64-bit mode when there is no SIB byte) */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP-based, DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16-only form */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the eight 16-bit base/index combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16-bit effective addresses wrap at 64K */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1816

    
1817
/* Decode a modrm byte solely to advance s->pc past any SIB byte and
   displacement, generating no code (used for NOP-like encodings that
   still carry a full modrm operand). */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base;

    mod = (modrm >> 6) & 3;
    if (mod == 3)               /* register operand: nothing to skip */
        return;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        base = rm;
        if (base == 4) {
            /* SIB byte present: consume it; only the base field
               matters for displacement sizing */
            base = ldub_code(s->pc++) & 7;
        }
        if (mod == 1) {
            s->pc += 1;                 /* disp8 */
        } else if (mod == 2 || base == 5) {
            s->pc += 4;                 /* disp32 (mod==2 or no-base form) */
        }
    } else {
        /* 16-bit addressing */
        if (mod == 1) {
            s->pc += 1;                 /* disp8 */
        } else if (mod == 2 || rm == 6) {
            s->pc += 2;                 /* disp16 (mod==2 or rm==6 form) */
        }
    }
}
1866

    
1867
/* used for LEA and MOV AX, mem */
1868
static void gen_add_A0_ds_seg(DisasContext *s)
1869
{
1870
    int override, must_add_seg;
1871
    must_add_seg = s->addseg;
1872
    override = R_DS;
1873
    if (s->override >= 0) {
1874
        override = s->override;
1875
        must_add_seg = 1;
1876
    } else {
1877
        override = R_DS;
1878
    }
1879
    if (must_add_seg) {
1880
#ifdef TARGET_X86_64
1881
        if (CODE64(s)) {
1882
            gen_op_addq_A0_seg(override);
1883
        } else
1884
#endif
1885
        {
1886
            gen_op_addl_A0_seg(override);
1887
        }
1888
    }
1889
}
1890

    
1891
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
/* For mod == 3 the operand is the register encoded in rm; otherwise the
   effective address is computed into A0 and the access goes through
   memory.  is_store selects store (reg -> operand) vs load
   (operand -> reg); reg == OR_TMP0 means the value travels via T0 only. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        /* register-to-register move */
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        /* memory operand: compute A0 first */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
1922

    
1923
/* Fetch an immediate operand of size 'ot' from the instruction stream
   and advance s->pc past it.  Sizes wider than OT_WORD are fetched as
   32-bit quantities. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t val;

    if (ot == OT_BYTE) {
        val = ldub_code(s->pc);
        s->pc += 1;
    } else if (ot == OT_WORD) {
        val = lduw_code(s->pc);
        s->pc += 2;
    } else {
        /* OT_LONG and anything larger */
        val = ldl_code(s->pc);
        s->pc += 4;
    }
    return val;
}
1944

    
1945
static inline int insn_const_size(unsigned int ot)
1946
{
1947
    if (ot <= OT_LONG)
1948
        return 1 << ot;
1949
    else
1950
        return 4;
1951
}
1952

    
1953
/* Emit a jump to target 'eip' using direct TB chaining (slot tb_num)
   when the destination lies in one of the pages the current TB already
   occupies; otherwise fall back to an unchained end-of-block exit. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1973

    
1974
/* Emit a conditional jump for condition code 'b': branch to 'val',
   fall through to 'next_eip'.  The low bit of 'b' selects the
   inverted condition.  With s->jmp_opt set, both targets are emitted
   as chainable goto_tb exits; otherwise EIP is written dynamically
   and the block is ended. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;                  /* odd condition codes are negations */
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for these ops only Z and S can be derived directly from
               CC_DST; '% 4' extracts the operand size index.
               NOTE(review): the JCC_Z and JCC_S arms are identical and
               could be merged. */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* slow path: materialize the condition in T0, then branch
               on T0 != 0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            /* inverted condition: swap taken/not-taken targets */
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        /* emit both EIP updates with an explicit branch/skip pair,
           then end the block */
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2102

    
2103
/* Emit code computing condition 'b' into T0 (0 or 1), for SETcc and
   CMOVcc.  Uses the fast per-cc_op tables when the current lazy flag
   state allows it, otherwise falls back to the generic slow path. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;                  /* odd condition codes are negations */
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only Z and S are directly available from CC_DST for these
           ops; '% 4' selects the operand size row.
           NOTE(review): the JCC_Z and JCC_S arms are identical. */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* flags must be up to date before the generic evaluator runs */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2169

    
2170
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load may fault, so flags and EIP must be
           synchronized before calling the helper */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: plain selector write, no descriptor load */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2193

    
2194
static inline int svm_is_rep(int prefixes)
2195
{
2196
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2197
}
2198

    
2199
/* Emit an SVM intercept check for the event 'type' with exit-info
   'param'.  Returns 1 when the intercept unconditionally ends the
   block (a vmexit was emitted), 0 when translation may continue
   (including when SVM is not active or the check is resolved at
   run time by a helper). */
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            /* sync lazy flags and EIP so the helper can vmexit */
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_svm_check_intercept_param, 
                               tcg_const_i32(type), tcg_const_i64(param));
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            /* intercept bit known at translation time: emit an
               unconditional vmexit */
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_vmexit,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
    }
    return 0;
}
2247

    
2248
/* Convenience wrapper for intercept checks that carry no exit-info
   parameter. */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2253

    
2254
/* Add 'addend' to the stack pointer, using the stack width implied by
   the current mode: 64-bit in long mode, else 32- or 16-bit per ss32. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
        return;
    }
#endif
    gen_op_add_reg_im(s->ss32 ? 1 : 0, R_ESP, addend);
}
2267

    
2268
/* generate a push. It depends on ss32, addseg and dflag */
/* Store T0 on the stack and update ESP/RSP accordingly. */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* long mode: flat stack; 8-byte push unless a 66h prefix
           (dflag == 0) forces a 2-byte push */
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented new ESP in T1 for the final
                   register write-back below */
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            /* 16-bit stack: wrap offset and always add the SS base */
            gen_op_andl_A0_ffff();
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2307

    
2308
/* generate a push. It depends on ss32, addseg and dflag */
2309
/* slower version for T1, only used for call Ev */
2310
static void gen_push_T1(DisasContext *s)
2311
{
2312
#ifdef TARGET_X86_64
2313
    if (CODE64(s)) {
2314
        gen_op_movq_A0_reg(R_ESP);
2315
        if (s->dflag) {
2316
            gen_op_addq_A0_im(-8);
2317
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2318
        } else {
2319
            gen_op_addq_A0_im(-2);
2320
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2321
        }
2322
        gen_op_mov_reg_A0(2, R_ESP);
2323
    } else
2324
#endif
2325
    {
2326
        gen_op_movl_A0_reg(R_ESP);
2327
        if (!s->dflag)
2328
            gen_op_addl_A0_im(-2);
2329
        else
2330
            gen_op_addl_A0_im(-4);
2331
        if (s->ss32) {
2332
            if (s->addseg) {
2333
                gen_op_addl_A0_seg(R_SS);
2334
            }
2335
        } else {
2336
            gen_op_andl_A0_ffff();
2337
            gen_op_addl_A0_seg(R_SS);
2338
        }
2339
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2340

    
2341
        if (s->ss32 && !s->addseg)
2342
            gen_op_mov_reg_A0(1, R_ESP);
2343
        else
2344
            gen_stack_update(s, (-2) << s->dflag);
2345
    }
2346
}
2347

    
2348
/* two step pop is necessary for precise exceptions */
/* Load the stack top into T0 WITHOUT updating ESP; the caller calls
   gen_pop_update() only after the value is safely consumed, so a
   memory fault leaves ESP unmodified. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            /* 16-bit stack: wrap offset and always add the SS base */
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2369

    
2370
/* Second half of a pop: bump the stack pointer by the operand size
   (8 bytes in long mode without a 66h prefix, else 2 << dflag). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
2381

    
2382
/* Compute the linear address of the stack top into A0; the
   unsegmented offset is also left in T1 for later ESP write-back. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2391

    
2392
/* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: push the 8 general registers (EAX..EDI, register
   7-i order) and write the pre-decremented offset back to ESP. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 <<  s->dflag);   /* room for 8 regs of 2<<dflag bytes */
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);      /* save new ESP offset */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2410

    
2411
/* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: pop the 8 general registers (EDI..EAX), skipping the
   stored ESP slot, then write the incremented offset back to ESP. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 <<  s->dflag);  /* final ESP */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2432

    
2433
/* ENTER: push EBP, optionally copy 'level' display entries via a
   helper, set EBP to the new frame base, and reserve 'esp_addend'
   extra bytes of locals on the stack. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;                 /* nesting level is taken modulo 32 */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);  /* T1 = new frame pointer */

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);  /* T1 = new frame offset */
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2488

    
2489
/* Raise exception 'trapno' at guest address 'cur_eip': flags and EIP
   are synchronized first so the exception is precise, then the block
   is terminated. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
2497

    
2498
/* an interrupt is different from an exception because of the
   privilege checks */
/* Raise software interrupt 'intno'; next_eip - cur_eip is passed so
   the helper can compute the return address of the INT instruction. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt, 
                       tcg_const_i32(intno), 
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}
2511

    
2512
/* Stop execution at 'cur_eip' and enter the debugger (used for
   breakpoints): sync flags and EIP, call the debug helper, and end
   the block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
2520

    
2521
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the previous instruction set the interrupt-shadow; clear it
           now that one instruction has executed */
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        /* gdbstub single-stepping */
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        /* guest TF flag: raise a single-step trap */
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
2539

    
2540
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        /* chaining allowed: flush lazy flags, then emit a goto_tb */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2556

    
2557
/* Unconditional jump to 'eip' using TB exit slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2561

    
2562
static inline void gen_ldq_env_A0(int idx, int offset)
2563
{
2564
    int mem_index = (idx >> 2) - 1;
2565
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2566
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2567
}
2568

    
2569
static inline void gen_stq_env_A0(int idx, int offset)
2570
{
2571
    int mem_index = (idx >> 2) - 1;
2572
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2573
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2574
}
2575

    
2576
/* Load a 128-bit XMM value (two 64-bit halves) from the guest address
   in A0 into the XMMReg at 'offset'. */
static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    /* low quadword */
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    /* high quadword at A0 + 8 */
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}
2585

    
2586
/* Store a 128-bit XMM value (two 64-bit halves) from the XMMReg at
   'offset' to the guest address in A0. */
static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    /* low quadword */
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    /* high quadword at A0 + 8 */
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
2595

    
2596
/* Copy a 128-bit (octaword) CPU-state field, 64 bits at a time. */
static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}
2603

    
2604
/* Copy a 64-bit CPU-state field from s_offset to d_offset. */
static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2609

    
2610
/* Copy a 32-bit CPU-state field from s_offset to d_offset. */
static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}
2615

    
2616
/* Zero a 64-bit CPU-state field at d_offset. */
static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2621

    
2622
/* Markers used in the SSE dispatch tables below: SSE_SPECIAL entries
   are decoded by hand inside gen_sse(); SSE_DUMMY entries need only
   the generic MMX/3DNow! prologue. */
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

/* Helper-tuple initializers: MMX_OP2 gives the {mmx, xmm} pair for one
   op; SSE_FOP gives the {ps, pd, ss, sd} variants of one FP op. */
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
2628

    
2629
/* Dispatch table for two-byte 0F-prefixed MMX/SSE opcodes, indexed by
   opcode byte and then by prefix: [0] none, [1] 66h, [2] F3h, [3] F2h
   (see b1 computation in gen_sse). */
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2754

    
2755
/* Immediate-count MMX/SSE shift group (opcodes 0F 71/72/73): indexed
   by 8 * size-group (0 = word, 8 = dword, 16 = qword) + the modrm reg
   field, then by {mmx, xmm}.  The dq-byte-shift entries exist only in
   the xmm column. */
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },
};
2767

    
2768
/* Scalar int<->float conversion helpers, in groups of four
   {ss, sd, sq->ss, sq->sd}: row 0 = cvtsi2*, row 1 = cvtt*2si
   (truncating), row 2 = cvt*2si (rounding).  The 64-bit entries are
   NULL outside TARGET_X86_64 builds. */
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
2784

    
2785
/* CMPPS/CMPPD/CMPSS/CMPSD (0F C2): row index is the 3-bit immediate
   predicate, columns are the {ps, pd, ss, sd} helper variants. */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2795

    
2796
/* 3DNow! dispatch table, indexed by the instruction's trailing
   opcode/suffix byte. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2822

    
2823
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2824
{
2825
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2826
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2827
    void *sse_op2;
2828

    
2829
    b &= 0xff;
2830
    if (s->prefix & PREFIX_DATA)
2831
        b1 = 1;
2832
    else if (s->prefix & PREFIX_REPZ)
2833
        b1 = 2;
2834
    else if (s->prefix & PREFIX_REPNZ)
2835
        b1 = 3;
2836
    else
2837
        b1 = 0;
2838
    sse_op2 = sse_op_table1[b][b1];
2839
    if (!sse_op2)
2840
        goto illegal_op;
2841
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2842
        is_xmm = 1;
2843
    } else {
2844
        if (b1 == 0) {
2845
            /* MMX case */
2846
            is_xmm = 0;
2847
        } else {
2848
            is_xmm = 1;
2849
        }
2850
    }
2851
    /* simple MMX/SSE operation */
2852
    if (s->flags & HF_TS_MASK) {
2853
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2854
        return;
2855
    }
2856
    if (s->flags & HF_EM_MASK) {
2857
    illegal_op:
2858
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2859
        return;
2860
    }
2861
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2862
        goto illegal_op;
2863
    if (b == 0x0e) {
2864
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2865
            goto illegal_op;
2866
        /* femms */
2867
        tcg_gen_helper_0_0(helper_emms);
2868
        return;
2869
    }
2870
    if (b == 0x77) {
2871
        /* emms */
2872
        tcg_gen_helper_0_0(helper_emms);
2873
        return;
2874
    }
2875
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2876
       the static cpu state) */
2877
    if (!is_xmm) {
2878
        tcg_gen_helper_0_0(helper_enter_mmx);
2879
    }
2880

    
2881
    modrm = ldub_code(s->pc++);
2882
    reg = ((modrm >> 3) & 7);
2883
    if (is_xmm)
2884
        reg |= rex_r;
2885
    mod = (modrm >> 6) & 3;
2886
    if (sse_op2 == SSE_SPECIAL) {
2887
        b |= (b1 << 8);
2888
        switch(b) {
2889
        case 0x0e7: /* movntq */
2890
            if (mod == 3)
2891
                goto illegal_op;
2892
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2893
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2894
            break;
2895
        case 0x1e7: /* movntdq */
2896
        case 0x02b: /* movntps */
2897
        case 0x12b: /* movntps */
2898
        case 0x3f0: /* lddqu */
2899
            if (mod == 3)
2900
                goto illegal_op;
2901
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2902
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2903
            break;
2904
        case 0x6e: /* movd mm, ea */
2905
#ifdef TARGET_X86_64
2906
            if (s->dflag == 2) {
2907
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2908
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2909
            } else
2910
#endif
2911
            {
2912
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2913
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2914
                                 offsetof(CPUX86State,fpregs[reg].mmx));
2915
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2916
            }
2917
            break;
2918
        case 0x16e: /* movd xmm, ea */
2919
#ifdef TARGET_X86_64
2920
            if (s->dflag == 2) {
2921
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2922
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2923
                                 offsetof(CPUX86State,xmm_regs[reg]));
2924
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2925
            } else
2926
#endif
2927
            {
2928
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2929
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2930
                                 offsetof(CPUX86State,xmm_regs[reg]));
2931
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2932
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
2933
            }
2934
            break;
2935
        case 0x6f: /* movq mm, ea */
2936
            if (mod != 3) {
2937
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2938
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2939
            } else {
2940
                rm = (modrm & 7);
2941
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
2942
                               offsetof(CPUX86State,fpregs[rm].mmx));
2943
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
2944
                               offsetof(CPUX86State,fpregs[reg].mmx));
2945
            }
2946
            break;
2947
        case 0x010: /* movups */
2948
        case 0x110: /* movupd */
2949
        case 0x028: /* movaps */
2950
        case 0x128: /* movapd */
2951
        case 0x16f: /* movdqa xmm, ea */
2952
        case 0x26f: /* movdqu xmm, ea */
2953
            if (mod != 3) {
2954
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2955
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2956
            } else {
2957
                rm = (modrm & 7) | REX_B(s);
2958
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2959
                            offsetof(CPUX86State,xmm_regs[rm]));
2960
            }
2961
            break;
2962
        case 0x210: /* movss xmm, ea */
2963
            if (mod != 3) {
2964
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2965
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2966
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2967
                gen_op_movl_T0_0();
2968
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2969
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2970
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2971
            } else {
2972
                rm = (modrm & 7) | REX_B(s);
2973
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2974
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2975
            }
2976
            break;
2977
        case 0x310: /* movsd xmm, ea */
2978
            if (mod != 3) {
2979
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2980
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2981
                gen_op_movl_T0_0();
2982
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2983
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2984
            } else {
2985
                rm = (modrm & 7) | REX_B(s);
2986
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2987
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2988
            }
2989
            break;
2990
        case 0x012: /* movlps */
2991
        case 0x112: /* movlpd */
2992
            if (mod != 3) {
2993
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2994
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2995
            } else {
2996
                /* movhlps */
2997
                rm = (modrm & 7) | REX_B(s);
2998
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2999
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3000
            }
3001
            break;
3002
        case 0x212: /* movsldup */
3003
            if (mod != 3) {
3004
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3005
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3006
            } else {
3007
                rm = (modrm & 7) | REX_B(s);
3008
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3009
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3010
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3011
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3012
            }
3013
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3014
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3015
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3016
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3017
            break;
3018
        case 0x312: /* movddup */
3019
            if (mod != 3) {
3020
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3021
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3022
            } else {
3023
                rm = (modrm & 7) | REX_B(s);
3024
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3025
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3026
            }
3027
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3028
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3029
            break;
3030
        case 0x016: /* movhps */
3031
        case 0x116: /* movhpd */
3032
            if (mod != 3) {
3033
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3034
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3035
            } else {
3036
                /* movlhps */
3037
                rm = (modrm & 7) | REX_B(s);
3038
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3039
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3040
            }
3041
            break;
3042
        case 0x216: /* movshdup */
3043
            if (mod != 3) {
3044
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3045
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3046
            } else {
3047
                rm = (modrm & 7) | REX_B(s);
3048
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3049
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3050
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3051
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3052
            }
3053
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3054
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3055
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3056
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3057
            break;
3058
        case 0x7e: /* movd ea, mm */
3059
#ifdef TARGET_X86_64
3060
            if (s->dflag == 2) {
3061
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
3062
                               offsetof(CPUX86State,fpregs[reg].mmx));
3063
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3064
            } else
3065
#endif
3066
            {
3067
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
3068
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3069
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3070
            }
3071
            break;
3072
        case 0x17e: /* movd ea, xmm */
3073
#ifdef TARGET_X86_64
3074
            if (s->dflag == 2) {
3075
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
3076
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3077
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3078
            } else
3079
#endif
3080
            {
3081
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
3082
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3083
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3084
            }
3085
            break;
3086
        case 0x27e: /* movq xmm, ea */
3087
            if (mod != 3) {
3088
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3089
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3090
            } else {
3091
                rm = (modrm & 7) | REX_B(s);
3092
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3093
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3094
            }
3095
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3096
            break;
3097
        case 0x7f: /* movq ea, mm */
3098
            if (mod != 3) {
3099
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3100
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3101
            } else {
3102
                rm = (modrm & 7);
3103
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3104
                            offsetof(CPUX86State,fpregs[reg].mmx));
3105
            }
3106
            break;
3107
        case 0x011: /* movups */
3108
        case 0x111: /* movupd */
3109
        case 0x029: /* movaps */
3110
        case 0x129: /* movapd */
3111
        case 0x17f: /* movdqa ea, xmm */
3112
        case 0x27f: /* movdqu ea, xmm */
3113
            if (mod != 3) {
3114
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3115
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3116
            } else {
3117
                rm = (modrm & 7) | REX_B(s);
3118
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3119
                            offsetof(CPUX86State,xmm_regs[reg]));
3120
            }
3121
            break;
3122
        case 0x211: /* movss ea, xmm */
3123
            if (mod != 3) {
3124
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3125
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3126
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
3127
            } else {
3128
                rm = (modrm & 7) | REX_B(s);
3129
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3130
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3131
            }
3132
            break;
3133
        case 0x311: /* movsd ea, xmm */
3134
            if (mod != 3) {
3135
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3136
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3137
            } else {
3138
                rm = (modrm & 7) | REX_B(s);
3139
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3140
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3141
            }
3142
            break;
3143
        case 0x013: /* movlps */
3144
        case 0x113: /* movlpd */
3145
            if (mod != 3) {
3146
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3147
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3148
            } else {
3149
                goto illegal_op;
3150
            }
3151
            break;
3152
        case 0x017: /* movhps */
3153
        case 0x117: /* movhpd */
3154
            if (mod != 3) {
3155
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3156
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3157
            } else {
3158
                goto illegal_op;
3159
            }
3160
            break;
3161
        case 0x71: /* shift mm, im */
3162
        case 0x72:
3163
        case 0x73:
3164
        case 0x171: /* shift xmm, im */
3165
        case 0x172:
3166
        case 0x173:
3167
            val = ldub_code(s->pc++);
3168
            if (is_xmm) {
3169
                gen_op_movl_T0_im(val);
3170
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3171
                gen_op_movl_T0_0();
3172
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3173
                op1_offset = offsetof(CPUX86State,xmm_t0);
3174
            } else {
3175
                gen_op_movl_T0_im(val);
3176
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3177
                gen_op_movl_T0_0();
3178
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3179
                op1_offset = offsetof(CPUX86State,mmx_t0);
3180
            }
3181
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3182
            if (!sse_op2)
3183
                goto illegal_op;
3184
            if (is_xmm) {
3185
                rm = (modrm & 7) | REX_B(s);
3186
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3187
            } else {
3188
                rm = (modrm & 7);
3189
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3190
            }
3191
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3192
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3193
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3194
            break;
3195
        case 0x050: /* movmskps */
3196
            rm = (modrm & 7) | REX_B(s);
3197
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3198
                             offsetof(CPUX86State,xmm_regs[rm]));
3199
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3200
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3201
            gen_op_mov_reg_T0(OT_LONG, reg);
3202
            break;
3203
        case 0x150: /* movmskpd */
3204
            rm = (modrm & 7) | REX_B(s);
3205
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3206
                             offsetof(CPUX86State,xmm_regs[rm]));
3207
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3208
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3209
            gen_op_mov_reg_T0(OT_LONG, reg);
3210
            break;
3211
        case 0x02a: /* cvtpi2ps */
3212
        case 0x12a: /* cvtpi2pd */
3213
            tcg_gen_helper_0_0(helper_enter_mmx);
3214
            if (mod != 3) {
3215
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3216
                op2_offset = offsetof(CPUX86State,mmx_t0);
3217
                gen_ldq_env_A0(s->mem_index, op2_offset);
3218
            } else {
3219
                rm = (modrm & 7);
3220
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3221
            }
3222
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3223
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3224
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3225
            switch(b >> 8) {
3226
            case 0x0:
3227
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3228
                break;
3229
            default:
3230
            case 0x1:
3231
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3232
                break;
3233
            }
3234
            break;
3235
        case 0x22a: /* cvtsi2ss */
3236
        case 0x32a: /* cvtsi2sd */
3237
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3238
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3239
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3240
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3241
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3242
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3243
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3244
            break;
3245
        case 0x02c: /* cvttps2pi */
3246
        case 0x12c: /* cvttpd2pi */
3247
        case 0x02d: /* cvtps2pi */
3248
        case 0x12d: /* cvtpd2pi */
3249
            tcg_gen_helper_0_0(helper_enter_mmx);
3250
            if (mod != 3) {
3251
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3252
                op2_offset = offsetof(CPUX86State,xmm_t0);
3253
                gen_ldo_env_A0(s->mem_index, op2_offset);
3254
            } else {
3255
                rm = (modrm & 7) | REX_B(s);
3256
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3257
            }
3258
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3259
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3260
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3261
            switch(b) {
3262
            case 0x02c:
3263
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3264
                break;
3265
            case 0x12c:
3266
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3267
                break;
3268
            case 0x02d:
3269
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3270
                break;
3271
            case 0x12d:
3272
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3273
                break;
3274
            }
3275
            break;
3276
        case 0x22c: /* cvttss2si */
3277
        case 0x32c: /* cvttsd2si */
3278
        case 0x22d: /* cvtss2si */
3279
        case 0x32d: /* cvtsd2si */
3280
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3281
            if (mod != 3) {
3282
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3283
                if ((b >> 8) & 1) {
3284
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3285
                } else {
3286
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3287
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3288
                }
3289
                op2_offset = offsetof(CPUX86State,xmm_t0);
3290
            } else {
3291
                rm = (modrm & 7) | REX_B(s);
3292
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3293
            }
3294
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3295
                                    (b & 1) * 4];
3296
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3297
            if (ot == OT_LONG) {
3298
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3299
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3300
            } else {
3301
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3302
            }
3303
            gen_op_mov_reg_T0(ot, reg);
3304
            break;
3305
        case 0xc4: /* pinsrw */
3306
        case 0x1c4:
3307
            s->rip_offset = 1;
3308
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3309
            val = ldub_code(s->pc++);
3310
            if (b1) {
3311
                val &= 7;
3312
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3313
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3314
            } else {
3315
                val &= 3;
3316
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3317
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3318
            }
3319
            break;
3320
        case 0xc5: /* pextrw */
3321
        case 0x1c5:
3322
            if (mod != 3)
3323
                goto illegal_op;
3324
            val = ldub_code(s->pc++);
3325
            if (b1) {
3326
                val &= 7;
3327
                rm = (modrm & 7) | REX_B(s);
3328
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3329
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3330
            } else {
3331
                val &= 3;
3332
                rm = (modrm & 7);
3333
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3334
                                offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3335
            }
3336
            reg = ((modrm >> 3) & 7) | rex_r;
3337
            gen_op_mov_reg_T0(OT_LONG, reg);
3338
            break;
3339
        case 0x1d6: /* movq ea, xmm */
3340
            if (mod != 3) {
3341
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3342
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3343
            } else {
3344
                rm = (modrm & 7) | REX_B(s);
3345
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3346
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3347
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3348
            }
3349
            break;
3350
        case 0x2d6: /* movq2dq */
3351
            tcg_gen_helper_0_0(helper_enter_mmx);
3352
            rm = (modrm & 7);
3353
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3354
                        offsetof(CPUX86State,fpregs[rm].mmx));
3355
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3356
            break;
3357
        case 0x3d6: /* movdq2q */
3358
            tcg_gen_helper_0_0(helper_enter_mmx);
3359
            rm = (modrm & 7) | REX_B(s);
3360
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3361
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3362
            break;
3363
        case 0xd7: /* pmovmskb */
3364
        case 0x1d7:
3365
            if (mod != 3)
3366
                goto illegal_op;
3367
            if (b1) {
3368
                rm = (modrm & 7) | REX_B(s);
3369
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3370
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3371
            } else {
3372
                rm = (modrm & 7);
3373
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3374
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3375
            }
3376
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3377
            reg = ((modrm >> 3) & 7) | rex_r;
3378
            gen_op_mov_reg_T0(OT_LONG, reg);
3379
            break;
3380
        default:
3381
            goto illegal_op;
3382
        }
3383
    } else {
3384
        /* generic MMX or SSE operation */
3385
        switch(b) {
3386
        case 0x70: /* pshufx insn */
3387
        case 0xc6: /* pshufx insn */
3388
        case 0xc2: /* compare insns */
3389
            s->rip_offset = 1;
3390
            break;
3391
        default:
3392
            break;
3393
        }
3394
        if (is_xmm) {
3395
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3396
            if (mod != 3) {
3397
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3398
                op2_offset = offsetof(CPUX86State,xmm_t0);
3399
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3400
                                b == 0xc2)) {
3401
                    /* specific case for SSE single instructions */
3402
                    if (b1 == 2) {
3403
                        /* 32 bit access */
3404
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3405
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3406
                    } else {
3407
                        /* 64 bit access */
3408
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3409
                    }
3410
                } else {
3411
                    gen_ldo_env_A0(s->mem_index, op2_offset);
3412
                }
3413
            } else {
3414
                rm = (modrm & 7) | REX_B(s);
3415
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3416
            }
3417
        } else {
3418
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3419
            if (mod != 3) {
3420
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3421
                op2_offset = offsetof(CPUX86State,mmx_t0);
3422
                gen_ldq_env_A0(s->mem_index, op2_offset);
3423
            } else {
3424
                rm = (modrm & 7);
3425
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3426
            }
3427
        }
3428
        switch(b) {
3429
        case 0x0f: /* 3DNow! data insns */
3430
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3431
                goto illegal_op;
3432
            val = ldub_code(s->pc++);
3433
            sse_op2 = sse_op_table5[val];
3434
            if (!sse_op2)
3435
                goto illegal_op;
3436
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3437
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3438
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3439
            break;
3440
        case 0x70: /* pshufx insn */
3441
        case 0xc6: /* pshufx insn */
3442
            val = ldub_code(s->pc++);
3443
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3444
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3445
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3446
            break;
3447
        case 0xc2:
3448
            /* compare insns */
3449
            val = ldub_code(s->pc++);
3450
            if (val >= 8)
3451
                goto illegal_op;
3452
            sse_op2 = sse_op_table4[val][b1];
3453
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3454
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3455
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3456
            break;
3457
        case 0xf7:
3458
            /* maskmov : we must prepare A0 */
3459
            if (mod != 3)
3460
                goto illegal_op;
3461
#ifdef TARGET_X86_64
3462
            if (s->aflag == 2) {
3463
                gen_op_movq_A0_reg(R_EDI);
3464
            } else
3465
#endif
3466
            {
3467
                gen_op_movl_A0_reg(R_EDI);
3468
                if (s->aflag == 0)
3469
                    gen_op_andl_A0_ffff();
3470
            }
3471
            gen_add_A0_ds_seg(s);
3472

    
3473
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3474
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3475
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3476
            break;
3477
        default:
3478
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3479
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3480
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3481
            break;
3482
        }
3483
        if (b == 0x2e || b == 0x2f) {
3484
            /* just to keep the EFLAGS optimization correct */
3485
            gen_op_com_dummy();
3486
            s->cc_op = CC_OP_EFLAGS;
3487
        }
3488
    }
3489
}
3490

    
3491
/* convert one instruction. s->is_jmp is set if the translation must
3492
   be stopped. Return the next pc value */
3493
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3494
{
3495
    int b, prefixes, aflag, dflag;
3496
    int shift, ot;
3497
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3498
    target_ulong next_eip, tval;
3499
    int rex_w, rex_r;
3500

    
3501
    s->pc = pc_start;
3502
    prefixes = 0;
3503
    aflag = s->code32;
3504
    dflag = s->code32;
3505
    s->override = -1;
3506
    rex_w = -1;
3507
    rex_r = 0;
3508
#ifdef TARGET_X86_64
3509
    s->rex_x = 0;
3510
    s->rex_b = 0;
3511
    x86_64_hregs = 0;
3512
#endif
3513
    s->rip_offset = 0; /* for relative ip address */
3514
 next_byte:
3515
    b = ldub_code(s->pc);
3516
    s->pc++;
3517
    /* check prefixes */
3518
#ifdef TARGET_X86_64
3519
    if (CODE64(s)) {
3520
        switch (b) {
3521
        case 0xf3:
3522
            prefixes |= PREFIX_REPZ;
3523
            goto next_byte;
3524
        case 0xf2:
3525
            prefixes |= PREFIX_REPNZ;
3526
            goto next_byte;
3527
        case 0xf0:
3528
            prefixes |= PREFIX_LOCK;
3529
            goto next_byte;
3530
        case 0x2e:
3531
            s->override = R_CS;
3532
            goto next_byte;
3533
        case 0x36:
3534
            s->override = R_SS;
3535
            goto next_byte;
3536
        case 0x3e:
3537
            s->override = R_DS;
3538
            goto next_byte;
3539
        case 0x26:
3540
            s->override = R_ES;
3541
            goto next_byte;
3542
        case 0x64:
3543
            s->override = R_FS;
3544
            goto next_byte;
3545
        case 0x65:
3546
            s->override = R_GS;
3547
            goto next_byte;
3548
        case 0x66:
3549
            prefixes |= PREFIX_DATA;
3550
            goto next_byte;
3551
        case 0x67:
3552
            prefixes |= PREFIX_ADR;
3553
            goto next_byte;
3554
        case 0x40 ... 0x4f:
3555
            /* REX prefix */
3556
            rex_w = (b >> 3) & 1;
3557
            rex_r = (b & 0x4) << 1;
3558
            s->rex_x = (b & 0x2) << 2;
3559
            REX_B(s) = (b & 0x1) << 3;
3560
            x86_64_hregs = 1; /* select uniform byte register addressing */
3561
            goto next_byte;
3562
        }
3563
        if (rex_w == 1) {
3564
            /* 0x66 is ignored if rex.w is set */
3565
            dflag = 2;
3566
        } else {
3567
            if (prefixes & PREFIX_DATA)
3568
                dflag ^= 1;
3569
        }
3570
        if (!(prefixes & PREFIX_ADR))
3571
            aflag = 2;
3572
    } else
3573
#endif
3574
    {
3575
        switch (b) {
3576
        case 0xf3:
3577
            prefixes |= PREFIX_REPZ;
3578
            goto next_byte;
3579
        case 0xf2:
3580
            prefixes |= PREFIX_REPNZ;
3581
            goto next_byte;
3582
        case 0xf0:
3583
            prefixes |= PREFIX_LOCK;
3584
            goto next_byte;
3585
        case 0x2e:
3586
            s->override = R_CS;
3587
            goto next_byte;
3588
        case 0x36:
3589
            s->override = R_SS;
3590
            goto next_byte;
3591
        case 0x3e:
3592
            s->override = R_DS;
3593
            goto next_byte;
3594
        case 0x26:
3595
            s->override = R_ES;
3596
            goto next_byte;
3597
        case 0x64:
3598
            s->override = R_FS;
3599
            goto next_byte;
3600
        case 0x65:
3601
            s->override = R_GS;
3602
            goto next_byte;
3603
        case 0x66:
3604
            prefixes |= PREFIX_DATA;
3605
            goto next_byte;
3606
        case 0x67:
3607
            prefixes |= PREFIX_ADR;
3608
            goto next_byte;
3609
        }
3610
        if (prefixes & PREFIX_DATA)
3611
            dflag ^= 1;
3612
        if (prefixes & PREFIX_ADR)
3613
            aflag ^= 1;
3614
    }
3615

    
3616
    s->prefix = prefixes;
3617
    s->aflag = aflag;
3618
    s->dflag = dflag;
3619

    
3620
    /* lock generation */
3621
    if (prefixes & PREFIX_LOCK)
3622
        tcg_gen_helper_0_0(helper_lock);
3623

    
3624
    /* now check op code */
3625
 reswitch:
3626
    switch(b) {
3627
    case 0x0f:
3628
        /**************************/
3629
        /* extended op code */
3630
        b = ldub_code(s->pc++) | 0x100;
3631
        goto reswitch;
3632

    
3633
        /**************************/
3634
        /* arith & logic */
3635
    case 0x00 ... 0x05:
3636
    case 0x08 ... 0x0d:
3637
    case 0x10 ... 0x15:
3638
    case 0x18 ... 0x1d:
3639
    case 0x20 ... 0x25:
3640
    case 0x28 ... 0x2d:
3641
    case 0x30 ... 0x35:
3642
    case 0x38 ... 0x3d:
3643
        {
3644
            int op, f, val;
3645
            op = (b >> 3) & 7;
3646
            f = (b >> 1) & 3;
3647

    
3648
            if ((b & 1) == 0)
3649
                ot = OT_BYTE;
3650
            else
3651
                ot = dflag + OT_WORD;
3652

    
3653
            switch(f) {
3654
            case 0: /* OP Ev, Gv */
3655
                modrm = ldub_code(s->pc++);
3656
                reg = ((modrm >> 3) & 7) | rex_r;
3657
                mod = (modrm >> 6) & 3;
3658
                rm = (modrm & 7) | REX_B(s);
3659
                if (mod != 3) {
3660
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3661
                    opreg = OR_TMP0;
3662
                } else if (op == OP_XORL && rm == reg) {
3663
                xor_zero:
3664
                    /* xor reg, reg optimisation */
3665
                    gen_op_movl_T0_0();
3666
                    s->cc_op = CC_OP_LOGICB + ot;
3667
                    gen_op_mov_reg_T0(ot, reg);
3668
                    gen_op_update1_cc();
3669
                    break;
3670
                } else {
3671
                    opreg = rm;
3672
                }
3673
                gen_op_mov_TN_reg(ot, 1, reg);
3674
                gen_op(s, op, ot, opreg);
3675
                break;
3676
            case 1: /* OP Gv, Ev */
3677
                modrm = ldub_code(s->pc++);
3678
                mod = (modrm >> 6) & 3;
3679
                reg = ((modrm >> 3) & 7) | rex_r;
3680
                rm = (modrm & 7) | REX_B(s);
3681
                if (mod != 3) {
3682
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3683
                    gen_op_ld_T1_A0(ot + s->mem_index);
3684
                } else if (op == OP_XORL && rm == reg) {
3685
                    goto xor_zero;
3686
                } else {
3687
                    gen_op_mov_TN_reg(ot, 1, rm);
3688
                }
3689
                gen_op(s, op, ot, reg);
3690
                break;
3691
            case 2: /* OP A, Iv */
3692
                val = insn_get(s, ot);
3693
                gen_op_movl_T1_im(val);
3694
                gen_op(s, op, ot, OR_EAX);
3695
                break;
3696
            }
3697
        }
3698
        break;
3699

    
3700
    case 0x80: /* GRP1 */
3701
    case 0x81:
3702
    case 0x82:
3703
    case 0x83:
3704
        {
3705
            int val;
3706

    
3707
            if ((b & 1) == 0)
3708
                ot = OT_BYTE;
3709
            else
3710
                ot = dflag + OT_WORD;
3711

    
3712
            modrm = ldub_code(s->pc++);
3713
            mod = (modrm >> 6) & 3;
3714
            rm = (modrm & 7) | REX_B(s);
3715
            op = (modrm >> 3) & 7;
3716

    
3717
            if (mod != 3) {
3718
                if (b == 0x83)
3719
                    s->rip_offset = 1;
3720
                else
3721
                    s->rip_offset = insn_const_size(ot);
3722
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3723
                opreg = OR_TMP0;
3724
            } else {
3725
                opreg = rm;
3726
            }
3727

    
3728
            switch(b) {
3729
            default:
3730
            case 0x80:
3731
            case 0x81:
3732
            case 0x82:
3733
                val = insn_get(s, ot);
3734
                break;
3735
            case 0x83:
3736
                val = (int8_t)insn_get(s, OT_BYTE);
3737
                break;
3738
            }
3739
            gen_op_movl_T1_im(val);
3740
            gen_op(s, op, ot, opreg);
3741
        }
3742
        break;
3743

    
3744
        /**************************/
3745
        /* inc, dec, and other misc arith */
3746
    case 0x40 ... 0x47: /* inc Gv */
3747
        ot = dflag ? OT_LONG : OT_WORD;
3748
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3749
        break;
3750
    case 0x48 ... 0x4f: /* dec Gv */
3751
        ot = dflag ? OT_LONG : OT_WORD;
3752
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3753
        break;
3754
    case 0xf6: /* GRP3 */
3755
    case 0xf7:
3756
        if ((b & 1) == 0)
3757
            ot = OT_BYTE;
3758
        else
3759
            ot = dflag + OT_WORD;
3760

    
3761
        modrm = ldub_code(s->pc++);
3762
        mod = (modrm >> 6) & 3;
3763
        rm = (modrm & 7) | REX_B(s);
3764
        op = (modrm >> 3) & 7;
3765
        if (mod != 3) {
3766
            if (op == 0)
3767
                s->rip_offset = insn_const_size(ot);
3768
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3769
            gen_op_ld_T0_A0(ot + s->mem_index);
3770
        } else {
3771
            gen_op_mov_TN_reg(ot, 0, rm);
3772
        }
3773

    
3774
        switch(op) {
3775
        case 0: /* test */
3776
            val = insn_get(s, ot);
3777
            gen_op_movl_T1_im(val);
3778
            gen_op_testl_T0_T1_cc();
3779
            s->cc_op = CC_OP_LOGICB + ot;
3780
            break;
3781
        case 2: /* not */
3782
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3783
            if (mod != 3) {
3784
                gen_op_st_T0_A0(ot + s->mem_index);
3785
            } else {
3786
                gen_op_mov_reg_T0(ot, rm);
3787
            }
3788
            break;
3789
        case 3: /* neg */
3790
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3791
            if (mod != 3) {
3792
                gen_op_st_T0_A0(ot + s->mem_index);
3793
            } else {
3794
                gen_op_mov_reg_T0(ot, rm);
3795
            }
3796
            gen_op_update_neg_cc();
3797
            s->cc_op = CC_OP_SUBB + ot;
3798
            break;
3799
        case 4: /* mul */
3800
            switch(ot) {
3801
            case OT_BYTE:
3802
                gen_op_mulb_AL_T0();
3803
                s->cc_op = CC_OP_MULB;
3804
                break;
3805
            case OT_WORD:
3806
                gen_op_mulw_AX_T0();
3807
                s->cc_op = CC_OP_MULW;
3808
                break;
3809
            default:
3810
            case OT_LONG:
3811
                gen_op_mull_EAX_T0();
3812
                s->cc_op = CC_OP_MULL;
3813
                break;
3814
#ifdef TARGET_X86_64
3815
            case OT_QUAD:
3816
                gen_op_mulq_EAX_T0();
3817
                s->cc_op = CC_OP_MULQ;
3818
                break;
3819
#endif
3820
            }
3821
            break;
3822
        case 5: /* imul */
3823
            switch(ot) {
3824
            case OT_BYTE:
3825
                gen_op_imulb_AL_T0();
3826
                s->cc_op = CC_OP_MULB;
3827
                break;
3828
            case OT_WORD:
3829
                gen_op_imulw_AX_T0();
3830
                s->cc_op = CC_OP_MULW;
3831
                break;
3832
            default:
3833
            case OT_LONG:
3834
                gen_op_imull_EAX_T0();
3835
                s->cc_op = CC_OP_MULL;
3836
                break;
3837
#ifdef TARGET_X86_64
3838
            case OT_QUAD:
3839
                gen_op_imulq_EAX_T0();
3840
                s->cc_op = CC_OP_MULQ;
3841
                break;
3842
#endif
3843
            }
3844
            break;
3845
        case 6: /* div */
3846
            switch(ot) {
3847
            case OT_BYTE:
3848
                gen_jmp_im(pc_start - s->cs_base);
3849
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3850
                break;
3851
            case OT_WORD:
3852
                gen_jmp_im(pc_start - s->cs_base);
3853
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3854
                break;
3855
            default:
3856
            case OT_LONG:
3857
                gen_jmp_im(pc_start - s->cs_base);
3858
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3859
                break;
3860
#ifdef TARGET_X86_64
3861
            case OT_QUAD:
3862
                gen_jmp_im(pc_start - s->cs_base);
3863
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3864
                break;
3865
#endif
3866
            }
3867
            break;
3868
        case 7: /* idiv */
3869
            switch(ot) {
3870
            case OT_BYTE:
3871
                gen_jmp_im(pc_start - s->cs_base);
3872
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3873
                break;
3874
            case OT_WORD:
3875
                gen_jmp_im(pc_start - s->cs_base);
3876
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3877
                break;
3878
            default:
3879
            case OT_LONG:
3880
                gen_jmp_im(pc_start - s->cs_base);
3881
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3882
                break;
3883
#ifdef TARGET_X86_64
3884
            case OT_QUAD:
3885
                gen_jmp_im(pc_start - s->cs_base);
3886
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
3887
                break;
3888
#endif
3889
            }
3890
            break;
3891
        default:
3892
            goto illegal_op;
3893
        }
3894
        break;
3895

    
3896
    case 0xfe: /* GRP4 */
3897
    case 0xff: /* GRP5 */
3898
        if ((b & 1) == 0)
3899
            ot = OT_BYTE;
3900
        else
3901
            ot = dflag + OT_WORD;
3902

    
3903
        modrm = ldub_code(s->pc++);
3904
        mod = (modrm >> 6) & 3;
3905
        rm = (modrm & 7) | REX_B(s);
3906
        op = (modrm >> 3) & 7;
3907
        if (op >= 2 && b == 0xfe) {
3908
            goto illegal_op;
3909
        }
3910
        if (CODE64(s)) {
3911
            if (op == 2 || op == 4) {
3912
                /* operand size for jumps is 64 bit */
3913
                ot = OT_QUAD;
3914
            } else if (op == 3 || op == 5) {
3915
                /* for call calls, the operand is 16 or 32 bit, even
3916
                   in long mode */
3917
                ot = dflag ? OT_LONG : OT_WORD;
3918
            } else if (op == 6) {
3919
                /* default push size is 64 bit */
3920
                ot = dflag ? OT_QUAD : OT_WORD;
3921
            }
3922
        }
3923
        if (mod != 3) {
3924
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3925
            if (op >= 2 && op != 3 && op != 5)
3926
                gen_op_ld_T0_A0(ot + s->mem_index);
3927
        } else {
3928
            gen_op_mov_TN_reg(ot, 0, rm);
3929
        }
3930

    
3931
        switch(op) {
3932
        case 0: /* inc Ev */
3933
            if (mod != 3)
3934
                opreg = OR_TMP0;
3935
            else
3936
                opreg = rm;
3937
            gen_inc(s, ot, opreg, 1);
3938
            break;
3939
        case 1: /* dec Ev */
3940
            if (mod != 3)
3941
                opreg = OR_TMP0;
3942
            else
3943
                opreg = rm;
3944
            gen_inc(s, ot, opreg, -1);
3945
            break;
3946
        case 2: /* call Ev */
3947
            /* XXX: optimize if memory (no 'and' is necessary) */
3948
            if (s->dflag == 0)
3949
                gen_op_andl_T0_ffff();
3950
            next_eip = s->pc - s->cs_base;
3951
            gen_movtl_T1_im(next_eip);
3952
            gen_push_T1(s);
3953
            gen_op_jmp_T0();
3954
            gen_eob(s);
3955
            break;
3956
        case 3: /* lcall Ev */
3957
            gen_op_ld_T1_A0(ot + s->mem_index);
3958
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3959
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3960
        do_lcall:
3961
            if (s->pe && !s->vm86) {
3962
                if (s->cc_op != CC_OP_DYNAMIC)
3963
                    gen_op_set_cc_op(s->cc_op);
3964
                gen_jmp_im(pc_start - s->cs_base);
3965
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3966
                tcg_gen_helper_0_4(helper_lcall_protected,
3967
                                   cpu_tmp2_i32, cpu_T[1],
3968
                                   tcg_const_i32(dflag), 
3969
                                   tcg_const_i32(s->pc - pc_start));
3970
            } else {
3971
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3972
                tcg_gen_helper_0_4(helper_lcall_real,
3973
                                   cpu_tmp2_i32, cpu_T[1],
3974
                                   tcg_const_i32(dflag), 
3975
                                   tcg_const_i32(s->pc - s->cs_base));
3976
            }
3977
            gen_eob(s);
3978
            break;
3979
        case 4: /* jmp Ev */
3980
            if (s->dflag == 0)
3981
                gen_op_andl_T0_ffff();
3982
            gen_op_jmp_T0();
3983
            gen_eob(s);
3984
            break;
3985
        case 5: /* ljmp Ev */
3986
            gen_op_ld_T1_A0(ot + s->mem_index);
3987
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3988
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3989
        do_ljmp:
3990
            if (s->pe && !s->vm86) {
3991
                if (s->cc_op != CC_OP_DYNAMIC)
3992
                    gen_op_set_cc_op(s->cc_op);
3993
                gen_jmp_im(pc_start - s->cs_base);
3994
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3995
                tcg_gen_helper_0_3(helper_ljmp_protected,
3996
                                   cpu_tmp2_i32,
3997
                                   cpu_T[1],
3998
                                   tcg_const_i32(s->pc - pc_start));
3999
            } else {
4000
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4001
                gen_op_movl_T0_T1();
4002
                gen_op_jmp_T0();
4003
            }
4004
            gen_eob(s);
4005
            break;
4006
        case 6: /* push Ev */
4007
            gen_push_T0(s);
4008
            break;
4009
        default:
4010
            goto illegal_op;
4011
        }
4012
        break;
4013

    
4014
    case 0x84: /* test Ev, Gv */
4015
    case 0x85:
4016
        if ((b & 1) == 0)
4017
            ot = OT_BYTE;
4018
        else
4019
            ot = dflag + OT_WORD;
4020

    
4021
        modrm = ldub_code(s->pc++);
4022
        mod = (modrm >> 6) & 3;
4023
        rm = (modrm & 7) | REX_B(s);
4024
        reg = ((modrm >> 3) & 7) | rex_r;
4025

    
4026
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4027
        gen_op_mov_TN_reg(ot, 1, reg);
4028
        gen_op_testl_T0_T1_cc();
4029
        s->cc_op = CC_OP_LOGICB + ot;
4030
        break;
4031

    
4032
    case 0xa8: /* test eAX, Iv */
4033
    case 0xa9:
4034
        if ((b & 1) == 0)
4035
            ot = OT_BYTE;
4036
        else
4037
            ot = dflag + OT_WORD;
4038
        val = insn_get(s, ot);
4039

    
4040
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
4041
        gen_op_movl_T1_im(val);
4042
        gen_op_testl_T0_T1_cc();
4043
        s->cc_op = CC_OP_LOGICB + ot;
4044
        break;
4045

    
4046
    case 0x98: /* CWDE/CBW */
4047
#ifdef TARGET_X86_64
4048
        if (dflag == 2) {
4049
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4050
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4051
            gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4052
        } else
4053
#endif
4054
        if (dflag == 1) {
4055
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4056
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4057
            gen_op_mov_reg_T0(OT_LONG, R_EAX);
4058
        } else {
4059
            gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4060
            tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4061
            gen_op_mov_reg_T0(OT_WORD, R_EAX);
4062
        }
4063
        break;
4064
    case 0x99: /* CDQ/CWD */
4065
#ifdef TARGET_X86_64
4066
        if (dflag == 2) {
4067
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4068
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4069
            gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4070
        } else
4071
#endif
4072
        if (dflag == 1) {
4073
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4074
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4075
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4076
            gen_op_mov_reg_T0(OT_LONG, R_EDX);
4077
        } else {
4078
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4079
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4080
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4081
            gen_op_mov_reg_T0(OT_WORD, R_EDX);
4082
        }
4083
        break;
4084
    case 0x1af: /* imul Gv, Ev */
4085
    case 0x69: /* imul Gv, Ev, I */
4086
    case 0x6b:
4087
        ot = dflag + OT_WORD;
4088
        modrm = ldub_code(s->pc++);
4089
        reg = ((modrm >> 3) & 7) | rex_r;
4090
        if (b == 0x69)
4091
            s->rip_offset = insn_const_size(ot);
4092
        else if (b == 0x6b)
4093
            s->rip_offset = 1;
4094
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4095
        if (b == 0x69) {
4096
            val = insn_get(s, ot);
4097
            gen_op_movl_T1_im(val);
4098
        } else if (b == 0x6b) {
4099
            val = (int8_t)insn_get(s, OT_BYTE);
4100
            gen_op_movl_T1_im(val);
4101
        } else {
4102
            gen_op_mov_TN_reg(ot, 1, reg);
4103
        }
4104

    
4105
#ifdef TARGET_X86_64
4106
        if (ot == OT_QUAD) {
4107
            gen_op_imulq_T0_T1();
4108
        } else
4109
#endif
4110
        if (ot == OT_LONG) {
4111
            gen_op_imull_T0_T1();
4112
        } else {
4113
            gen_op_imulw_T0_T1();
4114
        }
4115
        gen_op_mov_reg_T0(ot, reg);
4116
        s->cc_op = CC_OP_MULB + ot;
4117
        break;
4118
    case 0x1c0:
4119
    case 0x1c1: /* xadd Ev, Gv */
4120
        if ((b & 1) == 0)
4121
            ot = OT_BYTE;
4122
        else
4123
            ot = dflag + OT_WORD;
4124
        modrm = ldub_code(s->pc++);
4125
        reg = ((modrm >> 3) & 7) | rex_r;
4126
        mod = (modrm >> 6) & 3;
4127
        if (mod == 3) {
4128
            rm = (modrm & 7) | REX_B(s);
4129
            gen_op_mov_TN_reg(ot, 0, reg);
4130
            gen_op_mov_TN_reg(ot, 1, rm);
4131
            gen_op_addl_T0_T1();
4132
            gen_op_mov_reg_T1(ot, reg);
4133
            gen_op_mov_reg_T0(ot, rm);
4134
        } else {
4135
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4136
            gen_op_mov_TN_reg(ot, 0, reg);
4137
            gen_op_ld_T1_A0(ot + s->mem_index);
4138
            gen_op_addl_T0_T1();
4139
            gen_op_st_T0_A0(ot + s->mem_index);
4140
            gen_op_mov_reg_T1(ot, reg);
4141
        }
4142
        gen_op_update2_cc();
4143
        s->cc_op = CC_OP_ADDB + ot;
4144
        break;
4145
    case 0x1b0:
4146
    case 0x1b1: /* cmpxchg Ev, Gv */
4147
        {
4148
            int label1;
4149

    
4150
            if ((b & 1) == 0)
4151
                ot = OT_BYTE;
4152
            else
4153
                ot = dflag + OT_WORD;
4154
            modrm = ldub_code(s->pc++);
4155
            reg = ((modrm >> 3) & 7) | rex_r;
4156
            mod = (modrm >> 6) & 3;
4157
            gen_op_mov_TN_reg(ot, 1, reg);
4158
            if (mod == 3) {
4159
                rm = (modrm & 7) | REX_B(s);
4160
                gen_op_mov_TN_reg(ot, 0, rm);
4161
            } else {
4162
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4163
                gen_op_ld_T0_A0(ot + s->mem_index);
4164
                rm = 0; /* avoid warning */
4165
            }
4166
            label1 = gen_new_label();
4167
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4168
            tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4169
            gen_extu(ot, cpu_T3);
4170
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4171
            tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4172
            gen_op_mov_reg_T0(ot, R_EAX);
4173
            gen_set_label(label1);
4174
            if (mod == 3) {
4175
                gen_op_mov_reg_T1(ot, rm);
4176
            } else {
4177
                gen_op_st_T1_A0(ot + s->mem_index);
4178
            }
4179
            tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4180
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4181
            s->cc_op = CC_OP_SUBB + ot;
4182
        }
4183
        break;
4184
    case 0x1c7: /* cmpxchg8b */
4185
        modrm = ldub_code(s->pc++);
4186
        mod = (modrm >> 6) & 3;
4187
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
4188
            goto illegal_op;
4189
        gen_jmp_im(pc_start - s->cs_base);
4190
        if (s->cc_op != CC_OP_DYNAMIC)
4191
            gen_op_set_cc_op(s->cc_op);
4192
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4193
        gen_op_cmpxchg8b();
4194
        s->cc_op = CC_OP_EFLAGS;
4195
        break;
4196

    
4197
        /**************************/
4198
        /* push/pop */
4199
    case 0x50 ... 0x57: /* push */
4200
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4201
        gen_push_T0(s);
4202
        break;
4203
    case 0x58 ... 0x5f: /* pop */
4204
        if (CODE64(s)) {
4205
            ot = dflag ? OT_QUAD : OT_WORD;
4206
        } else {
4207
            ot = dflag + OT_WORD;
4208
        }
4209
        gen_pop_T0(s);
4210
        /* NOTE: order is important for pop %sp */
4211
        gen_pop_update(s);
4212
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4213
        break;
4214
    case 0x60: /* pusha */
4215
        if (CODE64(s))
4216
            goto illegal_op;
4217
        gen_pusha(s);
4218
        break;
4219
    case 0x61: /* popa */
4220
        if (CODE64(s))
4221
            goto illegal_op;
4222
        gen_popa(s);
4223
        break;
4224
    case 0x68: /* push Iv */
4225
    case 0x6a:
4226
        if (CODE64(s)) {
4227
            ot = dflag ? OT_QUAD : OT_WORD;
4228
        } else {
4229
            ot = dflag + OT_WORD;
4230
        }
4231
        if (b == 0x68)
4232
            val = insn_get(s, ot);
4233
        else
4234
            val = (int8_t)insn_get(s, OT_BYTE);
4235
        gen_op_movl_T0_im(val);
4236
        gen_push_T0(s);
4237
        break;
4238
    case 0x8f: /* pop Ev */
4239
        if (CODE64(s)) {
4240
            ot = dflag ? OT_QUAD : OT_WORD;
4241
        } else {
4242
            ot = dflag + OT_WORD;
4243
        }
4244
        modrm = ldub_code(s->pc++);
4245
        mod = (modrm >> 6) & 3;
4246
        gen_pop_T0(s);
4247
        if (mod == 3) {
4248
            /* NOTE: order is important for pop %sp */
4249
            gen_pop_update(s);
4250
            rm = (modrm & 7) | REX_B(s);
4251
            gen_op_mov_reg_T0(ot, rm);
4252
        } else {
4253
            /* NOTE: order is important too for MMU exceptions */
4254
            s->popl_esp_hack = 1 << ot;
4255
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4256
            s->popl_esp_hack = 0;
4257
            gen_pop_update(s);
4258
        }
4259
        break;
4260
    case 0xc8: /* enter */
4261
        {
4262
            int level;
4263
            val = lduw_code(s->pc);
4264
            s->pc += 2;
4265
            level = ldub_code(s->pc++);
4266
            gen_enter(s, val, level);
4267
        }
4268
        break;
4269
    case 0xc9: /* leave */
4270
        /* XXX: exception not precise (ESP is updated before potential exception) */
4271
        if (CODE64(s)) {
4272
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4273
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4274
        } else if (s->ss32) {
4275
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4276
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
4277
        } else {
4278
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4279
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
4280
        }
4281
        gen_pop_T0(s);
4282
        if (CODE64(s)) {
4283
            ot = dflag ? OT_QUAD : OT_WORD;
4284
        } else {
4285
            ot = dflag + OT_WORD;
4286
        }
4287
        gen_op_mov_reg_T0(ot, R_EBP);
4288
        gen_pop_update(s);
4289
        break;
4290
    case 0x06: /* push es */
4291
    case 0x0e: /* push cs */
4292
    case 0x16: /* push ss */
4293
    case 0x1e: /* push ds */
4294
        if (CODE64(s))
4295
            goto illegal_op;
4296
        gen_op_movl_T0_seg(b >> 3);
4297
        gen_push_T0(s);
4298
        break;
4299
    case 0x1a0: /* push fs */
4300
    case 0x1a8: /* push gs */
4301
        gen_op_movl_T0_seg((b >> 3) & 7);
4302
        gen_push_T0(s);
4303
        break;
4304
    case 0x07: /* pop es */
4305
    case 0x17: /* pop ss */
4306
    case 0x1f: /* pop ds */
4307
        if (CODE64(s))
4308
            goto illegal_op;
4309
        reg = b >> 3;
4310
        gen_pop_T0(s);
4311
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4312
        gen_pop_update(s);
4313
        if (reg == R_SS) {
4314
            /* if reg == SS, inhibit interrupts/trace. */
4315
            /* If several instructions disable interrupts, only the
4316
               _first_ does it */
4317
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4318
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4319
            s->tf = 0;
4320
        }
4321
        if (s->is_jmp) {
4322
            gen_jmp_im(s->pc - s->cs_base);
4323
            gen_eob(s);
4324
        }
4325
        break;
4326
    case 0x1a1: /* pop fs */
4327
    case 0x1a9: /* pop gs */
4328
        gen_pop_T0(s);
4329
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4330
        gen_pop_update(s);
4331
        if (s->is_jmp) {
4332
            gen_jmp_im(s->pc - s->cs_base);
4333
            gen_eob(s);
4334
        }
4335
        break;
4336

    
4337
        /**************************/
4338
        /* mov */
4339
    case 0x88:
4340
    case 0x89: /* mov Gv, Ev */
4341
        if ((b & 1) == 0)
4342
            ot = OT_BYTE;
4343
        else
4344
            ot = dflag + OT_WORD;
4345
        modrm = ldub_code(s->pc++);
4346
        reg = ((modrm >> 3) & 7) | rex_r;
4347

    
4348
        /* generate a generic store */
4349
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4350
        break;
4351
    case 0xc6:
4352
    case 0xc7: /* mov Ev, Iv */
4353
        if ((b & 1) == 0)
4354
            ot = OT_BYTE;
4355
        else
4356
            ot = dflag + OT_WORD;
4357
        modrm = ldub_code(s->pc++);
4358
        mod = (modrm >> 6) & 3;
4359
        if (mod != 3) {
4360
            s->rip_offset = insn_const_size(ot);
4361
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4362
        }
4363
        val = insn_get(s, ot);
4364
        gen_op_movl_T0_im(val);
4365
        if (mod != 3)
4366
            gen_op_st_T0_A0(ot + s->mem_index);
4367
        else
4368
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4369
        break;
4370
    case 0x8a:
4371
    case 0x8b: /* mov Ev, Gv */
4372
        if ((b & 1) == 0)
4373
            ot = OT_BYTE;
4374
        else
4375
            ot = OT_WORD + dflag;
4376
        modrm = ldub_code(s->pc++);
4377
        reg = ((modrm >> 3) & 7) | rex_r;
4378

    
4379
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4380
        gen_op_mov_reg_T0(ot, reg);
4381
        break;
4382
    case 0x8e: /* mov seg, Gv */
4383
        modrm = ldub_code(s->pc++);
4384
        reg = (modrm >> 3) & 7;
4385
        if (reg >= 6 || reg == R_CS)
4386
            goto illegal_op;
4387
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4388
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4389
        if (reg == R_SS) {
4390
            /* if reg == SS, inhibit interrupts/trace */
4391
            /* If several instructions disable interrupts, only the
4392
               _first_ does it */
4393
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4394
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4395
            s->tf = 0;
4396
        }
4397
        if (s->is_jmp) {
4398
            gen_jmp_im(s->pc - s->cs_base);
4399
            gen_eob(s);
4400
        }
4401
        break;
4402
    case 0x8c: /* mov Gv, seg */
4403
        modrm = ldub_code(s->pc++);
4404
        reg = (modrm >> 3) & 7;
4405
        mod = (modrm >> 6) & 3;
4406
        if (reg >= 6)
4407
            goto illegal_op;
4408
        gen_op_movl_T0_seg(reg);
4409
        if (mod == 3)
4410
            ot = OT_WORD + dflag;
4411
        else
4412
            ot = OT_WORD;
4413
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4414
        break;
4415

    
4416
    case 0x1b6: /* movzbS Gv, Eb */
4417
    case 0x1b7: /* movzwS Gv, Eb */
4418
    case 0x1be: /* movsbS Gv, Eb */
4419
    case 0x1bf: /* movswS Gv, Eb */
4420
        {
4421
            int d_ot;
4422
            /* d_ot is the size of destination */
4423
            d_ot = dflag + OT_WORD;
4424
            /* ot is the size of source */
4425
            ot = (b & 1) + OT_BYTE;
4426
            modrm = ldub_code(s->pc++);
4427
            reg = ((modrm >> 3) & 7) | rex_r;
4428
            mod = (modrm >> 6) & 3;
4429
            rm = (modrm & 7) | REX_B(s);
4430

    
4431
            if (mod == 3) {
4432
                gen_op_mov_TN_reg(ot, 0, rm);
4433
                switch(ot | (b & 8)) {
4434
                case OT_BYTE:
4435
                    tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4436
                    break;
4437
                case OT_BYTE | 8:
4438
                    tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4439
                    break;
4440
                case OT_WORD:
4441
                    tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4442
                    break;
4443
                default:
4444
                case OT_WORD | 8:
4445
                    tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4446
                    break;
4447
                }
4448
                gen_op_mov_reg_T0(d_ot, reg);
4449
            } else {
4450
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4451
                if (b & 8) {
4452
                    gen_op_lds_T0_A0(ot + s->mem_index);
4453
                } else {
4454
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4455
                }
4456
                gen_op_mov_reg_T0(d_ot, reg);
4457
            }
4458
        }
4459
        break;
4460

    
4461
    case 0x8d: /* lea */
4462
        ot = dflag + OT_WORD;
4463
        modrm = ldub_code(s->pc++);
4464
        mod = (modrm >> 6) & 3;
4465
        if (mod == 3)
4466
            goto illegal_op;
4467
        reg = ((modrm >> 3) & 7) | rex_r;
4468
        /* we must ensure that no segment is added */
4469
        s->override = -1;
4470
        val = s->addseg;
4471
        s->addseg = 0;
4472
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4473
        s->addseg = val;
4474
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4475
        break;
4476

    
4477
    case 0xa0: /* mov EAX, Ov */
4478
    case 0xa1:
4479
    case 0xa2: /* mov Ov, EAX */
4480
    case 0xa3:
4481
        {
4482
            target_ulong offset_addr;
4483

    
4484
            if ((b & 1) == 0)
4485
                ot = OT_BYTE;
4486
            else
4487
                ot = dflag + OT_WORD;
4488
#ifdef TARGET_X86_64
4489
            if (s->aflag == 2) {
4490
                offset_addr = ldq_code(s->pc);
4491
                s->pc += 8;
4492
                gen_op_movq_A0_im(offset_addr);
4493
            } else
4494
#endif
4495
            {
4496
                if (s->aflag) {
4497
                    offset_addr = insn_get(s, OT_LONG);
4498
                } else {
4499
                    offset_addr = insn_get(s, OT_WORD);
4500
                }
4501
                gen_op_movl_A0_im(offset_addr);
4502
            }
4503
            gen_add_A0_ds_seg(s);
4504
            if ((b & 2) == 0) {
4505
                gen_op_ld_T0_A0(ot + s->mem_index);
4506
                gen_op_mov_reg_T0(ot, R_EAX);
4507
            } else {
4508
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4509
                gen_op_st_T0_A0(ot + s->mem_index);
4510
            }
4511
        }
4512
        break;
4513
    case 0xd7: /* xlat */
4514
#ifdef TARGET_X86_64
4515
        if (s->aflag == 2) {
4516
            gen_op_movq_A0_reg(R_EBX);
4517
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4518
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4519
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4520
        } else
4521
#endif
4522
        {
4523
            gen_op_movl_A0_reg(R_EBX);
4524
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4525
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4526
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4527
            if (s->aflag == 0)
4528
                gen_op_andl_A0_ffff();
4529
            else
4530
                tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4531
        }
4532
        gen_add_A0_ds_seg(s);
4533
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4534
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4535
        break;
4536
    case 0xb0 ... 0xb7: /* mov R, Ib */
4537
        val = insn_get(s, OT_BYTE);
4538
        gen_op_movl_T0_im(val);
4539
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4540
        break;
4541
    case 0xb8 ... 0xbf: /* mov R, Iv */
4542
#ifdef TARGET_X86_64
4543
        if (dflag == 2) {
4544
            uint64_t tmp;
4545
            /* 64 bit case */
4546
            tmp = ldq_code(s->pc);
4547
            s->pc += 8;
4548
            reg = (b & 7) | REX_B(s);
4549
            gen_movtl_T0_im(tmp);
4550
            gen_op_mov_reg_T0(OT_QUAD, reg);
4551
        } else
4552
#endif
4553
        {
4554
            ot = dflag ? OT_LONG : OT_WORD;
4555
            val = insn_get(s, ot);
4556
            reg = (b & 7) | REX_B(s);
4557
            gen_op_movl_T0_im(val);
4558
            gen_op_mov_reg_T0(ot, reg);
4559
        }
4560
        break;
4561

    
4562
    case 0x91 ... 0x97: /* xchg R, EAX */
4563
        ot = dflag + OT_WORD;
4564
        reg = (b & 7) | REX_B(s);
4565
        rm = R_EAX;
4566
        goto do_xchg_reg;
4567
    case 0x86:
4568
    case 0x87: /* xchg Ev, Gv */
4569
        if ((b & 1) == 0)
4570
            ot = OT_BYTE;
4571
        else
4572
            ot = dflag + OT_WORD;
4573
        modrm = ldub_code(s->pc++);
4574
        reg = ((modrm >> 3) & 7) | rex_r;
4575
        mod = (modrm >> 6) & 3;
4576
        if (mod == 3) {
4577
            rm = (modrm & 7) | REX_B(s);
4578
        do_xchg_reg:
4579
            gen_op_mov_TN_reg(ot, 0, reg);
4580
            gen_op_mov_TN_reg(ot, 1, rm);
4581
            gen_op_mov_reg_T0(ot, rm);
4582
            gen_op_mov_reg_T1(ot, reg);
4583
        } else {
4584
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4585
            gen_op_mov_TN_reg(ot, 0, reg);
4586
            /* for xchg, lock is implicit */
4587
            if (!(prefixes & PREFIX_LOCK))
4588
                tcg_gen_helper_0_0(helper_lock);
4589
            gen_op_ld_T1_A0(ot + s->mem_index);
4590
            gen_op_st_T0_A0(ot + s->mem_index);
4591
            if (!(prefixes & PREFIX_LOCK))
4592
                tcg_gen_helper_0_0(helper_unlock);
4593
            gen_op_mov_reg_T1(ot, reg);
4594
        }
4595
        break;
4596
    case 0xc4: /* les Gv */
4597
        if (CODE64(s))
4598
            goto illegal_op;
4599
        op = R_ES;
4600
        goto do_lxx;
4601
    case 0xc5: /* lds Gv */
4602
        if (CODE64(s))
4603
            goto illegal_op;
4604
        op = R_DS;
4605
        goto do_lxx;
4606
    case 0x1b2: /* lss Gv */
4607
        op = R_SS;
4608
        goto do_lxx;
4609
    case 0x1b4: /* lfs Gv */
4610
        op = R_FS;
4611
        goto do_lxx;
4612
    case 0x1b5: /* lgs Gv */
4613
        op = R_GS;
4614
    do_lxx:
4615
        ot = dflag ? OT_LONG : OT_WORD;
4616
        modrm = ldub_code(s->pc++);
4617
        reg = ((modrm >> 3) & 7) | rex_r;
4618
        mod = (modrm >> 6) & 3;
4619
        if (mod == 3)
4620
            goto illegal_op;
4621
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4622
        gen_op_ld_T1_A0(ot + s->mem_index);
4623
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4624
        /* load the segment first to handle exceptions properly */
4625
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4626
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4627
        /* then put the data */
4628
        gen_op_mov_reg_T1(ot, reg);
4629
        if (s->is_jmp) {
4630
            gen_jmp_im(s->pc - s->cs_base);
4631
            gen_eob(s);
4632
        }
4633
        break;
4634

    
4635
        /************************/
4636
        /* shifts */
4637
    case 0xc0:
4638
    case 0xc1:
4639
        /* shift Ev,Ib */
4640
        shift = 2;
4641
    grp2:
4642
        {
4643
            if ((b & 1) == 0)
4644
                ot = OT_BYTE;
4645
            else
4646
                ot = dflag + OT_WORD;
4647

    
4648
            modrm = ldub_code(s->pc++);
4649
            mod = (modrm >> 6) & 3;
4650
            op = (modrm >> 3) & 7;
4651

    
4652
            if (mod != 3) {
4653
                if (shift == 2) {
4654
                    s->rip_offset = 1;
4655
                }
4656
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4657
                opreg = OR_TMP0;
4658
            } else {
4659
                opreg = (modrm & 7) | REX_B(s);
4660
            }
4661

    
4662
            /* simpler op */
4663
            if (shift == 0) {
4664
                gen_shift(s, op, ot, opreg, OR_ECX);
4665
            } else {
4666
                if (shift == 2) {
4667
                    shift = ldub_code(s->pc++);
4668
                }
4669
                gen_shifti(s, op, ot, opreg, shift);
4670
            }
4671
        }
4672
        break;
4673
    case 0xd0:
4674
    case 0xd1:
4675
        /* shift Ev,1 */
4676
        shift = 1;
4677
        goto grp2;
4678
    case 0xd2:
4679
    case 0xd3:
4680
        /* shift Ev,cl */
4681
        shift = 0;
4682
        goto grp2;
4683

    
4684
    case 0x1a4: /* shld imm */
4685
        op = 0;
4686
        shift = 1;
4687
        goto do_shiftd;
4688
    case 0x1a5: /* shld cl */
4689
        op = 0;
4690
        shift = 0;
4691
        goto do_shiftd;
4692
    case 0x1ac: /* shrd imm */
4693
        op = 1;
4694
        shift = 1;
4695
        goto do_shiftd;
4696
    case 0x1ad: /* shrd cl */
4697
        op = 1;
4698
        shift = 0;
4699
    do_shiftd:
4700
        ot = dflag + OT_WORD;
4701
        modrm = ldub_code(s->pc++);
4702
        mod = (modrm >> 6) & 3;
4703
        rm = (modrm & 7) | REX_B(s);
4704
        reg = ((modrm >> 3) & 7) | rex_r;
4705
        if (mod != 3) {
4706
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4707
            opreg = OR_TMP0;
4708
        } else {
4709
            opreg = rm;
4710
        }
4711
        gen_op_mov_TN_reg(ot, 1, reg);
4712

    
4713
        if (shift) {
4714
            val = ldub_code(s->pc++);
4715
            tcg_gen_movi_tl(cpu_T3, val);
4716
        } else {
4717
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4718
        }
4719
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4720
        break;
4721

    
4722
        /************************/
4723
        /* floats */
4724
    case 0xd8 ... 0xdf:
4725
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4726
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4727
            /* XXX: what to do if illegal op ? */
4728
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4729
            break;
4730
        }
4731
        modrm = ldub_code(s->pc++);
4732
        mod = (modrm >> 6) & 3;
4733
        rm = modrm & 7;
4734
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4735
        if (mod != 3) {
4736
            /* memory op */
4737
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4738
            switch(op) {
4739
            case 0x00 ... 0x07: /* fxxxs */
4740
            case 0x10 ... 0x17: /* fixxxl */
4741
            case 0x20 ... 0x27: /* fxxxl */
4742
            case 0x30 ... 0x37: /* fixxx */
4743
                {
4744
                    int op1;
4745
                    op1 = op & 7;
4746

    
4747
                    switch(op >> 4) {
4748
                    case 0:
4749
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4750
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4751
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4752
                        break;
4753
                    case 1:
4754
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4755
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4756
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4757
                        break;
4758
                    case 2:
4759
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4760
                                          (s->mem_index >> 2) - 1);
4761
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4762
                        break;
4763
                    case 3:
4764
                    default:
4765
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4766
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4767
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4768
                        break;
4769
                    }
4770

    
4771
                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4772
                    if (op1 == 3) {
4773
                        /* fcomp needs pop */
4774
                        tcg_gen_helper_0_0(helper_fpop);
4775
                    }
4776
                }
4777
                break;
4778
            case 0x08: /* flds */
4779
            case 0x0a: /* fsts */
4780
            case 0x0b: /* fstps */
4781
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4782
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4783
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4784
                switch(op & 7) {
4785
                case 0:
4786
                    switch(op >> 4) {
4787
                    case 0:
4788
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4789
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4790
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4791
                        break;
4792
                    case 1:
4793
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4794
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4795
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4796
                        break;
4797
                    case 2:
4798
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4799
                                          (s->mem_index >> 2) - 1);
4800
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4801
                        break;
4802
                    case 3:
4803
                    default:
4804
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4805
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4806
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4807
                        break;
4808
                    }
4809
                    break;
4810
                case 1:
4811
                    /* XXX: the corresponding CPUID bit must be tested ! */
4812
                    switch(op >> 4) {
4813
                    case 1:
4814
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4815
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4816
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4817
                        break;
4818
                    case 2:
4819
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4820
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4821
                                          (s->mem_index >> 2) - 1);
4822
                        break;
4823
                    case 3:
4824
                    default:
4825
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4826
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4827
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4828
                        break;
4829
                    }
4830
                    tcg_gen_helper_0_0(helper_fpop);
4831
                    break;
4832
                default:
4833
                    switch(op >> 4) {
4834
                    case 0:
4835
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4836
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4837
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4838
                        break;
4839
                    case 1:
4840
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
4841
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4842
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4843
                        break;
4844
                    case 2:
4845
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
4846
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4847
                                          (s->mem_index >> 2) - 1);
4848
                        break;
4849
                    case 3:
4850
                    default:
4851
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
4852
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4853
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4854
                        break;
4855
                    }
4856
                    if ((op & 7) == 3)
4857
                        tcg_gen_helper_0_0(helper_fpop);
4858
                    break;
4859
                }
4860
                break;
4861
            case 0x0c: /* fldenv mem */
4862
                if (s->cc_op != CC_OP_DYNAMIC)
4863
                    gen_op_set_cc_op(s->cc_op);
4864
                gen_jmp_im(pc_start - s->cs_base);
4865
                tcg_gen_helper_0_2(helper_fldenv, 
4866
                                   cpu_A0, tcg_const_i32(s->dflag));
4867
                break;
4868
            case 0x0d: /* fldcw mem */
4869
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4870
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4871
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
4872
                break;
4873
            case 0x0e: /* fnstenv mem */
4874
                if (s->cc_op != CC_OP_DYNAMIC)
4875
                    gen_op_set_cc_op(s->cc_op);
4876
                gen_jmp_im(pc_start - s->cs_base);
4877
                tcg_gen_helper_0_2(helper_fstenv,
4878
                                   cpu_A0, tcg_const_i32(s->dflag));
4879
                break;
4880
            case 0x0f: /* fnstcw mem */
4881
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
4882
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4883
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
4884
                break;
4885
            case 0x1d: /* fldt mem */
4886
                if (s->cc_op != CC_OP_DYNAMIC)
4887
                    gen_op_set_cc_op(s->cc_op);
4888
                gen_jmp_im(pc_start - s->cs_base);
4889
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
4890
                break;
4891
            case 0x1f: /* fstpt mem */
4892
                if (s->cc_op != CC_OP_DYNAMIC)
4893
                    gen_op_set_cc_op(s->cc_op);
4894
                gen_jmp_im(pc_start - s->cs_base);
4895
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
4896
                tcg_gen_helper_0_0(helper_fpop);
4897
                break;
4898
            case 0x2c: /* frstor mem */
4899
                if (s->cc_op != CC_OP_DYNAMIC)
4900
                    gen_op_set_cc_op(s->cc_op);
4901
                gen_jmp_im(pc_start - s->cs_base);
4902
                tcg_gen_helper_0_2(helper_frstor,
4903
                                   cpu_A0, tcg_const_i32(s->dflag));
4904
                break;
4905
            case 0x2e: /* fnsave mem */
4906
                if (s->cc_op != CC_OP_DYNAMIC)
4907
                    gen_op_set_cc_op(s->cc_op);
4908
                gen_jmp_im(pc_start - s->cs_base);
4909
                tcg_gen_helper_0_2(helper_fsave,
4910
                                   cpu_A0, tcg_const_i32(s->dflag));
4911
                break;
4912
            case 0x2f: /* fnstsw mem */
4913
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
4914
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4915
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
4916
                break;
4917
            case 0x3c: /* fbld */
4918
                if (s->cc_op != CC_OP_DYNAMIC)
4919
                    gen_op_set_cc_op(s->cc_op);
4920
                gen_jmp_im(pc_start - s->cs_base);
4921
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
4922
                break;
4923
            case 0x3e: /* fbstp */
4924
                if (s->cc_op != CC_OP_DYNAMIC)
4925
                    gen_op_set_cc_op(s->cc_op);
4926
                gen_jmp_im(pc_start - s->cs_base);
4927
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
4928
                tcg_gen_helper_0_0(helper_fpop);
4929
                break;
4930
            case 0x3d: /* fildll */
4931
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4932
                                  (s->mem_index >> 2) - 1);
4933
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
4934
                break;
4935
            case 0x3f: /* fistpll */
4936
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
4937
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4938
                                  (s->mem_index >> 2) - 1);
4939
                tcg_gen_helper_0_0(helper_fpop);
4940
                break;
4941
            default:
4942
                goto illegal_op;
4943
            }
4944
        } else {
4945
            /* register float ops */
4946
            opreg = rm;
4947

    
4948
            switch(op) {
4949
            case 0x08: /* fld sti */
4950
                tcg_gen_helper_0_0(helper_fpush);
4951
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
4952
                break;
4953
            case 0x09: /* fxchg sti */
4954
            case 0x29: /* fxchg4 sti, undocumented op */
4955
            case 0x39: /* fxchg7 sti, undocumented op */
4956
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
4957
                break;
4958
            case 0x0a: /* grp d9/2 */
4959
                switch(rm) {
4960
                case 0: /* fnop */
4961
                    /* check exceptions (FreeBSD FPU probe) */
4962
                    if (s->cc_op != CC_OP_DYNAMIC)
4963
                        gen_op_set_cc_op(s->cc_op);
4964
                    gen_jmp_im(pc_start - s->cs_base);
4965
                    tcg_gen_helper_0_0(helper_fwait);
4966
                    break;
4967
                default:
4968
                    goto illegal_op;
4969
                }
4970
                break;
4971
            case 0x0c: /* grp d9/4 */
4972
                switch(rm) {
4973
                case 0: /* fchs */
4974
                    tcg_gen_helper_0_0(helper_fchs_ST0);
4975
                    break;
4976
                case 1: /* fabs */
4977
                    tcg_gen_helper_0_0(helper_fabs_ST0);
4978
                    break;
4979
                case 4: /* ftst */
4980
                    tcg_gen_helper_0_0(helper_fldz_FT0);
4981
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4982
                    break;
4983
                case 5: /* fxam */
4984
                    tcg_gen_helper_0_0(helper_fxam_ST0);
4985
                    break;
4986
                default:
4987
                    goto illegal_op;
4988
                }
4989
                break;
4990
            case 0x0d: /* grp d9/5 */
4991
                {
4992
                    switch(rm) {
4993
                    case 0:
4994
                        tcg_gen_helper_0_0(helper_fpush);
4995
                        tcg_gen_helper_0_0(helper_fld1_ST0);
4996
                        break;
4997
                    case 1:
4998
                        tcg_gen_helper_0_0(helper_fpush);
4999
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
5000
                        break;
5001
                    case 2:
5002
                        tcg_gen_helper_0_0(helper_fpush);
5003
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
5004
                        break;
5005
                    case 3:
5006
                        tcg_gen_helper_0_0(helper_fpush);
5007
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
5008
                        break;
5009
                    case 4:
5010
                        tcg_gen_helper_0_0(helper_fpush);
5011
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
5012
                        break;
5013
                    case 5:
5014
                        tcg_gen_helper_0_0(helper_fpush);
5015
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
5016
                        break;
5017
                    case 6:
5018
                        tcg_gen_helper_0_0(helper_fpush);
5019
                        tcg_gen_helper_0_0(helper_fldz_ST0);
5020
                        break;
5021
                    default:
5022
                        goto illegal_op;
5023
                    }
5024
                }
5025
                break;
5026
            case 0x0e: /* grp d9/6 */
5027
                switch(rm) {
5028
                case 0: /* f2xm1 */
5029
                    tcg_gen_helper_0_0(helper_f2xm1);
5030
                    break;
5031
                case 1: /* fyl2x */
5032
                    tcg_gen_helper_0_0(helper_fyl2x);
5033
                    break;
5034
                case 2: /* fptan */
5035
                    tcg_gen_helper_0_0(helper_fptan);
5036
                    break;
5037
                case 3: /* fpatan */
5038
                    tcg_gen_helper_0_0(helper_fpatan);
5039
                    break;
5040
                case 4: /* fxtract */
5041
                    tcg_gen_helper_0_0(helper_fxtract);
5042
                    break;
5043
                case 5: /* fprem1 */
5044
                    tcg_gen_helper_0_0(helper_fprem1);
5045
                    break;
5046
                case 6: /* fdecstp */
5047
                    tcg_gen_helper_0_0(helper_fdecstp);
5048
                    break;
5049
                default:
5050
                case 7: /* fincstp */
5051
                    tcg_gen_helper_0_0(helper_fincstp);
5052
                    break;
5053
                }
5054
                break;
5055
            case 0x0f: /* grp d9/7 */
5056
                switch(rm) {
5057
                case 0: /* fprem */
5058
                    tcg_gen_helper_0_0(helper_fprem);
5059
                    break;
5060
                case 1: /* fyl2xp1 */
5061
                    tcg_gen_helper_0_0(helper_fyl2xp1);
5062
                    break;
5063
                case 2: /* fsqrt */
5064
                    tcg_gen_helper_0_0(helper_fsqrt);
5065
                    break;
5066
                case 3: /* fsincos */
5067
                    tcg_gen_helper_0_0(helper_fsincos);
5068
                    break;
5069
                case 5: /* fscale */
5070
                    tcg_gen_helper_0_0(helper_fscale);
5071
                    break;
5072
                case 4: /* frndint */
5073
                    tcg_gen_helper_0_0(helper_frndint);
5074
                    break;
5075
                case 6: /* fsin */
5076
                    tcg_gen_helper_0_0(helper_fsin);
5077
                    break;
5078
                default:
5079
                case 7: /* fcos */
5080
                    tcg_gen_helper_0_0(helper_fcos);
5081
                    break;
5082
                }
5083
                break;
5084
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5085
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5086
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5087
                {
5088
                    int op1;
5089

    
5090
                    op1 = op & 7;
5091
                    if (op >= 0x20) {
5092
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5093
                        if (op >= 0x30)
5094
                            tcg_gen_helper_0_0(helper_fpop);
5095
                    } else {
5096
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5097
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5098
                    }
5099
                }
5100
                break;
5101
            case 0x02: /* fcom */
5102
            case 0x22: /* fcom2, undocumented op */
5103
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5104
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5105
                break;
5106
            case 0x03: /* fcomp */
5107
            case 0x23: /* fcomp3, undocumented op */
5108
            case 0x32: /* fcomp5, undocumented op */
5109
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5110
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5111
                tcg_gen_helper_0_0(helper_fpop);
5112
                break;
5113
            case 0x15: /* da/5 */
5114
                switch(rm) {
5115
                case 1: /* fucompp */
5116
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5117
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5118
                    tcg_gen_helper_0_0(helper_fpop);
5119
                    tcg_gen_helper_0_0(helper_fpop);
5120
                    break;
5121
                default:
5122
                    goto illegal_op;
5123
                }
5124
                break;
5125
            case 0x1c:
5126
                switch(rm) {
5127
                case 0: /* feni (287 only, just do nop here) */
5128
                    break;
5129
                case 1: /* fdisi (287 only, just do nop here) */
5130
                    break;
5131
                case 2: /* fclex */
5132
                    tcg_gen_helper_0_0(helper_fclex);
5133
                    break;
5134
                case 3: /* fninit */
5135
                    tcg_gen_helper_0_0(helper_fninit);
5136
                    break;
5137
                case 4: /* fsetpm (287 only, just do nop here) */
5138
                    break;
5139
                default:
5140
                    goto illegal_op;
5141
                }
5142
                break;
5143
            case 0x1d: /* fucomi */
5144
                if (s->cc_op != CC_OP_DYNAMIC)
5145
                    gen_op_set_cc_op(s->cc_op);
5146
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5147
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5148
                gen_op_fcomi_dummy();
5149
                s->cc_op = CC_OP_EFLAGS;
5150
                break;
5151
            case 0x1e: /* fcomi */
5152
                if (s->cc_op != CC_OP_DYNAMIC)
5153
                    gen_op_set_cc_op(s->cc_op);
5154
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5155
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5156
                gen_op_fcomi_dummy();
5157
                s->cc_op = CC_OP_EFLAGS;
5158
                break;
5159
            case 0x28: /* ffree sti */
5160
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5161
                break;
5162
            case 0x2a: /* fst sti */
5163
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5164
                break;
5165
            case 0x2b: /* fstp sti */
5166
            case 0x0b: /* fstp1 sti, undocumented op */
5167
            case 0x3a: /* fstp8 sti, undocumented op */
5168
            case 0x3b: /* fstp9 sti, undocumented op */
5169
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5170
                tcg_gen_helper_0_0(helper_fpop);
5171
                break;
5172
            case 0x2c: /* fucom st(i) */
5173
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5174
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5175
                break;
5176
            case 0x2d: /* fucomp st(i) */
5177
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5178
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5179
                tcg_gen_helper_0_0(helper_fpop);
5180
                break;
5181
            case 0x33: /* de/3 */
5182
                switch(rm) {
5183
                case 1: /* fcompp */
5184
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5185
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5186
                    tcg_gen_helper_0_0(helper_fpop);
5187
                    tcg_gen_helper_0_0(helper_fpop);
5188
                    break;
5189
                default:
5190
                    goto illegal_op;
5191
                }
5192
                break;
5193
            case 0x38: /* ffreep sti, undocumented op */
5194
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5195
                tcg_gen_helper_0_0(helper_fpop);
5196
                break;
5197
            case 0x3c: /* df/4 */
5198
                switch(rm) {
5199
                case 0:
5200
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5201
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5202
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
5203
                    break;
5204
                default:
5205
                    goto illegal_op;
5206
                }
5207
                break;
5208
            case 0x3d: /* fucomip */
5209
                if (s->cc_op != CC_OP_DYNAMIC)
5210
                    gen_op_set_cc_op(s->cc_op);
5211
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5212
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5213
                tcg_gen_helper_0_0(helper_fpop);
5214
                gen_op_fcomi_dummy();
5215
                s->cc_op = CC_OP_EFLAGS;
5216
                break;
5217
            case 0x3e: /* fcomip */
5218
                if (s->cc_op != CC_OP_DYNAMIC)
5219
                    gen_op_set_cc_op(s->cc_op);
5220
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5221
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5222
                tcg_gen_helper_0_0(helper_fpop);
5223
                gen_op_fcomi_dummy();
5224
                s->cc_op = CC_OP_EFLAGS;
5225
                break;
5226
            case 0x10 ... 0x13: /* fcmovxx */
5227
            case 0x18 ... 0x1b:
5228
                {
5229
                    int op1, l1;
5230
                    const static uint8_t fcmov_cc[8] = {
5231
                        (JCC_B << 1),
5232
                        (JCC_Z << 1),
5233
                        (JCC_BE << 1),
5234
                        (JCC_P << 1),
5235
                    };
5236
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5237
                    gen_setcc(s, op1);
5238
                    l1 = gen_new_label();
5239
                    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5240
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5241
                    gen_set_label(l1);
5242
                }
5243
                break;
5244
            default:
5245
                goto illegal_op;
5246
            }
5247
        }
5248
        break;
5249
        /************************/
5250
        /* string ops */
5251

    
5252
    case 0xa4: /* movsS */
5253
    case 0xa5:
5254
        if ((b & 1) == 0)
5255
            ot = OT_BYTE;
5256
        else
5257
            ot = dflag + OT_WORD;
5258

    
5259
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5260
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5261
        } else {
5262
            gen_movs(s, ot);
5263
        }
5264
        break;
5265

    
5266
    case 0xaa: /* stosS */
5267
    case 0xab:
5268
        if ((b & 1) == 0)
5269
            ot = OT_BYTE;
5270
        else
5271
            ot = dflag + OT_WORD;
5272

    
5273
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5274
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5275
        } else {
5276
            gen_stos(s, ot);
5277
        }
5278
        break;
5279
    case 0xac: /* lodsS */
5280
    case 0xad:
5281
        if ((b & 1) == 0)
5282
            ot = OT_BYTE;
5283
        else
5284
            ot = dflag + OT_WORD;
5285
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5286
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5287
        } else {
5288
            gen_lods(s, ot);
5289
        }
5290
        break;
5291
    case 0xae: /* scasS */
5292
    case 0xaf:
5293
        if ((b & 1) == 0)
5294
            ot = OT_BYTE;
5295
        else
5296
            ot = dflag + OT_WORD;
5297
        if (prefixes & PREFIX_REPNZ) {
5298
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5299
        } else if (prefixes & PREFIX_REPZ) {
5300
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5301
        } else {
5302
            gen_scas(s, ot);
5303
            s->cc_op = CC_OP_SUBB + ot;
5304
        }
5305
        break;
5306

    
5307
    case 0xa6: /* cmpsS */
5308
    case 0xa7:
5309
        if ((b & 1) == 0)
5310
            ot = OT_BYTE;
5311
        else
5312
            ot = dflag + OT_WORD;
5313
        if (prefixes & PREFIX_REPNZ) {
5314
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5315
        } else if (prefixes & PREFIX_REPZ) {
5316
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5317
        } else {
5318
            gen_cmps(s, ot);
5319
            s->cc_op = CC_OP_SUBB + ot;
5320
        }
5321
        break;
5322
    case 0x6c: /* insS */
5323
    case 0x6d:
5324
        if ((b & 1) == 0)
5325
            ot = OT_BYTE;
5326
        else
5327
            ot = dflag ? OT_LONG : OT_WORD;
5328
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5329
        gen_op_andl_T0_ffff();
5330
        gen_check_io(s, ot, pc_start - s->cs_base, 
5331
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5332
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5333
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5334
        } else {
5335
            gen_ins(s, ot);
5336
        }
5337
        break;
5338
    case 0x6e: /* outsS */
5339
    case 0x6f:
5340
        if ((b & 1) == 0)
5341
            ot = OT_BYTE;
5342
        else
5343
            ot = dflag ? OT_LONG : OT_WORD;
5344
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5345
        gen_op_andl_T0_ffff();
5346
        gen_check_io(s, ot, pc_start - s->cs_base,
5347
                     svm_is_rep(prefixes) | 4);
5348
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5349
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5350
        } else {
5351
            gen_outs(s, ot);
5352
        }
5353
        break;
5354

    
5355
        /************************/
5356
        /* port I/O */
5357

    
5358
    case 0xe4:
5359
    case 0xe5:
5360
        if ((b & 1) == 0)
5361
            ot = OT_BYTE;
5362
        else
5363
            ot = dflag ? OT_LONG : OT_WORD;
5364
        val = ldub_code(s->pc++);
5365
        gen_op_movl_T0_im(val);
5366
        gen_check_io(s, ot, pc_start - s->cs_base,
5367
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5368
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5369
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5370
        gen_op_mov_reg_T1(ot, R_EAX);
5371
        break;
5372
    case 0xe6:
5373
    case 0xe7:
5374
        if ((b & 1) == 0)
5375
            ot = OT_BYTE;
5376
        else
5377
            ot = dflag ? OT_LONG : OT_WORD;
5378
        val = ldub_code(s->pc++);
5379
        gen_op_movl_T0_im(val);
5380
        gen_check_io(s, ot, pc_start - s->cs_base,
5381
                     svm_is_rep(prefixes));
5382
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5383

    
5384
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5385
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5386
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5387
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5388
        break;
5389
    case 0xec:
5390
    case 0xed:
5391
        if ((b & 1) == 0)
5392
            ot = OT_BYTE;
5393
        else
5394
            ot = dflag ? OT_LONG : OT_WORD;
5395
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5396
        gen_op_andl_T0_ffff();
5397
        gen_check_io(s, ot, pc_start - s->cs_base,
5398
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5399
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5400
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5401
        gen_op_mov_reg_T1(ot, R_EAX);
5402
        break;
5403
    case 0xee:
5404
    case 0xef:
5405
        if ((b & 1) == 0)
5406
            ot = OT_BYTE;
5407
        else
5408
            ot = dflag ? OT_LONG : OT_WORD;
5409
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5410
        gen_op_andl_T0_ffff();
5411
        gen_check_io(s, ot, pc_start - s->cs_base,
5412
                     svm_is_rep(prefixes));
5413
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5414

    
5415
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5416
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5417
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5418
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5419
        break;
5420

    
5421
        /************************/
5422
        /* control */
5423
    case 0xc2: /* ret im */
5424
        val = ldsw_code(s->pc);
5425
        s->pc += 2;
5426
        gen_pop_T0(s);
5427
        if (CODE64(s) && s->dflag)
5428
            s->dflag = 2;
5429
        gen_stack_update(s, val + (2 << s->dflag));
5430
        if (s->dflag == 0)
5431
            gen_op_andl_T0_ffff();
5432
        gen_op_jmp_T0();
5433
        gen_eob(s);
5434
        break;
5435
    case 0xc3: /* ret */
5436
        gen_pop_T0(s);
5437
        gen_pop_update(s);
5438
        if (s->dflag == 0)
5439
            gen_op_andl_T0_ffff();
5440
        gen_op_jmp_T0();
5441
        gen_eob(s);
5442
        break;
5443
    case 0xca: /* lret im */
5444
        val = ldsw_code(s->pc);
5445
        s->pc += 2;
5446
    do_lret:
5447
        if (s->pe && !s->vm86) {
5448
            if (s->cc_op != CC_OP_DYNAMIC)
5449
                gen_op_set_cc_op(s->cc_op);
5450
            gen_jmp_im(pc_start - s->cs_base);
5451
            tcg_gen_helper_0_2(helper_lret_protected,
5452
                               tcg_const_i32(s->dflag), 
5453
                               tcg_const_i32(val));
5454
        } else {
5455
            gen_stack_A0(s);
5456
            /* pop offset */
5457
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5458
            if (s->dflag == 0)
5459
                gen_op_andl_T0_ffff();
5460
            /* NOTE: keeping EIP updated is not a problem in case of
5461
               exception */
5462
            gen_op_jmp_T0();
5463
            /* pop selector */
5464
            gen_op_addl_A0_im(2 << s->dflag);
5465
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5466
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5467
            /* add stack offset */
5468
            gen_stack_update(s, val + (4 << s->dflag));
5469
        }
5470
        gen_eob(s);
5471
        break;
5472
    case 0xcb: /* lret */
5473
        val = 0;
5474
        goto do_lret;
5475
    case 0xcf: /* iret */
5476
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5477
            break;
5478
        if (!s->pe) {
5479
            /* real mode */
5480
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5481
            s->cc_op = CC_OP_EFLAGS;
5482
        } else if (s->vm86) {
5483
            if (s->iopl != 3) {
5484
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5485
            } else {
5486
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5487
                s->cc_op = CC_OP_EFLAGS;
5488
            }
5489
        } else {
5490
            if (s->cc_op != CC_OP_DYNAMIC)
5491
                gen_op_set_cc_op(s->cc_op);
5492
            gen_jmp_im(pc_start - s->cs_base);
5493
            tcg_gen_helper_0_2(helper_iret_protected,
5494
                               tcg_const_i32(s->dflag), 
5495
                               tcg_const_i32(s->pc - s->cs_base));
5496
            s->cc_op = CC_OP_EFLAGS;
5497
        }
5498
        gen_eob(s);
5499
        break;
5500
    case 0xe8: /* call im */
5501
        {
5502
            if (dflag)
5503
                tval = (int32_t)insn_get(s, OT_LONG);
5504
            else
5505
                tval = (int16_t)insn_get(s, OT_WORD);
5506
            next_eip = s->pc - s->cs_base;
5507
            tval += next_eip;
5508
            if (s->dflag == 0)
5509
                tval &= 0xffff;
5510
            gen_movtl_T0_im(next_eip);
5511
            gen_push_T0(s);
5512
            gen_jmp(s, tval);
5513
        }
5514
        break;
5515
    case 0x9a: /* lcall im */
5516
        {
5517
            unsigned int selector, offset;
5518

    
5519
            if (CODE64(s))
5520
                goto illegal_op;
5521
            ot = dflag ? OT_LONG : OT_WORD;
5522
            offset = insn_get(s, ot);
5523
            selector = insn_get(s, OT_WORD);
5524

    
5525
            gen_op_movl_T0_im(selector);
5526
            gen_op_movl_T1_imu(offset);
5527
        }
5528
        goto do_lcall;
5529
    case 0xe9: /* jmp im */
5530
        if (dflag)
5531
            tval = (int32_t)insn_get(s, OT_LONG);
5532
        else
5533
            tval = (int16_t)insn_get(s, OT_WORD);
5534
        tval += s->pc - s->cs_base;
5535
        if (s->dflag == 0)
5536
            tval &= 0xffff;
5537
        gen_jmp(s, tval);
5538
        break;
5539
    case 0xea: /* ljmp im */
5540
        {
5541
            unsigned int selector, offset;
5542

    
5543
            if (CODE64(s))
5544
                goto illegal_op;
5545
            ot = dflag ? OT_LONG : OT_WORD;
5546
            offset = insn_get(s, ot);
5547
            selector = insn_get(s, OT_WORD);
5548

    
5549
            gen_op_movl_T0_im(selector);
5550
            gen_op_movl_T1_imu(offset);
5551
        }
5552
        goto do_ljmp;
5553
    case 0xeb: /* jmp Jb */
5554
        tval = (int8_t)insn_get(s, OT_BYTE);
5555
        tval += s->pc - s->cs_base;
5556
        if (s->dflag == 0)
5557
            tval &= 0xffff;
5558
        gen_jmp(s, tval);
5559
        break;
5560
    case 0x70 ... 0x7f: /* jcc Jb */
5561
        tval = (int8_t)insn_get(s, OT_BYTE);
5562
        goto do_jcc;
5563
    case 0x180 ... 0x18f: /* jcc Jv */
5564
        if (dflag) {
5565
            tval = (int32_t)insn_get(s, OT_LONG);
5566
        } else {
5567
            tval = (int16_t)insn_get(s, OT_WORD);
5568
        }
5569
    do_jcc:
5570
        next_eip = s->pc - s->cs_base;
5571
        tval += next_eip;
5572
        if (s->dflag == 0)
5573
            tval &= 0xffff;
5574
        gen_jcc(s, b, tval, next_eip);
5575
        break;
5576

    
5577
    case 0x190 ... 0x19f: /* setcc Gv */
5578
        modrm = ldub_code(s->pc++);
5579
        gen_setcc(s, b);
5580
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5581
        break;
5582
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5583
        ot = dflag + OT_WORD;
5584
        modrm = ldub_code(s->pc++);
5585
        reg = ((modrm >> 3) & 7) | rex_r;
5586
        mod = (modrm >> 6) & 3;
5587
        gen_setcc(s, b);
5588
        if (mod != 3) {
5589
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5590
            gen_op_ld_T1_A0(ot + s->mem_index);
5591
        } else {
5592
            rm = (modrm & 7) | REX_B(s);
5593
            gen_op_mov_TN_reg(ot, 1, rm);
5594
        }
5595
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5596
        break;
5597

    
5598
        /************************/
5599
        /* flags */
5600
    case 0x9c: /* pushf */
5601
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5602
            break;
5603
        if (s->vm86 && s->iopl != 3) {
5604
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5605
        } else {
5606
            if (s->cc_op != CC_OP_DYNAMIC)
5607
                gen_op_set_cc_op(s->cc_op);
5608
            gen_op_movl_T0_eflags();
5609
            gen_push_T0(s);
5610
        }
5611
        break;
5612
    case 0x9d: /* popf */
5613
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5614
            break;
5615
        if (s->vm86 && s->iopl != 3) {
5616
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5617
        } else {
5618
            gen_pop_T0(s);
5619
            if (s->cpl == 0) {
5620
                if (s->dflag) {
5621
                    gen_op_movl_eflags_T0_cpl0();
5622
                } else {
5623
                    gen_op_movw_eflags_T0_cpl0();
5624
                }
5625
            } else {
5626
                if (s->cpl <= s->iopl) {
5627
                    if (s->dflag) {
5628
                        gen_op_movl_eflags_T0_io();
5629
                    } else {
5630
                        gen_op_movw_eflags_T0_io();
5631
                    }
5632
                } else {
5633
                    if (s->dflag) {
5634
                        gen_op_movl_eflags_T0();
5635
                    } else {
5636
                        gen_op_movw_eflags_T0();
5637
                    }
5638
                }
5639
            }
5640
            gen_pop_update(s);
5641
            s->cc_op = CC_OP_EFLAGS;
5642
            /* abort translation because TF flag may change */
5643
            gen_jmp_im(s->pc - s->cs_base);
5644
            gen_eob(s);
5645
        }
5646
        break;
5647
    case 0x9e: /* sahf */
5648
        if (CODE64(s))
5649
            goto illegal_op;
5650
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5651
        if (s->cc_op != CC_OP_DYNAMIC)
5652
            gen_op_set_cc_op(s->cc_op);
5653
        gen_op_movb_eflags_T0();
5654
        s->cc_op = CC_OP_EFLAGS;
5655
        break;
5656
    case 0x9f: /* lahf */
5657
        if (CODE64(s))
5658
            goto illegal_op;
5659
        if (s->cc_op != CC_OP_DYNAMIC)
5660
            gen_op_set_cc_op(s->cc_op);
5661
        gen_op_movl_T0_eflags();
5662
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5663
        break;
5664
    case 0xf5: /* cmc */
5665
        if (s->cc_op != CC_OP_DYNAMIC)
5666
            gen_op_set_cc_op(s->cc_op);
5667
        gen_op_cmc();
5668
        s->cc_op = CC_OP_EFLAGS;
5669
        break;
5670
    case 0xf8: /* clc */
5671
        if (s->cc_op != CC_OP_DYNAMIC)
5672
            gen_op_set_cc_op(s->cc_op);
5673
        gen_op_clc();
5674
        s->cc_op = CC_OP_EFLAGS;
5675
        break;
5676
    case 0xf9: /* stc */
5677
        if (s->cc_op != CC_OP_DYNAMIC)
5678
            gen_op_set_cc_op(s->cc_op);
5679
        gen_op_stc();
5680
        s->cc_op = CC_OP_EFLAGS;
5681
        break;
5682
    case 0xfc: /* cld */
5683
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5684
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5685
        break;
5686
    case 0xfd: /* std */
5687
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5688
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5689
        break;
5690

    
5691
        /************************/
5692
        /* bit operations */
5693
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5694
        ot = dflag + OT_WORD;
5695
        modrm = ldub_code(s->pc++);
5696
        op = (modrm >> 3) & 7;
5697
        mod = (modrm >> 6) & 3;
5698
        rm = (modrm & 7) | REX_B(s);
5699
        if (mod != 3) {
5700
            s->rip_offset = 1;
5701
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5702
            gen_op_ld_T0_A0(ot + s->mem_index);
5703
        } else {
5704
            gen_op_mov_TN_reg(ot, 0, rm);
5705
        }
5706
        /* load shift */
5707
        val = ldub_code(s->pc++);
5708
        gen_op_movl_T1_im(val);
5709
        if (op < 4)
5710
            goto illegal_op;
5711
        op -= 4;
5712
        goto bt_op;
5713
    case 0x1a3: /* bt Gv, Ev */
5714
        op = 0;
5715
        goto do_btx;
5716
    case 0x1ab: /* bts */
5717
        op = 1;
5718
        goto do_btx;
5719
    case 0x1b3: /* btr */
5720
        op = 2;
5721
        goto do_btx;
5722
    case 0x1bb: /* btc */
5723
        op = 3;
5724
    do_btx:
5725
        ot = dflag + OT_WORD;
5726
        modrm = ldub_code(s->pc++);
5727
        reg = ((modrm >> 3) & 7) | rex_r;
5728
        mod = (modrm >> 6) & 3;
5729
        rm = (modrm & 7) | REX_B(s);
5730
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5731
        if (mod != 3) {
5732
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5733
            /* specific case: we need to add a displacement */
5734
            gen_exts(ot, cpu_T[1]);
5735
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
5736
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
5737
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
5738
            gen_op_ld_T0_A0(ot + s->mem_index);
5739
        } else {
5740
            gen_op_mov_TN_reg(ot, 0, rm);
5741
        }
5742
    bt_op:
5743
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
5744
        switch(op) {
5745
        case 0:
5746
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
5747
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5748
            break;
5749
        case 1:
5750
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5751
            tcg_gen_movi_tl(cpu_tmp0, 1);
5752
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5753
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5754
            break;
5755
        case 2:
5756
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5757
            tcg_gen_movi_tl(cpu_tmp0, 1);
5758
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5759
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
5760
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5761
            break;
5762
        default:
5763
        case 3:
5764
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5765
            tcg_gen_movi_tl(cpu_tmp0, 1);
5766
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5767
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5768
            break;
5769
        }
5770
        s->cc_op = CC_OP_SARB + ot;
5771
        if (op != 0) {
5772
            if (mod != 3)
5773
                gen_op_st_T0_A0(ot + s->mem_index);
5774
            else
5775
                gen_op_mov_reg_T0(ot, rm);
5776
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
5777
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5778
        }
5779
        break;
5780
    case 0x1bc: /* bsf */
5781
    case 0x1bd: /* bsr */
5782
        {
5783
            int label1;
5784
            ot = dflag + OT_WORD;
5785
            modrm = ldub_code(s->pc++);
5786
            reg = ((modrm >> 3) & 7) | rex_r;
5787
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5788
            gen_extu(ot, cpu_T[0]);
5789
            label1 = gen_new_label();
5790
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5791
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), label1);
5792
            if (b & 1) {
5793
                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], cpu_T[0]);
5794
            } else {
5795
                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], cpu_T[0]);
5796
            }
5797
            gen_op_mov_reg_T0(ot, reg);
5798
            tcg_gen_movi_tl(cpu_cc_dst, 1);
5799
            gen_set_label(label1);
5800
            tcg_gen_discard_tl(cpu_cc_src);
5801
            s->cc_op = CC_OP_LOGICB + ot;
5802
        }
5803
        break;
5804
        /************************/
5805
        /* bcd */
5806
    case 0x27: /* daa */
5807
        if (CODE64(s))
5808
            goto illegal_op;
5809
        if (s->cc_op != CC_OP_DYNAMIC)
5810
            gen_op_set_cc_op(s->cc_op);
5811
        gen_op_daa();
5812
        s->cc_op = CC_OP_EFLAGS;
5813
        break;
5814
    case 0x2f: /* das */
5815
        if (CODE64(s))
5816
            goto illegal_op;
5817
        if (s->cc_op != CC_OP_DYNAMIC)
5818
            gen_op_set_cc_op(s->cc_op);
5819
        gen_op_das();
5820
        s->cc_op = CC_OP_EFLAGS;
5821
        break;
5822
    case 0x37: /* aaa */
5823
        if (CODE64(s))
5824
            goto illegal_op;
5825
        if (s->cc_op != CC_OP_DYNAMIC)
5826
            gen_op_set_cc_op(s->cc_op);
5827
        gen_op_aaa();
5828
        s->cc_op = CC_OP_EFLAGS;
5829
        break;
5830
    case 0x3f: /* aas */
5831
        if (CODE64(s))
5832
            goto illegal_op;
5833
        if (s->cc_op != CC_OP_DYNAMIC)
5834
            gen_op_set_cc_op(s->cc_op);
5835
        gen_op_aas();
5836
        s->cc_op = CC_OP_EFLAGS;
5837
        break;
5838
    case 0xd4: /* aam */
5839
        if (CODE64(s))
5840
            goto illegal_op;
5841
        val = ldub_code(s->pc++);
5842
        if (val == 0) {
5843
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5844
        } else {
5845
            gen_op_aam(val);
5846
            s->cc_op = CC_OP_LOGICB;
5847
        }
5848
        break;
5849
    case 0xd5: /* aad */
5850
        if (CODE64(s))
5851
            goto illegal_op;
5852
        val = ldub_code(s->pc++);
5853
        gen_op_aad(val);
5854
        s->cc_op = CC_OP_LOGICB;
5855
        break;
5856
        /************************/
5857
        /* misc */
5858
    case 0x90: /* nop */
5859
        /* XXX: xchg + rex handling */
5860
        /* XXX: correct lock test for all insn */
5861
        if (prefixes & PREFIX_LOCK)
5862
            goto illegal_op;
5863
        if (prefixes & PREFIX_REPZ) {
5864
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5865
        }
5866
        break;
5867
    case 0x9b: /* fwait */
5868
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5869
            (HF_MP_MASK | HF_TS_MASK)) {
5870
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5871
        } else {
5872
            if (s->cc_op != CC_OP_DYNAMIC)
5873
                gen_op_set_cc_op(s->cc_op);
5874
            gen_jmp_im(pc_start - s->cs_base);
5875
            tcg_gen_helper_0_0(helper_fwait);
5876
        }
5877
        break;
5878
    case 0xcc: /* int3 */
5879
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5880
            break;
5881
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5882
        break;
5883
    case 0xcd: /* int N */
5884
        val = ldub_code(s->pc++);
5885
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5886
            break;
5887
        if (s->vm86 && s->iopl != 3) {
5888
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5889
        } else {
5890
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5891
        }
5892
        break;
5893
    case 0xce: /* into */
5894
        if (CODE64(s))
5895
            goto illegal_op;
5896
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5897
            break;
5898
        if (s->cc_op != CC_OP_DYNAMIC)
5899
            gen_op_set_cc_op(s->cc_op);
5900
        gen_jmp_im(pc_start - s->cs_base);
5901
        gen_op_into(s->pc - pc_start);
5902
        break;
5903
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
5904
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5905
            break;
5906
#if 1
5907
        gen_debug(s, pc_start - s->cs_base);
5908
#else
5909
        /* start debug */
5910
        tb_flush(cpu_single_env);
5911
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5912
#endif
5913
        break;
5914
    case 0xfa: /* cli */
5915
        if (!s->vm86) {
5916
            if (s->cpl <= s->iopl) {
5917
                tcg_gen_helper_0_0(helper_cli);
5918
            } else {
5919
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5920
            }
5921
        } else {
5922
            if (s->iopl == 3) {
5923
                tcg_gen_helper_0_0(helper_cli);
5924
            } else {
5925
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5926
            }
5927
        }
5928
        break;
5929
    case 0xfb: /* sti */
5930
        if (!s->vm86) {
5931
            if (s->cpl <= s->iopl) {
5932
            gen_sti:
5933
                tcg_gen_helper_0_0(helper_sti);
5934
                /* interruptions are enabled only the first insn after sti */
5935
                /* If several instructions disable interrupts, only the
5936
                   _first_ does it */
5937
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5938
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
5939
                /* give a chance to handle pending irqs */
5940
                gen_jmp_im(s->pc - s->cs_base);
5941
                gen_eob(s);
5942
            } else {
5943
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5944
            }
5945
        } else {
5946
            if (s->iopl == 3) {
5947
                goto gen_sti;
5948
            } else {
5949
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5950
            }
5951
        }
5952
        break;
5953
    case 0x62: /* bound */
5954
        if (CODE64(s))
5955
            goto illegal_op;
5956
        ot = dflag ? OT_LONG : OT_WORD;
5957
        modrm = ldub_code(s->pc++);
5958
        reg = (modrm >> 3) & 7;
5959
        mod = (modrm >> 6) & 3;
5960
        if (mod == 3)
5961
            goto illegal_op;
5962
        gen_op_mov_TN_reg(ot, 0, reg);
5963
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5964
        gen_jmp_im(pc_start - s->cs_base);
5965
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5966
        if (ot == OT_WORD)
5967
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
5968
        else
5969
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
5970
        break;
5971
    case 0x1c8 ... 0x1cf: /* bswap reg */
5972
        reg = (b & 7) | REX_B(s);
5973
#ifdef TARGET_X86_64
5974
        if (dflag == 2) {
5975
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5976
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5977
            gen_op_mov_reg_T0(OT_QUAD, reg);
5978
        } else
5979
        {
5980
            TCGv tmp0;
5981
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
5982
            
5983
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
5984
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5985
            tcg_gen_bswap_i32(tmp0, tmp0);
5986
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5987
            gen_op_mov_reg_T0(OT_LONG, reg);
5988
        }
5989
#else
5990
        {
5991
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
5992
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5993
            gen_op_mov_reg_T0(OT_LONG, reg);
5994
        }
5995
#endif
5996
        break;
5997
    case 0xd6: /* salc */
5998
        if (CODE64(s))
5999
            goto illegal_op;
6000
        if (s->cc_op != CC_OP_DYNAMIC)
6001
            gen_op_set_cc_op(s->cc_op);
6002
        gen_op_salc();
6003
        break;
6004
    case 0xe0: /* loopnz */
6005
    case 0xe1: /* loopz */
6006
    case 0xe2: /* loop */
6007
    case 0xe3: /* jecxz */
6008
        {
6009
            int l1, l2, l3;
6010

    
6011
            tval = (int8_t)insn_get(s, OT_BYTE);
6012
            next_eip = s->pc - s->cs_base;
6013
            tval += next_eip;
6014
            if (s->dflag == 0)
6015
                tval &= 0xffff;
6016

    
6017
            l1 = gen_new_label();
6018
            l2 = gen_new_label();
6019
            l3 = gen_new_label();
6020
            b &= 3;
6021
            switch(b) {
6022
            case 0: /* loopnz */
6023
            case 1: /* loopz */
6024
                if (s->cc_op != CC_OP_DYNAMIC)
6025
                    gen_op_set_cc_op(s->cc_op);
6026
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6027
                gen_op_jz_ecx(s->aflag, l3);
6028
                gen_compute_eflags(cpu_tmp0);
6029
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6030
                if (b == 0) {
6031
                    tcg_gen_brcond_tl(TCG_COND_EQ, 
6032
                                      cpu_tmp0, tcg_const_tl(0), l1);
6033
                } else {
6034
                    tcg_gen_brcond_tl(TCG_COND_NE, 
6035
                                      cpu_tmp0, tcg_const_tl(0), l1);
6036
                }
6037
                break;
6038
            case 2: /* loop */
6039
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6040
                gen_op_jnz_ecx(s->aflag, l1);
6041
                break;
6042
            default:
6043
            case 3: /* jcxz */
6044
                gen_op_jz_ecx(s->aflag, l1);
6045
                break;
6046
            }
6047

    
6048
            gen_set_label(l3);
6049
            gen_jmp_im(next_eip);
6050
            gen_op_jmp_label(l2);
6051

    
6052
            gen_set_label(l1);
6053
            gen_jmp_im(tval);
6054
            gen_set_label(l2);
6055
            gen_eob(s);
6056
        }
6057
        break;
6058
    case 0x130: /* wrmsr */
6059
    case 0x132: /* rdmsr */
6060
        if (s->cpl != 0) {
6061
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6062
        } else {
6063
            int retval = 0;
6064
            if (b & 2) {
6065
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6066
                tcg_gen_helper_0_0(helper_rdmsr);
6067
            } else {
6068
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6069
                tcg_gen_helper_0_0(helper_wrmsr);
6070
            }
6071
            if(retval)
6072
                gen_eob(s);
6073
        }
6074
        break;
6075
    case 0x131: /* rdtsc */
6076
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6077
            break;
6078
        gen_jmp_im(pc_start - s->cs_base);
6079
        tcg_gen_helper_0_0(helper_rdtsc);
6080
        break;
6081
    case 0x133: /* rdpmc */
6082
        gen_jmp_im(pc_start - s->cs_base);
6083
        tcg_gen_helper_0_0(helper_rdpmc);
6084
        break;
6085
    case 0x134: /* sysenter */
6086
        if (CODE64(s))
6087
            goto illegal_op;
6088
        if (!s->pe) {
6089
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6090
        } else {
6091
            if (s->cc_op != CC_OP_DYNAMIC) {
6092
                gen_op_set_cc_op(s->cc_op);
6093
                s->cc_op = CC_OP_DYNAMIC;
6094
            }
6095
            gen_jmp_im(pc_start - s->cs_base);
6096
            tcg_gen_helper_0_0(helper_sysenter);
6097
            gen_eob(s);
6098
        }
6099
        break;
6100
    case 0x135: /* sysexit */
6101
        if (CODE64(s))
6102
            goto illegal_op;
6103
        if (!s->pe) {
6104
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6105
        } else {
6106
            if (s->cc_op != CC_OP_DYNAMIC) {
6107
                gen_op_set_cc_op(s->cc_op);
6108
                s->cc_op = CC_OP_DYNAMIC;
6109
            }
6110
            gen_jmp_im(pc_start - s->cs_base);
6111
            tcg_gen_helper_0_0(helper_sysexit);
6112
            gen_eob(s);
6113
        }
6114
        break;
6115
#ifdef TARGET_X86_64
6116
    case 0x105: /* syscall */
6117
        /* XXX: is it usable in real mode ? */
6118
        if (s->cc_op != CC_OP_DYNAMIC) {
6119
            gen_op_set_cc_op(s->cc_op);
6120
            s->cc_op = CC_OP_DYNAMIC;
6121
        }
6122
        gen_jmp_im(pc_start - s->cs_base);
6123
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6124
        gen_eob(s);
6125
        break;
6126
    case 0x107: /* sysret */
6127
        if (!s->pe) {
6128
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6129
        } else {
6130
            if (s->cc_op != CC_OP_DYNAMIC) {
6131
                gen_op_set_cc_op(s->cc_op);
6132
                s->cc_op = CC_OP_DYNAMIC;
6133
            }
6134
            gen_jmp_im(pc_start - s->cs_base);
6135
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6136
            /* condition codes are modified only in long mode */
6137
            if (s->lma)
6138
                s->cc_op = CC_OP_EFLAGS;
6139
            gen_eob(s);
6140
        }
6141
        break;
6142
#endif
6143
    case 0x1a2: /* cpuid */
6144
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6145
            break;
6146
        tcg_gen_helper_0_0(helper_cpuid);
6147
        break;
6148
    case 0xf4: /* hlt */
6149
        if (s->cpl != 0) {
6150
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6151
        } else {
6152
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6153
                break;
6154
            if (s->cc_op != CC_OP_DYNAMIC)
6155
                gen_op_set_cc_op(s->cc_op);
6156
            gen_jmp_im(s->pc - s->cs_base);
6157
            tcg_gen_helper_0_0(helper_hlt);
6158
            s->is_jmp = 3;
6159
        }
6160
        break;
6161
    case 0x100:
6162
        modrm = ldub_code(s->pc++);
6163
        mod = (modrm >> 6) & 3;
6164
        op = (modrm >> 3) & 7;
6165
        switch(op) {
6166
        case 0: /* sldt */
6167
            if (!s->pe || s->vm86)
6168
                goto illegal_op;
6169
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6170
                break;
6171
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
6172
            ot = OT_WORD;
6173
            if (mod == 3)
6174
                ot += s->dflag;
6175
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6176
            break;
6177
        case 2: /* lldt */
6178
            if (!s->pe || s->vm86)
6179
                goto illegal_op;
6180
            if (s->cpl != 0) {
6181
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6182
            } else {
6183
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6184
                    break;
6185
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6186
                gen_jmp_im(pc_start - s->cs_base);
6187
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6188
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6189
            }
6190
            break;
6191
        case 1: /* str */
6192
            if (!s->pe || s->vm86)
6193
                goto illegal_op;
6194
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6195
                break;
6196
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
6197
            ot = OT_WORD;
6198
            if (mod == 3)
6199
                ot += s->dflag;
6200
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6201
            break;
6202
        case 3: /* ltr */
6203
            if (!s->pe || s->vm86)
6204
                goto illegal_op;
6205
            if (s->cpl != 0) {
6206
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6207
            } else {
6208
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6209
                    break;
6210
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6211
                gen_jmp_im(pc_start - s->cs_base);
6212
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6213
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6214
            }
6215
            break;
6216
        case 4: /* verr */
6217
        case 5: /* verw */
6218
            if (!s->pe || s->vm86)
6219
                goto illegal_op;
6220
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6221
            if (s->cc_op != CC_OP_DYNAMIC)
6222
                gen_op_set_cc_op(s->cc_op);
6223
            if (op == 4)
6224
                gen_op_verr();
6225
            else
6226
                gen_op_verw();
6227
            s->cc_op = CC_OP_EFLAGS;
6228
            break;
6229
        default:
6230
            goto illegal_op;
6231
        }
6232
        break;
6233
    case 0x101:
6234
        modrm = ldub_code(s->pc++);
6235
        mod = (modrm >> 6) & 3;
6236
        op = (modrm >> 3) & 7;
6237
        rm = modrm & 7;
6238
        switch(op) {
6239
        case 0: /* sgdt */
6240
            if (mod == 3)
6241
                goto illegal_op;
6242
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6243
                break;
6244
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6245
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
6246
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
6247
            gen_add_A0_im(s, 2);
6248
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
6249
            if (!s->dflag)
6250
                gen_op_andl_T0_im(0xffffff);
6251
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6252
            break;
6253
        case 1:
6254
            if (mod == 3) {
6255
                switch (rm) {
6256
                case 0: /* monitor */
6257
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6258
                        s->cpl != 0)
6259
                        goto illegal_op;
6260
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6261
                        break;
6262
                    gen_jmp_im(pc_start - s->cs_base);
6263
#ifdef TARGET_X86_64
6264
                    if (s->aflag == 2) {
6265
                        gen_op_movq_A0_reg(R_EAX);
6266
                    } else
6267
#endif
6268
                    {
6269
                        gen_op_movl_A0_reg(R_EAX);
6270
                        if (s->aflag == 0)
6271
                            gen_op_andl_A0_ffff();
6272
                    }
6273
                    gen_add_A0_ds_seg(s);
6274
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6275
                    break;
6276
                case 1: /* mwait */
6277
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6278
                        s->cpl != 0)
6279
                        goto illegal_op;
6280
                    if (s->cc_op != CC_OP_DYNAMIC) {
6281
                        gen_op_set_cc_op(s->cc_op);
6282
                        s->cc_op = CC_OP_DYNAMIC;
6283
                    }
6284
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6285
                        break;
6286
                    gen_jmp_im(s->pc - s->cs_base);
6287
                    tcg_gen_helper_0_0(helper_mwait);
6288
                    gen_eob(s);
6289
                    break;
6290
                default:
6291
                    goto illegal_op;
6292
                }
6293
            } else { /* sidt */
6294
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6295
                    break;
6296
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6297
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6298
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
6299
                gen_add_A0_im(s, 2);
6300
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6301
                if (!s->dflag)
6302
                    gen_op_andl_T0_im(0xffffff);
6303
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6304
            }
6305
            break;
6306
        case 2: /* lgdt */
6307
        case 3: /* lidt */
6308
            if (mod == 3) {
6309
                switch(rm) {
6310
                case 0: /* VMRUN */
6311
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6312
                        break;
6313
                    if (s->cc_op != CC_OP_DYNAMIC)
6314
                        gen_op_set_cc_op(s->cc_op);
6315
                    gen_jmp_im(s->pc - s->cs_base);
6316
                    tcg_gen_helper_0_0(helper_vmrun);
6317
                    s->cc_op = CC_OP_EFLAGS;
6318
                    gen_eob(s);
6319
                    break;
6320
                case 1: /* VMMCALL */
6321
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6322
                         break;
6323
                    /* FIXME: cause #UD if hflags & SVM */
6324
                    tcg_gen_helper_0_0(helper_vmmcall);
6325
                    break;
6326
                case 2: /* VMLOAD */
6327
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6328
                         break;
6329
                    tcg_gen_helper_0_0(helper_vmload);
6330
                    break;
6331
                case 3: /* VMSAVE */
6332
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6333
                         break;
6334
                    tcg_gen_helper_0_0(helper_vmsave);
6335
                    break;
6336
                case 4: /* STGI */
6337
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6338
                         break;
6339
                    tcg_gen_helper_0_0(helper_stgi);
6340
                    break;
6341
                case 5: /* CLGI */
6342
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6343
                         break;
6344
                    tcg_gen_helper_0_0(helper_clgi);
6345
                    break;
6346
                case 6: /* SKINIT */
6347
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6348
                         break;
6349
                    tcg_gen_helper_0_0(helper_skinit);
6350
                    break;
6351
                case 7: /* INVLPGA */
6352
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6353
                         break;
6354
                    tcg_gen_helper_0_0(helper_invlpga);
6355
                    break;
6356
                default:
6357
                    goto illegal_op;
6358
                }
6359
            } else if (s->cpl != 0) {
6360
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6361
            } else {
6362
                if (gen_svm_check_intercept(s, pc_start,
6363
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6364
                    break;
6365
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6366
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6367
                gen_add_A0_im(s, 2);
6368
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6369
                if (!s->dflag)
6370
                    gen_op_andl_T0_im(0xffffff);
6371
                if (op == 2) {
6372
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6373
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6374
                } else {
6375
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6376
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6377
                }
6378
            }
6379
            break;
6380
        case 4: /* smsw */
6381
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6382
                break;
6383
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6384
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6385
            break;
6386
        case 6: /* lmsw */
6387
            if (s->cpl != 0) {
6388
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6389
            } else {
6390
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6391
                    break;
6392
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6393
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6394
                gen_jmp_im(s->pc - s->cs_base);
6395
                gen_eob(s);
6396
            }
6397
            break;
6398
        case 7: /* invlpg */
6399
            if (s->cpl != 0) {
6400
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6401
            } else {
6402
                if (mod == 3) {
6403
#ifdef TARGET_X86_64
6404
                    if (CODE64(s) && rm == 0) {
6405
                        /* swapgs */
6406
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6407
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6408
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6409
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6410
                    } else
6411
#endif
6412
                    {
6413
                        goto illegal_op;
6414
                    }
6415
                } else {
6416
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6417
                        break;
6418
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6419
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6420
                    gen_jmp_im(s->pc - s->cs_base);
6421
                    gen_eob(s);
6422
                }
6423
            }
6424
            break;
6425
        default:
6426
            goto illegal_op;
6427
        }
6428
        break;
6429
    case 0x108: /* invd */
6430
    case 0x109: /* wbinvd */
6431
        if (s->cpl != 0) {
6432
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6433
        } else {
6434
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6435
                break;
6436
            /* nothing to do */
6437
        }
6438
        break;
6439
    case 0x63: /* arpl or movslS (x86_64) */
6440
#ifdef TARGET_X86_64
6441
        if (CODE64(s)) {
6442
            int d_ot;
6443
            /* d_ot is the size of destination */
6444
            d_ot = dflag + OT_WORD;
6445

    
6446
            modrm = ldub_code(s->pc++);
6447
            reg = ((modrm >> 3) & 7) | rex_r;
6448
            mod = (modrm >> 6) & 3;
6449
            rm = (modrm & 7) | REX_B(s);
6450

    
6451
            if (mod == 3) {
6452
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6453
                /* sign extend */
6454
                if (d_ot == OT_QUAD)
6455
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
6456
                gen_op_mov_reg_T0(d_ot, reg);
6457
            } else {
6458
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6459
                if (d_ot == OT_QUAD) {
6460
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6461
                } else {
6462
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6463
                }
6464
                gen_op_mov_reg_T0(d_ot, reg);
6465
            }
6466
        } else
6467
#endif
6468
        {
6469
            if (!s->pe || s->vm86)
6470
                goto illegal_op;
6471
            ot = dflag ? OT_LONG : OT_WORD;
6472
            modrm = ldub_code(s->pc++);
6473
            reg = (modrm >> 3) & 7;
6474
            mod = (modrm >> 6) & 3;
6475
            rm = modrm & 7;
6476
            if (mod != 3) {
6477
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6478
                gen_op_ld_T0_A0(ot + s->mem_index);
6479
            } else {
6480
                gen_op_mov_TN_reg(ot, 0, rm);
6481
            }
6482
            gen_op_mov_TN_reg(ot, 1, reg);
6483
            if (s->cc_op != CC_OP_DYNAMIC)
6484
                gen_op_set_cc_op(s->cc_op);
6485
            gen_op_arpl();
6486
            s->cc_op = CC_OP_EFLAGS;
6487
            if (mod != 3) {
6488
                gen_op_st_T0_A0(ot + s->mem_index);
6489
            } else {
6490
                gen_op_mov_reg_T0(ot, rm);
6491
            }
6492
            gen_op_arpl_update();
6493
        }
6494
        break;
6495
    case 0x102: /* lar */
6496
    case 0x103: /* lsl */
6497
        if (!s->pe || s->vm86)
6498
            goto illegal_op;
6499
        ot = dflag ? OT_LONG : OT_WORD;
6500
        modrm = ldub_code(s->pc++);
6501
        reg = ((modrm >> 3) & 7) | rex_r;
6502
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6503
        gen_op_mov_TN_reg(ot, 1, reg);
6504
        if (s->cc_op != CC_OP_DYNAMIC)
6505
            gen_op_set_cc_op(s->cc_op);
6506
        if (b == 0x102)
6507
            gen_op_lar();
6508
        else
6509
            gen_op_lsl();
6510
        s->cc_op = CC_OP_EFLAGS;
6511
        gen_op_mov_reg_T1(ot, reg);
6512
        break;
6513
    case 0x118:
6514
        modrm = ldub_code(s->pc++);
6515
        mod = (modrm >> 6) & 3;
6516
        op = (modrm >> 3) & 7;
6517
        switch(op) {
6518
        case 0: /* prefetchnta */
6519
        case 1: /* prefetchnt0 */
6520
        case 2: /* prefetchnt0 */
6521
        case 3: /* prefetchnt0 */
6522
            if (mod == 3)
6523
                goto illegal_op;
6524
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6525
            /* nothing more to do */
6526
            break;
6527
        default: /* nop (multi byte) */
6528
            gen_nop_modrm(s, modrm);
6529
            break;
6530
        }
6531
        break;
6532
    case 0x119 ... 0x11f: /* nop (multi byte) */
6533
        modrm = ldub_code(s->pc++);
6534
        gen_nop_modrm(s, modrm);
6535
        break;
6536
    case 0x120: /* mov reg, crN */
6537
    case 0x122: /* mov crN, reg */
6538
        if (s->cpl != 0) {
6539
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6540
        } else {
6541
            modrm = ldub_code(s->pc++);
6542
            if ((modrm & 0xc0) != 0xc0)
6543
                goto illegal_op;
6544
            rm = (modrm & 7) | REX_B(s);
6545
            reg = ((modrm >> 3) & 7) | rex_r;
6546
            if (CODE64(s))
6547
                ot = OT_QUAD;
6548
            else
6549
                ot = OT_LONG;
6550
            switch(reg) {
6551
            case 0:
6552
            case 2:
6553
            case 3:
6554
            case 4:
6555
            case 8:
6556
                if (b & 2) {
6557
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6558
                    gen_op_mov_TN_reg(ot, 0, rm);
6559
                    tcg_gen_helper_0_2(helper_movl_crN_T0, 
6560
                                       tcg_const_i32(reg), cpu_T[0]);
6561
                    gen_jmp_im(s->pc - s->cs_base);
6562
                    gen_eob(s);
6563
                } else {
6564
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6565
#if !defined(CONFIG_USER_ONLY)
6566
                    if (reg == 8)
6567
                        tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6568
                    else
6569
#endif
6570
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6571
                    gen_op_mov_reg_T0(ot, rm);
6572
                }
6573
                break;
6574
            default:
6575
                goto illegal_op;
6576
            }
6577
        }
6578
        break;
6579
    case 0x121: /* mov reg, drN */
6580
    case 0x123: /* mov drN, reg */
6581
        if (s->cpl != 0) {
6582
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6583
        } else {
6584
            modrm = ldub_code(s->pc++);
6585
            if ((modrm & 0xc0) != 0xc0)
6586
                goto illegal_op;
6587
            rm = (modrm & 7) | REX_B(s);
6588
            reg = ((modrm >> 3) & 7) | rex_r;
6589
            if (CODE64(s))
6590
                ot = OT_QUAD;
6591
            else
6592
                ot = OT_LONG;
6593
            /* XXX: do it dynamically with CR4.DE bit */
6594
            if (reg == 4 || reg == 5 || reg >= 8)
6595
                goto illegal_op;
6596
            if (b & 2) {
6597
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6598
                gen_op_mov_TN_reg(ot, 0, rm);
6599
                tcg_gen_helper_0_2(helper_movl_drN_T0,
6600
                                   tcg_const_i32(reg), cpu_T[0]);
6601
                gen_jmp_im(s->pc - s->cs_base);
6602
                gen_eob(s);
6603
            } else {
6604
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6605
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6606
                gen_op_mov_reg_T0(ot, rm);
6607
            }
6608
        }
6609
        break;
6610
    case 0x106: /* clts */
6611
        if (s->cpl != 0) {
6612
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6613
        } else {
6614
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6615
            tcg_gen_helper_0_0(helper_clts);
6616
            /* abort block because static cpu state changed */
6617
            gen_jmp_im(s->pc - s->cs_base);
6618
            gen_eob(s);
6619
        }
6620
        break;
6621
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6622
    case 0x1c3: /* MOVNTI reg, mem */
6623
        if (!(s->cpuid_features & CPUID_SSE2))
6624
            goto illegal_op;
6625
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6626
        modrm = ldub_code(s->pc++);
6627
        mod = (modrm >> 6) & 3;
6628
        if (mod == 3)
6629
            goto illegal_op;
6630
        reg = ((modrm >> 3) & 7) | rex_r;
6631
        /* generate a generic store */
6632
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6633
        break;
6634
    case 0x1ae:
6635
        modrm = ldub_code(s->pc++);
6636
        mod = (modrm >> 6) & 3;
6637
        op = (modrm >> 3) & 7;
6638
        switch(op) {
6639
        case 0: /* fxsave */
6640
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6641
                (s->flags & HF_EM_MASK))
6642
                goto illegal_op;
6643
            if (s->flags & HF_TS_MASK) {
6644
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6645
                break;
6646
            }
6647
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6648
            if (s->cc_op != CC_OP_DYNAMIC)
6649
                gen_op_set_cc_op(s->cc_op);
6650
            gen_jmp_im(pc_start - s->cs_base);
6651
            tcg_gen_helper_0_2(helper_fxsave, 
6652
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6653
            break;
6654
        case 1: /* fxrstor */
6655
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6656
                (s->flags & HF_EM_MASK))
6657
                goto illegal_op;
6658
            if (s->flags & HF_TS_MASK) {
6659
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6660
                break;
6661
            }
6662
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6663
            if (s->cc_op != CC_OP_DYNAMIC)
6664
                gen_op_set_cc_op(s->cc_op);
6665
            gen_jmp_im(pc_start - s->cs_base);
6666
            tcg_gen_helper_0_2(helper_fxrstor,
6667
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6668
            break;
6669
        case 2: /* ldmxcsr */
6670
        case 3: /* stmxcsr */
6671
            if (s->flags & HF_TS_MASK) {
6672
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6673
                break;
6674
            }
6675
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6676
                mod == 3)
6677
                goto illegal_op;
6678
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6679
            if (op == 2) {
6680
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6681
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6682
            } else {
6683
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6684
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6685
            }
6686
            break;
6687
        case 5: /* lfence */
6688
        case 6: /* mfence */
6689
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6690
                goto illegal_op;
6691
            break;
6692
        case 7: /* sfence / clflush */
6693
            if ((modrm & 0xc7) == 0xc0) {
6694
                /* sfence */
6695
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6696
                if (!(s->cpuid_features & CPUID_SSE))
6697
                    goto illegal_op;
6698
            } else {
6699
                /* clflush */
6700
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6701
                    goto illegal_op;
6702
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6703
            }
6704
            break;
6705
        default:
6706
            goto illegal_op;
6707
        }
6708
        break;
6709
    case 0x10d: /* 3DNow! prefetch(w) */
6710
        modrm = ldub_code(s->pc++);
6711
        mod = (modrm >> 6) & 3;
6712
        if (mod == 3)
6713
            goto illegal_op;
6714
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6715
        /* ignore for now */
6716
        break;
6717
    case 0x1aa: /* rsm */
6718
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6719
            break;
6720
        if (!(s->flags & HF_SMM_MASK))
6721
            goto illegal_op;
6722
        if (s->cc_op != CC_OP_DYNAMIC) {
6723
            gen_op_set_cc_op(s->cc_op);
6724
            s->cc_op = CC_OP_DYNAMIC;
6725
        }
6726
        gen_jmp_im(s->pc - s->cs_base);
6727
        tcg_gen_helper_0_0(helper_rsm);
6728
        gen_eob(s);
6729
        break;
6730
    case 0x10e ... 0x10f:
6731
        /* 3DNow! instructions, ignore prefixes */
6732
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6733
    case 0x110 ... 0x117:
6734
    case 0x128 ... 0x12f:
6735
    case 0x150 ... 0x177:
6736
    case 0x17c ... 0x17f:
6737
    case 0x1c2:
6738
    case 0x1c4 ... 0x1c6:
6739
    case 0x1d0 ... 0x1fe:
6740
        gen_sse(s, b, pc_start, rex_r);
6741
        break;
6742
    default:
6743
        goto illegal_op;
6744
    }
6745
    /* lock generation */
6746
    if (s->prefix & PREFIX_LOCK)
6747
        tcg_gen_helper_0_0(helper_unlock);
6748
    return s->pc;
6749
 illegal_op:
6750
    if (s->prefix & PREFIX_LOCK)
6751
        tcg_gen_helper_0_0(helper_unlock);
6752
    /* XXX: ensure that no lock was generated */
6753
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6754
    return s->pc;
6755
}
6756

    
6757
/* TCG macro expansion callback registered via tcg_set_macro_func().
 *
 * Expands the macro identified by 'macro_id' into TCG ops.  In normal
 * builds no macros are registered (MACRO_TEST is commented out at the
 * top of this file), so this callback intentionally does nothing.
 * 'dead_args' carries liveness information for the macro arguments and
 * is unused here.
 */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
    switch (macro_id) {
#ifdef MACRO_TEST
    case MACRO_TEST:
        /* Test macro: expand to a helper call dividing EAX by T0. */
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        break;
#endif
    default:
        /* No other macros are defined. */
        break;
    }
}
6767

    
6768
void optimize_flags_init(void)
6769
{
6770
#if TCG_TARGET_REG_BITS == 32
6771
    assert(sizeof(CCTable) == (1 << 3));
6772
#else
6773
    assert(sizeof(CCTable) == (1 << 4));
6774
#endif
6775
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6776

    
6777
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6778
#if TARGET_LONG_BITS > HOST_LONG_BITS
6779
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
6780
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
6781
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6782
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
6783
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6784
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
6785
#else
6786
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6787
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6788
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6789
#endif
6790
    cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
6791
                                TCG_AREG0, offsetof(CPUState, t3), "T3");
6792
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
6793
    /* XXX: must be suppressed once there are less fixed registers */
6794
    cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
6795
#endif
6796
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
6797
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
6798
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
6799
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
6800
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
6801
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
6802
}
6803

    
6804
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6805
   basic block 'tb'. If search_pc is TRUE, also generate PC
6806
   information for each intermediate instruction. */
6807
static inline int gen_intermediate_code_internal(CPUState *env,
6808
                                                 TranslationBlock *tb,
6809
                                                 int search_pc)
6810
{
6811
    DisasContext dc1, *dc = &dc1;
6812
    target_ulong pc_ptr;
6813
    uint16_t *gen_opc_end;
6814
    int j, lj, cflags;
6815
    uint64_t flags;
6816
    target_ulong pc_start;
6817
    target_ulong cs_base;
6818

    
6819
    /* generate intermediate code */
6820
    pc_start = tb->pc;
6821
    cs_base = tb->cs_base;
6822
    flags = tb->flags;
6823
    cflags = tb->cflags;
6824

    
6825
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
6826
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6827
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6828
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6829
    dc->f_st = 0;
6830
    dc->vm86 = (flags >> VM_SHIFT) & 1;
6831
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6832
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
6833
    dc->tf = (flags >> TF_SHIFT) & 1;
6834
    dc->singlestep_enabled = env->singlestep_enabled;
6835
    dc->cc_op = CC_OP_DYNAMIC;
6836
    dc->cs_base = cs_base;
6837
    dc->tb = tb;
6838
    dc->popl_esp_hack = 0;
6839
    /* select memory access functions */
6840
    dc->mem_index = 0;
6841
    if (flags & HF_SOFTMMU_MASK) {
6842
        if (dc->cpl == 3)
6843
            dc->mem_index = 2 * 4;
6844
        else
6845
            dc->mem_index = 1 * 4;
6846
    }
6847
    dc->cpuid_features = env->cpuid_features;
6848
    dc->cpuid_ext_features = env->cpuid_ext_features;
6849
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
6850
#ifdef TARGET_X86_64
6851
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6852
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
6853
#endif
6854
    dc->flags = flags;
6855
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6856
                    (flags & HF_INHIBIT_IRQ_MASK)
6857
#ifndef CONFIG_SOFTMMU
6858
                    || (flags & HF_SOFTMMU_MASK)
6859
#endif
6860
                    );
6861
#if 0
6862
    /* check addseg logic */
6863
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6864
        printf("ERROR addseg\n");
6865
#endif
6866

    
6867
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6868
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
6869
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
6870
#endif
6871
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
6872
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
6873
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
6874
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
6875
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
6876
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
6877
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
6878

    
6879
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6880

    
6881
    dc->is_jmp = DISAS_NEXT;
6882
    pc_ptr = pc_start;
6883
    lj = -1;
6884

    
6885
    for(;;) {
6886
        if (env->nb_breakpoints > 0) {
6887
            for(j = 0; j < env->nb_breakpoints; j++) {
6888
                if (env->breakpoints[j] == pc_ptr) {
6889
                    gen_debug(dc, pc_ptr - dc->cs_base);
6890
                    break;
6891
                }
6892
            }
6893
        }
6894
        if (search_pc) {
6895
            j = gen_opc_ptr - gen_opc_buf;
6896
            if (lj < j) {
6897
                lj++;
6898
                while (lj < j)
6899
                    gen_opc_instr_start[lj++] = 0;
6900
            }
6901
            gen_opc_pc[lj] = pc_ptr;
6902
            gen_opc_cc_op[lj] = dc->cc_op;
6903
            gen_opc_instr_start[lj] = 1;
6904
        }
6905
        pc_ptr = disas_insn(dc, pc_ptr);
6906
        /* stop translation if indicated */
6907
        if (dc->is_jmp)
6908
            break;
6909
        /* if single step mode, we generate only one instruction and
6910
           generate an exception */
6911
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6912
           the flag and abort the translation to give the irqs a
6913
           change to be happen */
6914
        if (dc->tf || dc->singlestep_enabled ||
6915
            (flags & HF_INHIBIT_IRQ_MASK) ||
6916
            (cflags & CF_SINGLE_INSN)) {
6917
            gen_jmp_im(pc_ptr - dc->cs_base);
6918
            gen_eob(dc);
6919
            break;
6920
        }
6921
        /* if too long translation, stop generation too */
6922
        if (gen_opc_ptr >= gen_opc_end ||
6923
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6924
            gen_jmp_im(pc_ptr - dc->cs_base);
6925
            gen_eob(dc);
6926
            break;
6927
        }
6928
    }
6929
    *gen_opc_ptr = INDEX_op_end;
6930
    /* we don't forget to fill the last values */
6931
    if (search_pc) {
6932
        j = gen_opc_ptr - gen_opc_buf;
6933
        lj++;
6934
        while (lj <= j)
6935
            gen_opc_instr_start[lj++] = 0;
6936
    }
6937

    
6938
#ifdef DEBUG_DISAS
6939
    if (loglevel & CPU_LOG_TB_CPU) {
6940
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6941
    }
6942
    if (loglevel & CPU_LOG_TB_IN_ASM) {
6943
        int disas_flags;
6944
        fprintf(logfile, "----------------\n");
6945
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6946
#ifdef TARGET_X86_64
6947
        if (dc->code64)
6948
            disas_flags = 2;
6949
        else
6950
#endif
6951
            disas_flags = !dc->code32;
6952
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6953
        fprintf(logfile, "\n");
6954
        if (loglevel & CPU_LOG_TB_OP_OPT) {
6955
            fprintf(logfile, "OP before opt:\n");
6956
            tcg_dump_ops(&tcg_ctx, logfile);
6957
            fprintf(logfile, "\n");
6958
        }
6959
    }
6960
#endif
6961

    
6962
    if (!search_pc)
6963
        tb->size = pc_ptr - pc_start;
6964
    return 0;
6965
}
6966

    
6967
/* Translate the TB at tb->pc into TCG ops.
 *
 * This is the normal translation entry point: no host-PC to guest-PC
 * mapping is recorded (search_pc == 0).  Returns 0 on success.
 */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    /* search_pc == 0: plain code generation, no opcode/PC table.  */
    return gen_intermediate_code_internal(env, tb, 0);
}
6971

    
6972
/* Re-translate the TB at tb->pc while recording, for every generated op,
 * the guest PC and cc_op state (search_pc == 1).  Used when an exception
 * occurred inside a TB and the precise guest state must be recovered via
 * gen_pc_load().  Returns 0 on success.
 */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    /* search_pc == 1: fill the gen_opc_* side tables during generation.  */
    return gen_intermediate_code_internal(env, tb, 1);
}
6976

    
6977
/* Restore the guest CPU state corresponding to the op at index pc_pos
 * in the gen_opc_* side tables (filled by gen_intermediate_code_pc).
 *
 * Sets env->eip from the recorded linear PC, and env->cc_op when the
 * condition-code state was statically known at translation time.
 * searched_pc and puc are unused here except for debug logging.
 */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int idx;

        fprintf(logfile, "RESTORE:\n");
        for (idx = 0; idx <= pc_pos; idx++)
            if (gen_opc_instr_start[idx])
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n",
                        idx, gen_opc_pc[idx]);
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    /* gen_opc_pc[] records the linear PC; eip is its offset from the
       CS segment base.  */
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;

    /* CC_OP_DYNAMIC means cc_op was not known statically, so the value
       already in env->cc_op is the authoritative one — leave it alone.  */
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}