Revision 0573fbfc target-i386/translate.c

--- a/target-i386/translate.c
+++ b/target-i386/translate.c
@@ -1995,6 +1995,98 @@
     }
 }
 
+#ifdef TARGET_X86_64
+#define SVM_movq_T1_im(x) gen_op_movq_T1_im64((x) >> 32, x)
+#else
+#define SVM_movq_T1_im(x) gen_op_movl_T1_im(x)
+#endif
+
+static inline int
+gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
+{
+#if !defined(CONFIG_USER_ONLY)
+    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
+        if (s->cc_op != CC_OP_DYNAMIC)
+            gen_op_set_cc_op(s->cc_op);
+        SVM_movq_T1_im(s->pc - s->cs_base);
+        gen_jmp_im(pc_start - s->cs_base);
+        gen_op_geneflags();
+        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
+        s->cc_op = CC_OP_DYNAMIC;
+        /* FIXME: maybe we could move the io intercept vector to the TB as well
+                  so we know if this is an EOB or not ... let's assume it's not
+                  for now. */
+    }
+#endif
+    return 0;
+}
+
+static inline int svm_is_rep(int prefixes)
+{
+    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
+}
+
+static inline int
+gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
+                              uint64_t type, uint64_t param)
+{
+    if(!(s->flags & (INTERCEPT_SVM_MASK)))
+        /* no SVM activated */
+        return 0;
+    switch(type) {
+        /* CRx and DRx reads/writes */
+        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
+            if (s->cc_op != CC_OP_DYNAMIC) {
+                gen_op_set_cc_op(s->cc_op);
+                s->cc_op = CC_OP_DYNAMIC;
+            }
+            gen_jmp_im(pc_start - s->cs_base);
+            SVM_movq_T1_im(param);
+            gen_op_geneflags();
+            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
+            /* this is a special case as we do not know if the interception occurs
+               so we assume there was none */
+            return 0;
+        case SVM_EXIT_MSR:
+            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
+                if (s->cc_op != CC_OP_DYNAMIC) {
+                    gen_op_set_cc_op(s->cc_op);
+                    s->cc_op = CC_OP_DYNAMIC;
+                }
+                gen_jmp_im(pc_start - s->cs_base);
+                SVM_movq_T1_im(param);
+                gen_op_geneflags();
+                gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
+                /* this is a special case as we do not know if the interception occurs
+                   so we assume there was none */
+                return 0;
+            }
+            break;
+        default:
+            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
+                if (s->cc_op != CC_OP_DYNAMIC) {
+                    gen_op_set_cc_op(s->cc_op);
+                    s->cc_op = CC_OP_EFLAGS;
+                }
+                gen_jmp_im(pc_start - s->cs_base);
+                SVM_movq_T1_im(param);
+                gen_op_geneflags();
+                gen_op_svm_vmexit(type >> 32, type);
+                /* we can optimize this one so TBs don't get longer
+                   than up to vmexit */
+                gen_eob(s);
+                return 1;
+            }
+    }
+    return 0;
+}
+
+static inline int
+gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
+{
+    return gen_svm_check_intercept_param(s, pc_start, type, 0);
+}
+
 static inline void gen_stack_update(DisasContext *s, int addend)
 {
 #ifdef TARGET_X86_64
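Note: the 64-bit type word handed to gen_svm_check_io() carries the SVM exit code in its upper 32 bits and the I/O exit information in its lower bits. The bit positions assembled by the callers further down appear to follow the IOIO exit-information layout of the SVM architecture; the decoding below is an illustrative sketch with hypothetical names (only SVM_IOIO_TYPE_MASK is used by the patch itself):

    /* Assumed meaning of the low bits passed to gen_svm_check_io(). */
    #define IOIO_TYPE_IN    (1 << 0)          /* SVM_IOIO_TYPE_MASK: 1 = IN, 0 = OUT */
    #define IOIO_STR        (1 << 2)          /* the literal 4: string instruction   */
    #define IOIO_REP        (1 << 3)          /* the 8 returned by svm_is_rep()      */
    #define IOIO_SIZE(ot)   (1 << (4 + (ot))) /* operand size: 8, 16 or 32 bit       */
    #define IOIO_ASIZE(af)  (1 << (7 + (af))) /* address size: 16, 32 or 64 bit      */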
@@ -4880,6 +4972,12 @@
         else
             ot = dflag ? OT_LONG : OT_WORD;
         gen_check_io(s, ot, 1, pc_start - s->cs_base);
+        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
+        gen_op_andl_T0_ffff();
+        if (gen_svm_check_io(s, pc_start,
+                             SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
+                             svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
+            break;
         if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
             gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
         } else {
@@ -4893,6 +4991,12 @@
         else
             ot = dflag ? OT_LONG : OT_WORD;
         gen_check_io(s, ot, 1, pc_start - s->cs_base);
+        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
+        gen_op_andl_T0_ffff();
+        if (gen_svm_check_io(s, pc_start,
+                             (1 << (4+ot)) | svm_is_rep(prefixes) |
+                             4 | (1 << (7+s->aflag))))
+            break;
         if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
             gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
         } else {
@@ -4902,6 +5006,7 @@
 
         /************************/
         /* port I/O */
+
     case 0xe4:
     case 0xe5:
         if ((b & 1) == 0)
@@ -4911,6 +5016,10 @@
         val = ldub_code(s->pc++);
         gen_op_movl_T0_im(val);
         gen_check_io(s, ot, 0, pc_start - s->cs_base);
+        if (gen_svm_check_io(s, pc_start,
+                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
+                             (1 << (4+ot))))
+            break;
         gen_op_in[ot]();
         gen_op_mov_reg_T1[ot][R_EAX]();
         break;
@@ -4923,6 +5032,9 @@
         val = ldub_code(s->pc++);
         gen_op_movl_T0_im(val);
         gen_check_io(s, ot, 0, pc_start - s->cs_base);
+        if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
+                             (1 << (4+ot))))
+            break;
         gen_op_mov_TN_reg[ot][1][R_EAX]();
         gen_op_out[ot]();
         break;
@@ -4935,6 +5047,10 @@
         gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
         gen_op_andl_T0_ffff();
         gen_check_io(s, ot, 0, pc_start - s->cs_base);
+        if (gen_svm_check_io(s, pc_start,
+                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
+                             (1 << (4+ot))))
+            break;
         gen_op_in[ot]();
         gen_op_mov_reg_T1[ot][R_EAX]();
         break;
@@ -4947,6 +5063,9 @@
         gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
         gen_op_andl_T0_ffff();
         gen_check_io(s, ot, 0, pc_start - s->cs_base);
+        if (gen_svm_check_io(s, pc_start,
+                             svm_is_rep(prefixes) | (1 << (4+ot))))
+            break;
         gen_op_mov_TN_reg[ot][1][R_EAX]();
         gen_op_out[ot]();
         break;
@@ -5004,6 +5123,8 @@
         val = 0;
         goto do_lret;
     case 0xcf: /* iret */
+        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
+            break;
         if (!s->pe) {
             /* real mode */
             gen_op_iret_real(s->dflag);
@@ -5125,6 +5246,8 @@
         /************************/
         /* flags */
     case 0x9c: /* pushf */
+        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
+            break;
         if (s->vm86 && s->iopl != 3) {
             gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
         } else {
@@ -5135,6 +5258,8 @@
         }
         break;
     case 0x9d: /* popf */
+        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
+            break;
         if (s->vm86 && s->iopl != 3) {
             gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
         } else {
@@ -5348,6 +5473,9 @@
         /* XXX: correct lock test for all insn */
         if (prefixes & PREFIX_LOCK)
             goto illegal_op;
+        if (prefixes & PREFIX_REPZ) {
+            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
+        }
         break;
     case 0x9b: /* fwait */
         if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
@@ -5361,10 +5489,14 @@
         }
         break;
     case 0xcc: /* int3 */
+        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
+            break;
         gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
         break;
     case 0xcd: /* int N */
         val = ldub_code(s->pc++);
+        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
+            break;
         if (s->vm86 && s->iopl != 3) {
             gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
         } else {
@@ -5374,12 +5506,16 @@
     case 0xce: /* into */
         if (CODE64(s))
             goto illegal_op;
+        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
+            break;
         if (s->cc_op != CC_OP_DYNAMIC)
             gen_op_set_cc_op(s->cc_op);
         gen_jmp_im(pc_start - s->cs_base);
         gen_op_into(s->pc - pc_start);
         break;
     case 0xf1: /* icebp (undocumented, exits to external debugger) */
+        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
+            break;
 #if 1
         gen_debug(s, pc_start - s->cs_base);
 #else
@@ -5415,6 +5551,8 @@
                     gen_op_set_inhibit_irq();
                 /* give a chance to handle pending irqs */
                 gen_jmp_im(s->pc - s->cs_base);
+                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VINTR))
+                    break;
                 gen_eob(s);
             } else {
                 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
@@ -5507,13 +5645,21 @@
         if (s->cpl != 0) {
             gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
         } else {
-            if (b & 2)
+            int retval = 0;
+            if (b & 2) {
+                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
                 gen_op_rdmsr();
-            else
+            } else {
+                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
                 gen_op_wrmsr();
+            }
+            if(retval)
+                gen_eob(s);
         }
         break;
     case 0x131: /* rdtsc */
+        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
+            break;
         gen_jmp_im(pc_start - s->cs_base);
         gen_op_rdtsc();
         break;
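Note: for the MSR intercept the param argument forwarded to gen_svm_check_intercept_param() is assumed to mirror the direction flag the SVM architecture reports for MSR accesses, 0 for RDMSR and 1 for WRMSR. A minimal sketch of that convention (hypothetical helper, not part of the patch):

    /* Assumed exit-information convention for MSR intercepts: 0 = read, 1 = write. */
    static inline int svm_msr_param(int is_write)
    {
        return is_write ? 1 : 0;
    }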
@@ -5576,12 +5722,16 @@
         break;
 #endif
     case 0x1a2: /* cpuid */
+        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
+            break;
         gen_op_cpuid();
         break;
     case 0xf4: /* hlt */
         if (s->cpl != 0) {
             gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
         } else {
+            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
+                break;
             if (s->cc_op != CC_OP_DYNAMIC)
                 gen_op_set_cc_op(s->cc_op);
             gen_jmp_im(s->pc - s->cs_base);
@@ -5597,6 +5747,8 @@
         case 0: /* sldt */
             if (!s->pe || s->vm86)
                 goto illegal_op;
+            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
+                break;
             gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
             ot = OT_WORD;
             if (mod == 3)
@@ -5609,6 +5761,8 @@
             if (s->cpl != 0) {
                 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
             } else {
+                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
+                    break;
                 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                 gen_jmp_im(pc_start - s->cs_base);
                 gen_op_lldt_T0();
@@ -5617,6 +5771,8 @@
         case 1: /* str */
             if (!s->pe || s->vm86)
                 goto illegal_op;
+            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
+                break;
             gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
             ot = OT_WORD;
             if (mod == 3)
@@ -5629,6 +5785,8 @@
             if (s->cpl != 0) {
                 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
             } else {
+                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
+                    break;
                 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                 gen_jmp_im(pc_start - s->cs_base);
                 gen_op_ltr_T0();
@@ -5660,6 +5818,8 @@
         case 0: /* sgdt */
             if (mod == 3)
                 goto illegal_op;
+            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
+                break;
             gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
             gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
             gen_op_st_T0_A0[OT_WORD + s->mem_index]();
@@ -5676,6 +5836,8 @@
                     if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                         s->cpl != 0)
                         goto illegal_op;
+                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
+                        break;
                     gen_jmp_im(pc_start - s->cs_base);
 #ifdef TARGET_X86_64
                     if (s->aflag == 2) {
@@ -5700,6 +5862,8 @@
                         gen_op_set_cc_op(s->cc_op);
                         s->cc_op = CC_OP_DYNAMIC;
                     }
+                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
+                        break;
                     gen_jmp_im(s->pc - s->cs_base);
                     gen_op_mwait();
                     gen_eob(s);
@@ -5708,6 +5872,8 @@
                     goto illegal_op;
                 }
             } else { /* sidt */
+                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
+                    break;
                 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
                 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
@@ -5720,11 +5886,63 @@
             break;
         case 2: /* lgdt */
         case 3: /* lidt */
-            if (mod == 3)
-                goto illegal_op;
-            if (s->cpl != 0) {
+            if (mod == 3) {
+                switch(rm) {
+                case 0: /* VMRUN */
+                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
+                        break;
+                    if (s->cc_op != CC_OP_DYNAMIC)
+                        gen_op_set_cc_op(s->cc_op);
+                    gen_jmp_im(s->pc - s->cs_base);
+                    gen_op_vmrun();
+                    s->cc_op = CC_OP_EFLAGS;
+                    gen_eob(s);
+                    break;
+                case 1: /* VMMCALL */
+                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
+                        break;
+                    /* FIXME: cause #UD if hflags & SVM */
+                    gen_op_vmmcall();
+                    break;
+                case 2: /* VMLOAD */
+                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
+                        break;
+                    gen_op_vmload();
+                    break;
+                case 3: /* VMSAVE */
+                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
+                        break;
+                    gen_op_vmsave();
+                    break;
+                case 4: /* STGI */
+                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
+                        break;
+                    gen_op_stgi();
+                    break;
+                case 5: /* CLGI */
+                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
+                        break;
+                    gen_op_clgi();
+                    break;
+                case 6: /* SKINIT */
+                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
+                        break;
+                    gen_op_skinit();
+                    break;
+                case 7: /* INVLPGA */
+                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
+                        break;
+                    gen_op_invlpga();
+                    break;
+                default:
+                    goto illegal_op;
+                }
+            } else if (s->cpl != 0) {
                 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
             } else {
+                if (gen_svm_check_intercept(s, pc_start,
+                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
+                    break;
                 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
                 gen_add_A0_im(s, 2);
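Note: the new SVM instructions reuse the 0f 01 (group 7) encoding of LGDT/LIDT: with mod == 3 there is no memory operand, and rm selects VMRUN through INVLPGA. A rough sketch of the ModRM split that yields the mod, op and rm values tested above (simplified for orientation; the actual decode happens earlier in this opcode's handler):

    modrm = ldub_code(s->pc++);
    mod = (modrm >> 6) & 3;   /* 3 = register form, so no memory operand         */
    op  = (modrm >> 3) & 7;   /* 2 and 3 are the LGDT/LIDT slots reused above    */
    rm  = modrm & 7;          /* 0..7: VMRUN, VMMCALL, VMLOAD, VMSAVE,           */
                              /*       STGI, CLGI, SKINIT, INVLPGA               */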
@@ -5741,6 +5959,8 @@
             }
             break;
         case 4: /* smsw */
+            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
+                break;
             gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
             gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
             break;
@@ -5748,6 +5968,8 @@
             if (s->cpl != 0) {
                 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
             } else {
+                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
+                    break;
                 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                 gen_op_lmsw_T0();
                 gen_jmp_im(s->pc - s->cs_base);
@@ -5772,6 +5994,8 @@
                         goto illegal_op;
                     }
                 } else {
+                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
+                        break;
                     gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                     gen_op_invlpg_A0();
                     gen_jmp_im(s->pc - s->cs_base);
@@ -5788,6 +6012,8 @@
         if (s->cpl != 0) {
             gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
         } else {
+            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVD))
+                break;
             /* nothing to do */
         }
         break;
@@ -5908,11 +6134,13 @@
             case 4:
             case 8:
                 if (b & 2) {
+                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
                     gen_op_mov_TN_reg[ot][0][rm]();
                     gen_op_movl_crN_T0(reg);
                     gen_jmp_im(s->pc - s->cs_base);
                     gen_eob(s);
                 } else {
+                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
 #if !defined(CONFIG_USER_ONLY)
                     if (reg == 8)
                         gen_op_movtl_T0_cr8();
@@ -5945,11 +6173,13 @@
             if (reg == 4 || reg == 5 || reg >= 8)
                 goto illegal_op;
             if (b & 2) {
+                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                 gen_op_mov_TN_reg[ot][0][rm]();
                 gen_op_movl_drN_T0(reg);
                 gen_jmp_im(s->pc - s->cs_base);
                 gen_eob(s);
             } else {
+                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
                 gen_op_mov_reg_T0[ot][rm]();
             }
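Note: the SVM_EXIT_*_CR0/DR0 + reg arithmetic above relies on the exit codes for control- and debug-register accesses being laid out as contiguous per-register ranges, which is also what lets the READ_CR0 ... EXCP_BASE - 1 case range in gen_svm_check_intercept_param() cover all of them. The values below come from the AMD SVM architecture and are shown only for orientation; they are defined elsewhere, not in this patch:

    #define SVM_EXIT_READ_CR0    0x000   /* ... SVM_EXIT_READ_CR15  = 0x00f */
    #define SVM_EXIT_WRITE_CR0   0x010   /* ... SVM_EXIT_WRITE_CR15 = 0x01f */
    #define SVM_EXIT_READ_DR0    0x020   /* ... SVM_EXIT_READ_DR15  = 0x02f */
    #define SVM_EXIT_WRITE_DR0   0x030   /* ... SVM_EXIT_WRITE_DR15 = 0x03f */
    #define SVM_EXIT_EXCP_BASE   0x040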
@@ -5959,6 +6189,7 @@
         if (s->cpl != 0) {
             gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
         } else {
+            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
             gen_op_clts();
             /* abort block because static cpu state changed */
             gen_jmp_im(s->pc - s->cs_base);
@@ -6050,6 +6281,8 @@
         /* ignore for now */
         break;
     case 0x1aa: /* rsm */
+        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
+            break;
         if (!(s->flags & HF_SMM_MASK))
             goto illegal_op;
         if (s->cc_op != CC_OP_DYNAMIC) {
