Revision de9e9d9f

b/target-sparc/translate.c
58 58
#else
59 59
static TCGv cpu_wim;
60 60
#endif
61
/* local register indexes (only used inside old micro ops) */
62
static TCGv cpu_tmp0;
63 61
/* Floating point registers */
64 62
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
65 63

  
......
608 606

  
609 607
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
610 608
{
611
    TCGv r_temp, zero;
609
    TCGv r_temp, zero, t0;
612 610

  
613 611
    r_temp = tcg_temp_new();
612
    t0 = tcg_temp_new();
614 613

  
615 614
    /* old op:
616 615
    if (!(env->y & 1))
......
628 627
    // env->y = (b2 << 31) | (env->y >> 1);
629 628
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
630 629
    tcg_gen_shli_tl(r_temp, r_temp, 31);
631
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
632
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
633
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
634
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
630
    tcg_gen_shri_tl(t0, cpu_y, 1);
631
    tcg_gen_andi_tl(t0, t0, 0x7fffffff);
632
    tcg_gen_or_tl(t0, t0, r_temp);
633
    tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);
635 634

  
636 635
    // b1 = N ^ V;
637
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
636
    gen_mov_reg_N(t0, cpu_psr);
638 637
    gen_mov_reg_V(r_temp, cpu_psr);
639
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
638
    tcg_gen_xor_tl(t0, t0, r_temp);
640 639
    tcg_temp_free(r_temp);
641 640

  
642 641
    // T0 = (b1 << 31) | (T0 >> 1);
643 642
    // src1 = T0;
644
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
643
    tcg_gen_shli_tl(t0, t0, 31);
645 644
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
646
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
645
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
646
    tcg_temp_free(t0);
647 647

  
648 648
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
649 649

  
......
675 675
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
676 676

  
677 677
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
678
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
678
    tcg_gen_trunc_i64_tl(cpu_y, r_temp);
679 679
    tcg_temp_free_i64(r_temp);
680
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
680
    tcg_gen_andi_tl(cpu_y, cpu_y, 0xffffffff);
681 681

  
682 682
    tcg_gen_trunc_i64_tl(dst, r_temp2);
683 683

  
......
714 714
// Z | (N ^ V)
715 715
/* ble: branch taken when Z | (N ^ V) */
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv tmp = tcg_temp_new();

    /* dst = (N ^ V) | Z, each flag extracted from the PSR image in src */
    gen_mov_reg_N(tmp, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, tmp);
    gen_mov_reg_Z(tmp, src);
    tcg_gen_or_tl(dst, dst, tmp);

    tcg_temp_free(tmp);
}
723 725

  
724 726
// N ^ V
725 727
/* bl: branch taken when N ^ V (signed less-than) */
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv tmp = tcg_temp_new();

    gen_mov_reg_V(tmp, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, tmp);

    tcg_temp_free(tmp);
}
731 735

  
732 736
// C | Z
733 737
/* bleu: branch taken when C | Z (unsigned less-or-equal) */
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv tmp = tcg_temp_new();

    gen_mov_reg_Z(tmp, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, tmp);

    tcg_temp_free(tmp);
}
739 745

  
740 746
// C
......
771 777
// !(Z | (N ^ V))
772 778
/* bg: branch taken when !(Z | (N ^ V)) -- the complement of ble */
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
781 783

  
782 784
// !(N ^ V)
783 785
/* bge: branch taken when !(N ^ V) -- the complement of bl */
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
790 790

  
791 791
// !(C | Z)
792 792
/* bgu: branch taken when !(C | Z) -- the complement of bleu */
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
799 797

  
......
843 841
/* fcc in {1, 2, 3}: FCC0 | FCC1 */
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv tmp = tcg_temp_new();

    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(tmp, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, tmp);

    tcg_temp_free(tmp);
}
850 850

  
851 851
// 1 or 2: FCC0 ^ FCC1
852 852
/* fcc in {1, 2}: FCC0 ^ FCC1 */
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv tmp = tcg_temp_new();

    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(tmp, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, tmp);

    tcg_temp_free(tmp);
}
859 861

  
860 862
// 1 or 3: FCC0
......
868 870
/* fcc == 1: FCC0 & !FCC1 */
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv tmp = tcg_temp_new();

    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(tmp, src, fcc_offset);
    /* andc folds the invert-and-mask into a single op: dst & ~tmp */
    tcg_gen_andc_tl(dst, dst, tmp);

    tcg_temp_free(tmp);
}
876 879

  
877 880
// 2 or 3: FCC1
......
885 888
/* fcc == 2: FCC1 & !FCC0 */
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv tmp = tcg_temp_new();

    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(tmp, src, fcc_offset);
    /* dst = FCC1 & ~FCC0 (note the operand order) */
    tcg_gen_andc_tl(dst, tmp, dst);

    tcg_temp_free(tmp);
}
893 897

  
894 898
// 3: FCC0 & FCC1
895 899
/* fcc == 3: FCC0 & FCC1 */
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv tmp = tcg_temp_new();

    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(tmp, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, tmp);

    tcg_temp_free(tmp);
}
902 908

  
903 909
// 0: !(FCC0 | FCC1)
904 910
/* fcc == 0: !(FCC0 | FCC1) */
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv tmp = tcg_temp_new();

    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(tmp, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, tmp);
    /* invert the single-bit result */
    tcg_gen_xori_tl(dst, dst, 0x1);

    tcg_temp_free(tmp);
}
912 920

  
913 921
// 0 or 3: !(FCC0 ^ FCC1)
914 922
/* fcc in {0, 3}: !(FCC0 ^ FCC1) */
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv tmp = tcg_temp_new();

    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(tmp, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, tmp);
    /* invert the single-bit result */
    tcg_gen_xori_tl(dst, dst, 0x1);

    tcg_temp_free(tmp);
}
922 932

  
923 933
// 0 or 2: !FCC0
......
932 942
/* fcc in {0, 2, 3}: !(FCC0 & !FCC1) -- the complement of fbl */
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv tmp = tcg_temp_new();

    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(tmp, src, fcc_offset);
    /* dst = FCC0 & ~FCC1, then invert the single-bit result */
    tcg_gen_andc_tl(dst, dst, tmp);
    tcg_gen_xori_tl(dst, dst, 0x1);

    tcg_temp_free(tmp);
}
941 952

  
942 953
// 0 or 1: !FCC1
......
951 962
/* fcc in {0, 1, 3}: !(FCC1 & !FCC0) -- the complement of fbg */
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv tmp = tcg_temp_new();

    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(tmp, src, fcc_offset);
    /* dst = FCC1 & ~FCC0, then invert the single-bit result */
    tcg_gen_andc_tl(dst, tmp, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);

    tcg_temp_free(tmp);
}
960 972

  
961 973
// !3: !(FCC0 & FCC1)
962 974
/* fcc != 3: !(FCC0 & FCC1) */
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv tmp = tcg_temp_new();

    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(tmp, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, tmp);
    /* invert the single-bit result */
    tcg_gen_xori_tl(dst, dst, 0x1);

    tcg_temp_free(tmp);
}
970 984

  
971 985
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
......
2620 2634
        {
2621 2635
            unsigned int xop = GET_FIELD(insn, 7, 12);
2622 2636
            TCGv cpu_dst = gen_dest_gpr(dc, rd);
2637
            TCGv cpu_tmp0;
2623 2638

  
2624 2639
            if (xop == 0x3a) {  /* generate trap */
2625 2640
                int cond = GET_FIELD(insn, 3, 6);
......
2839 2854
                gen_store_gpr(dc, rd, cpu_dst);
2840 2855
                break;
2841 2856
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2842
                if (!supervisor(dc))
2857
                if (!supervisor(dc)) {
2843 2858
                    goto priv_insn;
2859
                }
2860
                cpu_tmp0 = get_temp_tl(dc);
2844 2861
#ifdef TARGET_SPARC64
2845 2862
                rs1 = GET_FIELD(insn, 13, 17);
2846 2863
                switch (rs1) {
......
3347 3364
                } else {                /* register */
3348 3365
                    rs2 = GET_FIELD(insn, 27, 31);
3349 3366
                    cpu_src2 = gen_load_gpr(dc, rs2);
3367
                    cpu_tmp0 = get_temp_tl(dc);
3350 3368
                    if (insn & (1 << 12)) {
3351 3369
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3352 3370
                    } else {
......
3368 3386
                } else {                /* register */
3369 3387
                    rs2 = GET_FIELD(insn, 27, 31);
3370 3388
                    cpu_src2 = gen_load_gpr(dc, rs2);
3389
                    cpu_tmp0 = get_temp_tl(dc);
3371 3390
                    if (insn & (1 << 12)) {
3372 3391
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3373 3392
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
......
3391 3410
                } else {                /* register */
3392 3411
                    rs2 = GET_FIELD(insn, 27, 31);
3393 3412
                    cpu_src2 = gen_load_gpr(dc, rs2);
3413
                    cpu_tmp0 = get_temp_tl(dc);
3394 3414
                    if (insn & (1 << 12)) {
3395 3415
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3396 3416
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
......
3576 3596
                            simm = GET_FIELDs(insn, 20, 31);
3577 3597
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3578 3598
                        } else { /* register */
3599
                            cpu_tmp0 = get_temp_tl(dc);
3579 3600
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3580 3601
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3581 3602
                        }
......
3586 3607
                            simm = GET_FIELDs(insn, 20, 31);
3587 3608
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3588 3609
                        } else { /* register */
3610
                            cpu_tmp0 = get_temp_tl(dc);
3589 3611
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3590 3612
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3591 3613
                        }
......
3596 3618
                            simm = GET_FIELDs(insn, 20, 31);
3597 3619
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3598 3620
                        } else { /* register */
3621
                            cpu_tmp0 = get_temp_tl(dc);
3599 3622
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3600 3623
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3601 3624
                        }
......
3604 3627
#endif
3605 3628
                    case 0x30:
3606 3629
                        {
3630
                            cpu_tmp0 = get_temp_tl(dc);
3607 3631
                            switch(rd) {
3608 3632
                            case 0: /* wry */
3609 3633
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
......
3757 3781
                                goto illegal_insn;
3758 3782
                            }
3759 3783
#else
3784
                            cpu_tmp0 = get_temp_tl(dc);
3760 3785
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3761 3786
                            gen_helper_wrpsr(cpu_env, cpu_tmp0);
3762 3787
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
......
3772 3797
                        {
3773 3798
                            if (!supervisor(dc))
3774 3799
                                goto priv_insn;
3800
                            cpu_tmp0 = get_temp_tl(dc);
3775 3801
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3776 3802
#ifdef TARGET_SPARC64
3777 3803
                            switch (rd) {
......
3910 3936
                            CHECK_IU_FEATURE(dc, HYPV);
3911 3937
                            if (!hypervisor(dc))
3912 3938
                                goto priv_insn;
3939
                            cpu_tmp0 = get_temp_tl(dc);
3913 3940
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3914 3941
                            switch (rd) {
3915 3942
                            case 0: // hpstate
......
4477 4504

  
4478 4505
                save_state(dc);
4479 4506
                cpu_src1 = get_src1(dc, insn);
4507
                cpu_tmp0 = get_temp_tl(dc);
4480 4508
                if (IS_IMM) {   /* immediate */
4481 4509
                    simm = GET_FIELDs(insn, 19, 31);
4482 4510
                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
......
4500 4528
#endif
4501 4529
            } else {
4502 4530
                cpu_src1 = get_src1(dc, insn);
4531
                cpu_tmp0 = get_temp_tl(dc);
4503 4532
                if (IS_IMM) {   /* immediate */
4504 4533
                    simm = GET_FIELDs(insn, 19, 31);
4505 4534
                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
......
4647 4676
                        gen_address_mask(dc, cpu_addr);
4648 4677
                        t64 = tcg_temp_new_i64();
4649 4678
                        tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4650
                        tcg_gen_trunc_i64_tl(cpu_tmp0, t64);
4651
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4652
                        gen_store_gpr(dc, rd + 1, cpu_tmp0);
4679
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
4680
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
4681
                        gen_store_gpr(dc, rd + 1, cpu_val);
4653 4682
                        tcg_gen_shri_i64(t64, t64, 32);
4654 4683
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
4655 4684
                        tcg_temp_free_i64(t64);
4656
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4685
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
4657 4686
                    }
4658 4687
                    break;
4659 4688
                case 0x9:       /* ldsb, load signed byte */
......
4675 4704
                        tcg_temp_free(r_const);
4676 4705
                    }
4677 4706
                    break;
4678
                case 0x0f:      /* swap, swap register with memory. Also
4679
                                   atomically */
4680
                    CHECK_IU_FEATURE(dc, SWAP);
4681
                    cpu_src1 = gen_load_gpr(dc, rd);
4682
                    gen_address_mask(dc, cpu_addr);
4683
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4684
                    tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4685
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4707
                case 0x0f:
4708
                    /* swap, swap register with memory. Also atomically */
4709
                    {
4710
                        TCGv t0 = get_temp_tl(dc);
4711
                        CHECK_IU_FEATURE(dc, SWAP);
4712
                        cpu_src1 = gen_load_gpr(dc, rd);
4713
                        gen_address_mask(dc, cpu_addr);
4714
                        tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4715
                        tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4716
                        tcg_gen_mov_tl(cpu_val, t0);
4717
                    }
4686 4718
                    break;
4687 4719
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4688 4720
                case 0x10:      /* lda, V9 lduwa, load word alternate */
......
4833 4865
            skip_move: ;
4834 4866
#endif
4835 4867
            } else if (xop >= 0x20 && xop < 0x24) {
4868
                TCGv t0;
4869

  
4836 4870
                if (gen_trap_ifnofpu(dc)) {
4837 4871
                    goto jmp_insn;
4838 4872
                }
......
4840 4874
                switch (xop) {
4841 4875
                case 0x20:      /* ldf, load fpreg */
4842 4876
                    gen_address_mask(dc, cpu_addr);
4843
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4877
                    t0 = get_temp_tl(dc);
4878
                    tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4844 4879
                    cpu_dst_32 = gen_dest_fpr_F(dc);
4845
                    tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4880
                    tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4846 4881
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4847 4882
                    break;
4848 4883
                case 0x21:      /* ldfsr, V9 ldxfsr */
......
4856 4891
                        break;
4857 4892
                    }
4858 4893
#endif
4859
                    {
4860
                        TCGv_i32 t32 = get_temp_i32(dc);
4861
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4862
                        tcg_gen_trunc_tl_i32(t32, cpu_tmp0);
4863
                        gen_helper_ldfsr(cpu_env, t32);
4864
                    }
4894
                    cpu_dst_32 = get_temp_i32(dc);
4895
                    t0 = get_temp_tl(dc);
4896
                    tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4897
                    tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4898
                    gen_helper_ldfsr(cpu_env, cpu_dst_32);
4865 4899
                    break;
4866 4900
                case 0x22:      /* ldqf, load quad fpreg */
4867 4901
                    {
......
4994 5028
                save_state(dc);
4995 5029
                switch (xop) {
4996 5030
                case 0x24: /* stf, store fpreg */
4997
                    gen_address_mask(dc, cpu_addr);
4998
                    cpu_src1_32 = gen_load_fpr_F(dc, rd);
4999
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
5000
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
5031
                    {
5032
                        TCGv t = get_temp_tl(dc);
5033
                        gen_address_mask(dc, cpu_addr);
5034
                        cpu_src1_32 = gen_load_fpr_F(dc, rd);
5035
                        tcg_gen_ext_i32_tl(t, cpu_src1_32);
5036
                        tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5037
                    }
5001 5038
                    break;
5002 5039
                case 0x25: /* stfsr, V9 stxfsr */
5003 5040
                    {
......
5258 5295
        last_pc = dc->pc;
5259 5296
        insn = cpu_ldl_code(env, dc->pc);
5260 5297

  
5261
        cpu_tmp0 = tcg_temp_new();
5262

  
5263 5298
        disas_sparc_insn(dc, insn);
5264 5299
        num_insns++;
5265 5300

  
5266
        tcg_temp_free(cpu_tmp0);
5267

  
5268 5301
        if (dc->is_br)
5269 5302
            break;
5270 5303
        /* if the next PC is different, we abort now */

Also available in: Unified diff