Revision d9ba4830

b/target-arm/helpers.h
1 1
#define DEF_HELPER(name, ret, args) ret glue(helper_,name) args;
2 2

  
3 3
#ifdef GEN_HELPER
4
#define DEF_HELPER_0_0(name, ret, args) \
5
DEF_HELPER(name, ret, args) \
6
static inline void gen_helper_##name(void) \
7
{ \
8
    tcg_gen_helper_0_0(helper_##name); \
9
}
10
#define DEF_HELPER_0_1(name, ret, args) \
11
DEF_HELPER(name, ret, args) \
12
static inline void gen_helper_##name(TCGv arg1) \
13
{ \
14
    tcg_gen_helper_0_1(helper_##name, arg1); \
15
}
16
#define DEF_HELPER_0_2(name, ret, args) \
17
DEF_HELPER(name, ret, args) \
18
static inline void gen_helper_##name(TCGv arg1, TCGv arg2) \
19
{ \
20
    tcg_gen_helper_0_2(helper_##name, arg1, arg2); \
21
}
22
#define DEF_HELPER_1_0(name, ret, args) \
23
DEF_HELPER(name, ret, args) \
24
static inline void gen_helper_##name(TCGv ret) \
25
{ \
26
    tcg_gen_helper_1_0(helper_##name, ret); \
27
}
4 28
#define DEF_HELPER_1_1(name, ret, args) \
5 29
DEF_HELPER(name, ret, args) \
6 30
static inline void gen_helper_##name(TCGv ret, TCGv arg1) \
......
21 45
    tcg_gen_helper_1_3(helper_##name, ret, arg1, arg2, arg3); \
22 46
}
23 47
#else /* !GEN_HELPER */
48
#define DEF_HELPER_0_0 DEF_HELPER
49
#define DEF_HELPER_0_1 DEF_HELPER
50
#define DEF_HELPER_0_2 DEF_HELPER
51
#define DEF_HELPER_1_0 DEF_HELPER
24 52
#define DEF_HELPER_1_1 DEF_HELPER
25 53
#define DEF_HELPER_1_2 DEF_HELPER
26 54
#define DEF_HELPER_1_3 DEF_HELPER
......
74 102
DEF_HELPER_1_2(usad8, uint32_t, (uint32_t, uint32_t))
75 103

  
76 104
DEF_HELPER_1_3(sel_flags, uint32_t, (uint32_t, uint32_t, uint32_t))
105
DEF_HELPER_0_1(exception, void, (uint32_t))
106
DEF_HELPER_0_0(wfi, void, (void))
107

  
108
DEF_HELPER_0_2(cpsr_write, void, (uint32_t, uint32_t))
109
DEF_HELPER_1_0(cpsr_read, uint32_t, (void))
77 110

  
78 111
#undef DEF_HELPER
112
#undef DEF_HELPER_0_0
113
#undef DEF_HELPER_0_1
114
#undef DEF_HELPER_0_2
115
#undef DEF_HELPER_1_0
79 116
#undef DEF_HELPER_1_1
80 117
#undef DEF_HELPER_1_2
118
#undef DEF_HELPER_1_3
81 119
#undef GEN_HELPER
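The macros above look designed for double expansion: the translator presumably defines GEN_HELPER before including this header (the trailing #undef GEN_HELPER supports that reading), so each DEF_HELPER_n_m entry yields both a helper_<name> prototype and an inline gen_helper_<name>() wrapper around tcg_gen_helper_n_m(), while op_helper.c includes it without GEN_HELPER and gets plain prototypes for the functions it defines. A minimal self-contained sketch of the same trick, with stand-ins (emit_call1 instead of the TCG emitter, uint32_t instead of TCGv, a direct call instead of code generation), not the QEMU sources:

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in for tcg_gen_helper_0_1(): calls the helper directly instead
     * of emitting a call into a translation block. */
    static void emit_call1(void (*fn)(uint32_t), uint32_t arg) { fn(arg); }

    #define DEF_HELPER(name, ret, args) ret helper_##name args;

    #define GEN_HELPER 1            /* the "translator" side of the trick */
    #ifdef GEN_HELPER
    #define DEF_HELPER_0_1(name, ret, args) \
    DEF_HELPER(name, ret, args) \
    static inline void gen_helper_##name(uint32_t arg1) \
    { emit_call1(helper_##name, arg1); }
    #else                           /* the "op_helper.c" side: prototypes only */
    #define DEF_HELPER_0_1 DEF_HELPER
    #endif

    DEF_HELPER_0_1(exception, void, (uint32_t))   /* same shape as the entry above */

    /* The runtime side supplies the body behind the generated prototype. */
    void helper_exception(uint32_t excp) { printf("raise exception %u\n", excp); }

    int main(void)
    {
        gen_helper_exception(3);   /* what the translator-side wrapper invokes */
        return 0;
    }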
b/target-arm/op.c
80 80

  
81 81
OPSUB(rsb, rsc, T0, T1, T0)
82 82

  
83
#define EIP (env->regs[15])
84

  
85
void OPPROTO op_test_eq(void)
86
{
87
    if (env->NZF == 0)
88
        GOTO_LABEL_PARAM(1);;
89
    FORCE_RET();
90
}
91

  
92
void OPPROTO op_test_ne(void)
93
{
94
    if (env->NZF != 0)
95
        GOTO_LABEL_PARAM(1);;
96
    FORCE_RET();
97
}
98

  
99
void OPPROTO op_test_cs(void)
100
{
101
    if (env->CF != 0)
102
        GOTO_LABEL_PARAM(1);
103
    FORCE_RET();
104
}
105

  
106
void OPPROTO op_test_cc(void)
107
{
108
    if (env->CF == 0)
109
        GOTO_LABEL_PARAM(1);
110
    FORCE_RET();
111
}
112

  
113
void OPPROTO op_test_mi(void)
114
{
115
    if ((env->NZF & 0x80000000) != 0)
116
        GOTO_LABEL_PARAM(1);
117
    FORCE_RET();
118
}
119

  
120
void OPPROTO op_test_pl(void)
121
{
122
    if ((env->NZF & 0x80000000) == 0)
123
        GOTO_LABEL_PARAM(1);
124
    FORCE_RET();
125
}
126

  
127
void OPPROTO op_test_vs(void)
128
{
129
    if ((env->VF & 0x80000000) != 0)
130
        GOTO_LABEL_PARAM(1);
131
    FORCE_RET();
132
}
133

  
134
void OPPROTO op_test_vc(void)
135
{
136
    if ((env->VF & 0x80000000) == 0)
137
        GOTO_LABEL_PARAM(1);
138
    FORCE_RET();
139
}
140

  
141
void OPPROTO op_test_hi(void)
142
{
143
    if (env->CF != 0 && env->NZF != 0)
144
        GOTO_LABEL_PARAM(1);
145
    FORCE_RET();
146
}
147

  
148
void OPPROTO op_test_ls(void)
149
{
150
    if (env->CF == 0 || env->NZF == 0)
151
        GOTO_LABEL_PARAM(1);
152
    FORCE_RET();
153
}
154

  
155
void OPPROTO op_test_ge(void)
156
{
157
    if (((env->VF ^ env->NZF) & 0x80000000) == 0)
158
        GOTO_LABEL_PARAM(1);
159
    FORCE_RET();
160
}
161

  
162
void OPPROTO op_test_lt(void)
163
{
164
    if (((env->VF ^ env->NZF) & 0x80000000) != 0)
165
        GOTO_LABEL_PARAM(1);
166
    FORCE_RET();
167
}
168

  
169
void OPPROTO op_test_gt(void)
170
{
171
    if (env->NZF != 0 && ((env->VF ^ env->NZF) & 0x80000000) == 0)
172
        GOTO_LABEL_PARAM(1);
173
    FORCE_RET();
174
}
175

  
176
void OPPROTO op_test_le(void)
177
{
178
    if (env->NZF == 0 || ((env->VF ^ env->NZF) & 0x80000000) != 0)
179
        GOTO_LABEL_PARAM(1);
180
    FORCE_RET();
181
}
182

  
183
void OPPROTO op_test_T0(void)
184
{
185
    if (T0)
186
        GOTO_LABEL_PARAM(1);
187
    FORCE_RET();
188
}
189
void OPPROTO op_testn_T0(void)
190
{
191
    if (!T0)
192
        GOTO_LABEL_PARAM(1);
193
    FORCE_RET();
194
}
195

  
196
void OPPROTO op_movl_T0_cpsr(void)
197
{
198
    /* Execution state bits always read as zero.  */
199
    T0 = cpsr_read(env) & ~CPSR_EXEC;
200
    FORCE_RET();
201
}
202

  
203
void OPPROTO op_movl_T0_spsr(void)
204
{
205
    T0 = env->spsr;
206
}
207

  
208
void OPPROTO op_movl_spsr_T0(void)
209
{
210
    uint32_t mask = PARAM1;
211
    env->spsr = (env->spsr & ~mask) | (T0 & mask);
212
}
213

  
214
void OPPROTO op_movl_cpsr_T0(void)
215
{
216
    cpsr_write(env, T0, PARAM1);
217
    FORCE_RET();
218
}
219

  
220
/* 48 bit signed mul, top 32 bits */
221
void OPPROTO op_imulw_T0_T1(void)
222
{
223
  uint64_t res;
224
  res = (int64_t)((int32_t)T0) * (int64_t)((int32_t)T1);
225
  T0 = res >> 16;
226
}
227

  
228 83
void OPPROTO op_addq_T0_T1(void)
229 84
{
230 85
    uint64_t res;
......
397 252
    FORCE_RET();
398 253
}
399 254

  
400
/* exceptions */
401

  
402
void OPPROTO op_swi(void)
403
{
404
    env->exception_index = EXCP_SWI;
405
    cpu_loop_exit();
406
}
407

  
408
void OPPROTO op_undef_insn(void)
409
{
410
    env->exception_index = EXCP_UDEF;
411
    cpu_loop_exit();
412
}
413

  
414
void OPPROTO op_debug(void)
415
{
416
    env->exception_index = EXCP_DEBUG;
417
    cpu_loop_exit();
418
}
419

  
420
void OPPROTO op_wfi(void)
421
{
422
    env->exception_index = EXCP_HLT;
423
    env->halted = 1;
424
    cpu_loop_exit();
425
}
426

  
427
void OPPROTO op_bkpt(void)
428
{
429
    env->exception_index = EXCP_BKPT;
430
    cpu_loop_exit();
431
}
432

  
433
void OPPROTO op_exception_exit(void)
434
{
435
    env->exception_index = EXCP_EXCEPTION_EXIT;
436
    cpu_loop_exit();
437
}
438

  
439 255
/* VFP support.  We follow the convention used for VFP instrunctions:
440 256
   Single precition routines have a "s" suffix, double precision a
441 257
   "d" suffix.  */
b/target-arm/op_helper.c
436 436
    res |= do_usat(((int32_t)x) >> 16, shift) << 16;
437 437
    return res;
438 438
}
439

  
440
void HELPER(wfi)(void)
441
{
442
    env->exception_index = EXCP_HLT;
443
    env->halted = 1;
444
    cpu_loop_exit();
445
}
446

  
447
void HELPER(exception)(uint32_t excp)
448
{
449
    env->exception_index = excp;
450
    cpu_loop_exit();
451
}
452

  
453
uint32_t HELPER(cpsr_read)(void)
454
{
455
    return cpsr_read(env) & ~CPSR_EXEC;
456
}
457

  
458
void HELPER(cpsr_write)(uint32_t val, uint32_t mask)
459
{
460
    cpsr_write(env, val, mask);
461
}
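These helpers run at execution time and leave the generated code through cpu_loop_exit(). A toy, self-contained sketch of that control flow, using setjmp/longjmp and hypothetical toy_* names rather than QEMU's actual execution loop:

    #include <setjmp.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Toy CPU state: just the exception number and the jump buffer the
     * execution loop would arm before entering generated code. */
    static struct { uint32_t exception_index; jmp_buf loop; } toy_env;

    static void toy_cpu_loop_exit(void) { longjmp(toy_env.loop, 1); }

    static void toy_helper_exception(uint32_t excp)
    {
        toy_env.exception_index = excp;   /* mirrors HELPER(exception) above */
        toy_cpu_loop_exit();
    }

    int main(void)
    {
        if (setjmp(toy_env.loop) == 0) {
            toy_helper_exception(2);      /* raised from "generated" code */
            puts("not reached");
        }
        printf("back in the loop, exception_index=%u\n",
               toy_env.exception_index);
        return 0;
    }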
b/target-arm/translate.c
130 130
    temps[i] = tmp;
131 131
}
132 132

  
133
static inline TCGv load_cpu_offset(int offset)
134
{
135
    TCGv tmp = new_tmp();
136
    tcg_gen_ld_i32(tmp, cpu_env, offset);
137
    return tmp;
138
}
139

  
140
#define load_cpu_field(name) load_cpu_offset(offsetof(CPUState, name))
141

  
142
static inline void store_cpu_offset(TCGv var, int offset)
143
{
144
    tcg_gen_st_i32(var, cpu_env, offset);
145
    dead_tmp(var);
146
}
147

  
148
#define store_cpu_field(var, name) \
149
    store_cpu_offset(var, offsetof(CPUState, name))
150

  
133 151
/* Set a variable to the value of a CPU register.  */
134 152
static void load_reg_var(DisasContext *s, TCGv var, int reg)
135 153
{
......
222 240
/* Copy the most significant bit of T0 to all bits of T1.  */
223 241
#define gen_op_signbit_T1_T0() tcg_gen_sari_i32(cpu_T[1], cpu_T[0], 31)
224 242

  
243
#define gen_set_cpsr(var, mask) gen_helper_cpsr_write(var, tcg_const_i32(mask))
244
/* Set NZCV flags from the high 4 bits of var.  */
245
#define gen_set_nzcv(var) gen_set_cpsr(var, CPSR_NZCV)
246

  
247
static void gen_exception(int excp)
248
{
249
    TCGv tmp = new_tmp();
250
    tcg_gen_movi_i32(tmp, excp);
251
    gen_helper_exception(tmp);
252
    dead_tmp(tmp);
253
}
254

  
225 255
static void gen_smul_dual(TCGv a, TCGv b)
226 256
{
227 257
    TCGv tmp1 = new_tmp();
......
293 323
    tcg_gen_or_i32(dest, base, val);
294 324
}
295 325

  
296
static void gen_op_roundqd_T0_T1(void)
326
/* Round the top 32 bits of a 64-bit value.  */
327
static void gen_roundqd(TCGv a, TCGv b)
297 328
{
298
    tcg_gen_shri_i32(cpu_T[0], cpu_T[0], 31);
299
    tcg_gen_add_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
329
    tcg_gen_shri_i32(a, a, 31);
330
    tcg_gen_add_i32(a, a, b);
300 331
}
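gen_roundqd() folds the top bit of the low word into the high word: with the low word in a and the high word in b, it leaves hi + (lo >> 31) in a. A plain-C restatement of that arithmetic (hypothetical names, no TCG):

    #include <stdint.h>
    #include <stdio.h>

    /* Round a 64-bit value's top 32 bits: add bit 31 of the low word. */
    static uint32_t roundqd(uint32_t lo, uint32_t hi)
    {
        return hi + (lo >> 31);   /* shri 31, then add */
    }

    int main(void)
    {
        uint64_t v = 0x00000001C0000000ULL;   /* halfway bit set */
        printf("0x%08x\n", roundqd((uint32_t)v, (uint32_t)(v >> 32)));  /* 0x00000002 */
        return 0;
    }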
301 332

  
302 333
/* FIXME: Most targets have native widening multiplication.
......
316 347
}
317 348

  
318 349
/* Signed 32x32->64 multiply.  */
319
static void gen_op_imull_T0_T1(void)
350
static void gen_imull(TCGv a, TCGv b)
320 351
{
321 352
    TCGv tmp1 = tcg_temp_new(TCG_TYPE_I64);
322 353
    TCGv tmp2 = tcg_temp_new(TCG_TYPE_I64);
323 354

  
324
    tcg_gen_ext_i32_i64(tmp1, cpu_T[0]);
325
    tcg_gen_ext_i32_i64(tmp2, cpu_T[1]);
355
    tcg_gen_ext_i32_i64(tmp1, a);
356
    tcg_gen_ext_i32_i64(tmp2, b);
326 357
    tcg_gen_mul_i64(tmp1, tmp1, tmp2);
327
    tcg_gen_trunc_i64_i32(cpu_T[0], tmp1);
358
    tcg_gen_trunc_i64_i32(a, tmp1);
328 359
    tcg_gen_shri_i64(tmp1, tmp1, 32);
329
    tcg_gen_trunc_i64_i32(cpu_T[1], tmp1);
360
    tcg_gen_trunc_i64_i32(b, tmp1);
361
}
362
#define gen_op_imull_T0_T1() gen_imull(cpu_T[0], cpu_T[1])
363

  
364
/* Signed 32x16 multiply, top 32 bits.  */
365
static void gen_imulw(TCGv a, TCGv b)
366
{
367
  gen_imull(a, b);
368
  tcg_gen_shri_i32(a, a, 16);
369
  tcg_gen_shli_i32(b, b, 16);
370
  tcg_gen_or_i32(a, a, b);
330 371
}
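gen_imulw() reproduces the removed op_imulw_T0_T1: bits [47:16] of the 48-bit signed product, rebuilt from the 64-bit product's halves by the shri/shli/or sequence. A plain-C sketch checking that identity (hypothetical names):

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    static uint32_t imulw(int32_t a, int32_t b)
    {
        int64_t prod = (int64_t)a * (int64_t)b;        /* gen_imull: 32x32 -> 64 */
        uint32_t lo = (uint32_t)prod;
        uint32_t hi = (uint32_t)((uint64_t)prod >> 32);
        return (lo >> 16) | (hi << 16);                /* shri 16, shli 16, or */
    }

    int main(void)
    {
        int32_t a = -123456, b = 98765;
        assert(imulw(a, b) == (uint32_t)(((int64_t)a * b) >> 16));
        printf("imulw(%d, %d) = 0x%08x\n", a, b, imulw(a, b));
        return 0;
    }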
331 372

  
332 373
/* Swap low and high halfwords.  */
......
379 420
/* T0 += T1 + CF.  */
380 421
static void gen_adc_T0_T1(void)
381 422
{
382
    TCGv tmp = new_tmp();
423
    TCGv tmp;
383 424
    gen_op_addl_T0_T1();
384
    tcg_gen_ld_i32(tmp, cpu_env, offsetof(CPUState, CF));
425
    tmp = load_cpu_field(CF);
385 426
    tcg_gen_add_i32(cpu_T[0], cpu_T[0], tmp);
386 427
    dead_tmp(tmp);
387 428
}
......
389 430
/* dest = T0 - T1 + CF - 1.  */
390 431
static void gen_sub_carry(TCGv dest, TCGv t0, TCGv t1)
391 432
{
392
    TCGv tmp = new_tmp();
433
    TCGv tmp;
393 434
    tcg_gen_sub_i32(dest, t0, t1);
394
    tcg_gen_ld_i32(tmp, cpu_env, offsetof(CPUState, CF));
435
    tmp = load_cpu_field(CF);
395 436
    tcg_gen_add_i32(dest, dest, tmp);
396 437
    tcg_gen_subi_i32(dest, dest, 1);
397 438
    dead_tmp(tmp);
......
482 523
                shifter_out_im(var, shift - 1);
483 524
            tcg_gen_rori_i32(var, var, shift); break;
484 525
        } else {
485
            TCGv tmp = new_tmp();
486
            tcg_gen_ld_i32(tmp, cpu_env, offsetof(CPUState, CF));
526
            TCGv tmp = load_cpu_field(CF);
487 527
            if (flags)
488 528
                shifter_out_im(var, 0);
489 529
            tcg_gen_shri_i32(var, var, 1);
......
503 543
    case 4: gen_pas_helper(glue(pfx,add8)); break; \
504 544
    case 7: gen_pas_helper(glue(pfx,sub8)); break; \
505 545
    }
506
void gen_arm_parallel_addsub(int op1, int op2, TCGv a, TCGv b)
546
static void gen_arm_parallel_addsub(int op1, int op2, TCGv a, TCGv b)
507 547
{
508 548
    TCGv tmp;
509 549

  
......
548 588
    case 5: gen_pas_helper(glue(pfx,sub16)); break; \
549 589
    case 6: gen_pas_helper(glue(pfx,subaddx)); break; \
550 590
    }
551
void gen_thumb2_parallel_addsub(int op1, int op2, TCGv a, TCGv b)
591
static void gen_thumb2_parallel_addsub(int op1, int op2, TCGv a, TCGv b)
552 592
{
553 593
    TCGv tmp;
554 594

  
......
583 623
}
584 624
#undef PAS_OP
585 625

  
586
static GenOpFunc1 *gen_test_cc[14] = {
587
    gen_op_test_eq,
588
    gen_op_test_ne,
589
    gen_op_test_cs,
590
    gen_op_test_cc,
591
    gen_op_test_mi,
592
    gen_op_test_pl,
593
    gen_op_test_vs,
594
    gen_op_test_vc,
595
    gen_op_test_hi,
596
    gen_op_test_ls,
597
    gen_op_test_ge,
598
    gen_op_test_lt,
599
    gen_op_test_gt,
600
    gen_op_test_le,
601
};
626
static void gen_test_cc(int cc, int label)
627
{
628
    TCGv tmp;
629
    TCGv tmp2;
630
    TCGv zero;
631
    int inv;
632

  
633
    zero = tcg_const_i32(0);
634
    switch (cc) {
635
    case 0: /* eq: Z */
636
        tmp = load_cpu_field(NZF);
637
        tcg_gen_brcond_i32(TCG_COND_EQ, tmp, zero, label);
638
        break;
639
    case 1: /* ne: !Z */
640
        tmp = load_cpu_field(NZF);
641
        tcg_gen_brcond_i32(TCG_COND_NE, tmp, zero, label);
642
        break;
643
    case 2: /* cs: C */
644
        tmp = load_cpu_field(CF);
645
        tcg_gen_brcond_i32(TCG_COND_NE, tmp, zero, label);
646
        break;
647
    case 3: /* cc: !C */
648
        tmp = load_cpu_field(CF);
649
        tcg_gen_brcond_i32(TCG_COND_EQ, tmp, zero, label);
650
        break;
651
    case 4: /* mi: N */
652
        tmp = load_cpu_field(NZF);
653
        tcg_gen_brcond_i32(TCG_COND_LT, tmp, zero, label);
654
        break;
655
    case 5: /* pl: !N */
656
        tmp = load_cpu_field(NZF);
657
        tcg_gen_brcond_i32(TCG_COND_GE, tmp, zero, label);
658
        break;
659
    case 6: /* vs: V */
660
        tmp = load_cpu_field(VF);
661
        tcg_gen_brcond_i32(TCG_COND_LT, tmp, zero, label);
662
        break;
663
    case 7: /* vc: !V */
664
        tmp = load_cpu_field(VF);
665
        tcg_gen_brcond_i32(TCG_COND_GE, tmp, zero, label);
666
        break;
667
    case 8: /* hi: C && !Z */
668
        inv = gen_new_label();
669
        tmp = load_cpu_field(CF);
670
        tcg_gen_brcond_i32(TCG_COND_EQ, tmp, zero, inv);
671
        dead_tmp(tmp);
672
        tmp = load_cpu_field(NZF);
673
        tcg_gen_brcond_i32(TCG_COND_NE, tmp, zero, label);
674
        gen_set_label(inv);
675
        break;
676
    case 9: /* ls: !C || Z */
677
        tmp = load_cpu_field(CF);
678
        tcg_gen_brcond_i32(TCG_COND_EQ, tmp, zero, label);
679
        dead_tmp(tmp);
680
        tmp = load_cpu_field(NZF);
681
        tcg_gen_brcond_i32(TCG_COND_EQ, tmp, zero, label);
682
        break;
683
    case 10: /* ge: N == V -> N ^ V == 0 */
684
        tmp = load_cpu_field(VF);
685
        tmp2 = load_cpu_field(NZF);
686
        tcg_gen_xor_i32(tmp, tmp, tmp2);
687
        dead_tmp(tmp2);
688
        tcg_gen_brcond_i32(TCG_COND_GE, tmp, zero, label);
689
        break;
690
    case 11: /* lt: N != V -> N ^ V != 0 */
691
        tmp = load_cpu_field(VF);
692
        tmp2 = load_cpu_field(NZF);
693
        tcg_gen_xor_i32(tmp, tmp, tmp2);
694
        dead_tmp(tmp2);
695
        tcg_gen_brcond_i32(TCG_COND_LT, tmp, zero, label);
696
        break;
697
    case 12: /* gt: !Z && N == V */
698
        inv = gen_new_label();
699
        tmp = load_cpu_field(NZF);
700
        tcg_gen_brcond_i32(TCG_COND_EQ, tmp, zero, inv);
701
        dead_tmp(tmp);
702
        tmp = load_cpu_field(VF);
703
        tmp2 = load_cpu_field(NZF);
704
        tcg_gen_xor_i32(tmp, tmp, tmp2);
705
        dead_tmp(tmp2);
706
        tcg_gen_brcond_i32(TCG_COND_GE, tmp, zero, label);
707
        gen_set_label(inv);
708
        break;
709
    case 13: /* le: Z || N != V */
710
        tmp = load_cpu_field(NZF);
711
        tcg_gen_brcond_i32(TCG_COND_EQ, tmp, zero, label);
712
        dead_tmp(tmp);
713
        tmp = load_cpu_field(VF);
714
        tmp2 = load_cpu_field(NZF);
715
        tcg_gen_xor_i32(tmp, tmp, tmp2);
716
        dead_tmp(tmp2);
717
        tcg_gen_brcond_i32(TCG_COND_LT, tmp, zero, label);
718
        break;
719
    default:
720
        fprintf(stderr, "Bad condition code 0x%x\n", cc);
721
        abort();
722
    }
723
    dead_tmp(tmp);
724
}
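gen_test_cc() open-codes each condition with tcg_gen_brcond_i32 against zero instead of dispatching through the removed gen_op_test_* table; the callers below change from gen_test_cc[cond ^ 1](s->condlabel) to gen_test_cc(cond ^ 1, s->condlabel). The two-branch pattern for GT (case 12) is the least obvious part, so here is a plain-C sketch of the control flow it emits, with printf standing in for the caller's branch target (hypothetical names, not generated code):

    #include <stdint.h>
    #include <stdio.h>

    /* GT is !Z && N == V: NZF == 0 encodes Z, bit 31 of NZF is N, bit 31 of
     * VF is V.  The first branch skips to the local "inv" label when Z is
     * set; the second tests N == V via the sign of VF ^ NZF. */
    static void branch_if_gt(int32_t nzf, int32_t vf)
    {
        if (nzf == 0)                   /* brcond EQ on NZF -> goto inv */
            goto inv;
        if ((vf ^ nzf) >= 0)            /* brcond GE on VF ^ NZF -> taken */
            goto taken;
    inv:
        printf("GT false\n");
        return;
    taken:
        printf("GT true, branch taken\n");
    }

    int main(void)
    {
        branch_if_gt(1, 0);    /* !Z, N == V -> true  */
        branch_if_gt(0, 0);    /* Z          -> false */
        branch_if_gt(-1, 0);   /* N != V     -> false */
        return 0;
    }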
602 725

  
603 726
const uint8_t table_logic_cc[16] = {
604 727
    1, /* and */
......
633 756
    gen_op_rorl_T1_T0_cc,
634 757
};
635 758

  
636
/* Set PC and thumb state from T0.  Clobbers T0.  */
637
static inline void gen_bx(DisasContext *s)
759
/* Set PC and Thumb state from an immediate address.  */
760
static inline void gen_bx_im(DisasContext *s, uint32_t addr)
638 761
{
639 762
    TCGv tmp;
640 763

  
641 764
    s->is_jmp = DISAS_UPDATE;
642 765
    tmp = new_tmp();
643
    tcg_gen_andi_i32(tmp, cpu_T[0], 1);
644
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, thumb));
766
    if (s->thumb != (addr & 1)) {
767
        tcg_gen_movi_i32(tmp, addr & 1);
768
        tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, thumb));
769
    }
770
    tcg_gen_movi_i32(tmp, addr & ~1);
771
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, regs[15]));
645 772
    dead_tmp(tmp);
646
    tcg_gen_andi_i32(cpu_T[0], cpu_T[0], ~1);
647
    tcg_gen_st_i32(cpu_T[0], cpu_env, offsetof(CPUState, regs[15]));
773
}
774

  
775
/* Set PC and Thumb state from var.  var is marked as dead.  */
776
static inline void gen_bx(DisasContext *s, TCGv var)
777
{
778
    TCGv tmp;
779

  
780
    s->is_jmp = DISAS_UPDATE;
781
    tmp = new_tmp();
782
    tcg_gen_andi_i32(tmp, var, 1);
783
    store_cpu_field(tmp, thumb);
784
    tcg_gen_andi_i32(var, var, ~1);
785
    store_cpu_field(var, regs[15]);
786
}
787

  
788
/* TODO: This should be removed.  Use gen_bx instead.  */
789
static inline void gen_bx_T0(DisasContext *s)
790
{
791
    TCGv tmp = new_tmp();
792
    tcg_gen_mov_i32(tmp, cpu_T[0]);
793
    gen_bx(s, tmp);
648 794
}
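gen_bx() stores bit 0 of the target into env->thumb and the remaining bits into regs[15]. A toy sketch of that run-time effect, using a hypothetical struct rather than QEMU's CPUState:

    #include <stdint.h>
    #include <stdio.h>

    struct toy_cpu { uint32_t regs[16]; uint32_t thumb; };

    static void toy_bx(struct toy_cpu *cpu, uint32_t target)
    {
        cpu->thumb    = target & 1;    /* tcg_gen_andi_i32(tmp, var, 1)  */
        cpu->regs[15] = target & ~1u;  /* tcg_gen_andi_i32(var, var, ~1) */
    }

    int main(void)
    {
        struct toy_cpu cpu = { {0}, 0 };
        toy_bx(&cpu, 0x8001);          /* odd target -> Thumb entry point */
        printf("pc=0x%08x thumb=%u\n", cpu.regs[15], cpu.thumb);
        return 0;
    }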
649 795

  
650 796
#if defined(CONFIG_USER_ONLY)
......
1312 1458
            return 1;
1313 1459
        }
1314 1460
        gen_op_shll_T1_im(28);
1315
        gen_op_movl_T0_T1();
1316
        gen_op_movl_cpsr_T0(0xf0000000);
1461
        gen_set_nzcv(cpu_T[1]);
1317 1462
        break;
1318 1463
    case 0x401: case 0x405: case 0x409: case 0x40d:	/* TBCST */
1319 1464
        rd = (insn >> 12) & 0xf;
......
1359 1504
        case 3:
1360 1505
            return 1;
1361 1506
        }
1362
        gen_op_movl_cpsr_T0(0xf0000000);
1507
        gen_set_nzcv(cpu_T[0]);
1363 1508
        break;
1364 1509
    case 0x01c: case 0x41c: case 0x81c: case 0xc1c:	/* WACC */
1365 1510
        wrd = (insn >> 12) & 0xf;
......
1405 1550
        case 3:
1406 1551
            return 1;
1407 1552
        }
1408
        gen_op_movl_T1_im(0xf0000000);
1409
        gen_op_andl_T0_T1();
1410
        gen_op_movl_cpsr_T0(0xf0000000);
1553
        gen_set_nzcv(cpu_T[0]);
1411 1554
        break;
1412 1555
    case 0x103: case 0x503: case 0x903: case 0xd03:	/* TMOVMSK */
1413 1556
        rd = (insn >> 12) & 0xf;
......
2246 2389
                    }
2247 2390
                    if (rd == 15) {
2248 2391
                        /* Set the 4 flag bits in the CPSR.  */
2249
                        gen_op_movl_cpsr_T0(0xf0000000);
2392
                        gen_set_nzcv(cpu_T[0]);
2250 2393
                    } else
2251 2394
                        gen_movl_reg_T0(s, rd);
2252 2395
                } else {
......
2745 2888
    if (__builtin_expect(s->singlestep_enabled, 0)) {
2746 2889
        /* An indirect jump so that we still trigger the debug exception.  */
2747 2890
        if (s->thumb)
2748
          dest |= 1;
2749
        gen_op_movl_T0_im(dest);
2750
        gen_bx(s);
2891
            dest |= 1;
2892
        gen_bx_im(s, dest);
2751 2893
    } else {
2752 2894
        gen_goto_tb(s, 0, dest);
2753 2895
        s->is_jmp = DISAS_TB_JUMP;
2754 2896
    }
2755 2897
}
2756 2898

  
2757
static inline void gen_mulxy(int x, int y)
2899
static inline void gen_mulxy(TCGv t0, TCGv t1, int x, int y)
2758 2900
{
2759 2901
    if (x)
2760
        tcg_gen_sari_i32(cpu_T[0], cpu_T[0], 16);
2902
        tcg_gen_sari_i32(t0, t0, 16);
2761 2903
    else
2762
        gen_sxth(cpu_T[0]);
2904
        gen_sxth(t0);
2763 2905
    if (y)
2764
        gen_op_sarl_T1_im(16);
2906
        tcg_gen_sari_i32(t1, t1, 16);
2765 2907
    else
2766
        gen_sxth(cpu_T[1]);
2767
    gen_op_mul_T0_T1();
2908
        gen_sxth(t1);
2909
    tcg_gen_mul_i32(t0, t0, t1);
2768 2910
}
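The reworked gen_mulxy() takes its operands as TCGv arguments; x and y pick the top (sari 16) or bottom (sign-extended) 16-bit half of each operand before the 32-bit multiply. A plain-C sketch of that selection (hypothetical names):

    #include <stdint.h>
    #include <stdio.h>

    static int32_t mulxy(int32_t t0, int32_t t1, int x, int y)
    {
        int32_t a = x ? (t0 >> 16) : (int16_t)t0;   /* sari 16  vs  sxth */
        int32_t b = y ? (t1 >> 16) : (int16_t)t1;
        return a * b;                               /* tcg_gen_mul_i32 */
    }

    int main(void)
    {
        printf("%d\n", mulxy(-32768, 2 << 16, 0, 1));   /* -32768 * 2 = -65536 */
        return 0;
    }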
2769 2911

  
2770 2912
/* Return the mask of PSR bits set by a MSR instruction.  */
......
2799 2941
/* Returns nonzero if access to the PSR is not permitted.  */
2800 2942
static int gen_set_psr_T0(DisasContext *s, uint32_t mask, int spsr)
2801 2943
{
2944
    TCGv tmp;
2802 2945
    if (spsr) {
2803 2946
        /* ??? This is also undefined in system mode.  */
2804 2947
        if (IS_USER(s))
2805 2948
            return 1;
2806
        gen_op_movl_spsr_T0(mask);
2949

  
2950
        tmp = load_cpu_field(spsr);
2951
        tcg_gen_andi_i32(tmp, tmp, ~mask);
2952
        tcg_gen_andi_i32(cpu_T[0], cpu_T[0], mask);
2953
        tcg_gen_or_i32(tmp, tmp, cpu_T[0]);
2954
        store_cpu_field(tmp, spsr);
2807 2955
    } else {
2808
        gen_op_movl_cpsr_T0(mask);
2956
        gen_set_cpsr(cpu_T[0], mask);
2809 2957
    }
2810 2958
    gen_lookup_tb(s);
2811 2959
    return 0;
......
2814 2962
/* Generate an old-style exception return.  */
2815 2963
static void gen_exception_return(DisasContext *s)
2816 2964
{
2965
    TCGv tmp;
2817 2966
    gen_set_pc_T0();
2818
    gen_op_movl_T0_spsr();
2819
    gen_op_movl_cpsr_T0(0xffffffff);
2967
    tmp = load_cpu_field(spsr);
2968
    gen_set_cpsr(tmp, 0xffffffff);
2969
    dead_tmp(tmp);
2820 2970
    s->is_jmp = DISAS_UPDATE;
2821 2971
}
2822 2972

  
2823 2973
/* Generate a v6 exception return.  */
2824 2974
static void gen_rfe(DisasContext *s)
2825 2975
{
2826
    gen_op_movl_cpsr_T0(0xffffffff);
2976
    gen_set_cpsr(cpu_T[0], 0xffffffff);
2827 2977
    gen_op_movl_T0_T2();
2828 2978
    gen_set_pc_T0();
2829 2979
    s->is_jmp = DISAS_UPDATE;
......
2836 2986
        uint32_t val = (s->condexec_cond << 4) | (s->condexec_mask >> 1);
2837 2987
        TCGv tmp = new_tmp();
2838 2988
        tcg_gen_movi_i32(tmp, val);
2839
        tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, condexec_bits));
2840
        dead_tmp(tmp);
2989
        store_cpu_field(tmp, condexec_bits);
2841 2990
    }
2842 2991
}
2843 2992

  
......
5027 5176
                gen_op_addl_T1_im(offset);
5028 5177
            gen_movl_T0_reg(s, 14);
5029 5178
            gen_ldst(stl, s);
5030
            gen_op_movl_T0_cpsr();
5179
            gen_helper_cpsr_read(cpu_T[0]);
5031 5180
            gen_op_addl_T1_im(4);
5032 5181
            gen_ldst(stl, s);
5033 5182
            if (insn & (1 << 21)) {
......
5089 5238
            int32_t offset;
5090 5239

  
5091 5240
            val = (uint32_t)s->pc;
5092
            gen_op_movl_T0_im(val);
5093
            gen_movl_reg_T0(s, 14);
5241
            tmp = new_tmp();
5242
            tcg_gen_movi_i32(tmp, val);
5243
            store_reg(s, 14, tmp);
5094 5244
            /* Sign-extend the 24-bit offset */
5095 5245
            offset = (((int32_t)insn) << 8) >> 8;
5096 5246
            /* offset * 4 + bit24 * 2 + (thumb bit) */
5097 5247
            val += (offset << 2) | ((insn >> 23) & 2) | 1;
5098 5248
            /* pipeline offset */
5099 5249
            val += 4;
5100
            gen_op_movl_T0_im(val);
5101
            gen_bx(s);
5250
            gen_bx_im(s, val);
5102 5251
            return;
5103 5252
        } else if ((insn & 0x0e000f00) == 0x0c000100) {
5104 5253
            if (arm_feature(env, ARM_FEATURE_IWMMXT)) {
......
5144 5293
        /* if not always execute, we generate a conditional jump to
5145 5294
           next instruction */
5146 5295
        s->condlabel = gen_new_label();
5147
        gen_test_cc[cond ^ 1](s->condlabel);
5296
        gen_test_cc(cond ^ 1, s->condlabel);
5148 5297
        s->condjmp = 1;
5149 5298
    }
5150 5299
    if ((insn & 0x0f900000) == 0x03000000) {
......
5201 5350
                if (op1 & 2) {
5202 5351
                    if (IS_USER(s))
5203 5352
                        goto illegal_op;
5204
                    gen_op_movl_T0_spsr();
5353
                    tmp = load_cpu_field(spsr);
5205 5354
                } else {
5206
                    gen_op_movl_T0_cpsr();
5355
                    tmp = new_tmp();
5356
                    gen_helper_cpsr_read(tmp);
5207 5357
                }
5208
                gen_movl_reg_T0(s, rd);
5358
                store_reg(s, rd, tmp);
5209 5359
            }
5210 5360
            break;
5211 5361
        case 0x1:
5212 5362
            if (op1 == 1) {
5213 5363
                /* branch/exchange thumb (bx).  */
5214
                gen_movl_T0_reg(s, rm);
5215
                gen_bx(s);
5364
                tmp = load_reg(s, rm);
5365
                gen_bx(s, tmp);
5216 5366
            } else if (op1 == 3) {
5217 5367
                /* clz */
5218 5368
                rd = (insn >> 12) & 0xf;
......
5227 5377
            if (op1 == 1) {
5228 5378
                ARCH(5J); /* bxj */
5229 5379
                /* Trivial implementation equivalent to bx.  */
5230
                gen_movl_T0_reg(s, rm);
5231
                gen_bx(s);
5380
                tmp = load_reg(s, rm);
5381
                gen_bx(s, tmp);
5232 5382
            } else {
5233 5383
                goto illegal_op;
5234 5384
            }
......
5238 5388
              goto illegal_op;
5239 5389

  
5240 5390
            /* branch link/exchange thumb (blx) */
5241
            val = (uint32_t)s->pc;
5242
            gen_op_movl_T1_im(val);
5243
            gen_movl_T0_reg(s, rm);
5244
            gen_movl_reg_T1(s, 14);
5245
            gen_bx(s);
5391
            tmp = load_reg(s, rm);
5392
            tmp2 = new_tmp();
5393
            tcg_gen_movi_i32(tmp2, s->pc);
5394
            store_reg(s, 14, tmp2);
5395
            gen_bx(s, tmp);
5246 5396
            break;
5247 5397
        case 0x5: /* saturating add/subtract */
5248 5398
            rd = (insn >> 12) & 0xf;
......
5261 5411
            gen_set_condexec(s);
5262 5412
            gen_op_movl_T0_im((long)s->pc - 4);
5263 5413
            gen_set_pc_T0();
5264
            gen_op_bkpt();
5414
            gen_exception(EXCP_BKPT);
5265 5415
            s->is_jmp = DISAS_JUMP;
5266 5416
            break;
5267 5417
        case 0x8: /* signed multiply */
......
5279 5429
                    gen_op_sarl_T1_im(16);
5280 5430
                else
5281 5431
                    gen_sxth(cpu_T[1]);
5282
                gen_op_imulw_T0_T1();
5432
                gen_imulw(cpu_T[0], cpu_T[1]);
5283 5433
                if ((sh & 2) == 0) {
5284 5434
                    gen_movl_T1_reg(s, rn);
5285 5435
                    gen_op_addl_T0_T1_setq();
......
5289 5439
                /* 16 * 16 */
5290 5440
                gen_movl_T0_reg(s, rm);
5291 5441
                gen_movl_T1_reg(s, rs);
5292
                gen_mulxy(sh & 2, sh & 4);
5442
                gen_mulxy(cpu_T[0], cpu_T[1], sh & 2, sh & 4);
5293 5443
                if (op1 == 2) {
5294 5444
                    gen_op_signbit_T1_T0();
5295 5445
                    gen_op_addq_T0_T1(rn, rd);
......
5758 5908
                        /* Signed multiply most significant [accumulate].  */
5759 5909
                        gen_op_imull_T0_T1();
5760 5910
                        if (insn & (1 << 5))
5761
                            gen_op_roundqd_T0_T1();
5911
                            gen_roundqd(cpu_T[0], cpu_T[1]);
5762 5912
                        else
5763 5913
                            gen_op_movl_T0_T1();
5764 5914
                        if (rn != 15) {
......
5926 6076
            if (insn & (1 << 20)) {
5927 6077
                /* Complete the load.  */
5928 6078
                if (rd == 15)
5929
                    gen_bx(s);
6079
                    gen_bx_T0(s);
5930 6080
                else
5931 6081
                    gen_movl_reg_T0(s, rd);
5932 6082
            }
......
5980 6130
                            /* load */
5981 6131
                            gen_ldst(ldl, s);
5982 6132
                            if (i == 15) {
5983
                                gen_bx(s);
6133
                                gen_bx_T0(s);
5984 6134
                            } else if (user) {
5985 6135
                                gen_op_movl_user_T0(i);
5986 6136
                            } else if (i == rn) {
......
6035 6185
                }
6036 6186
                if ((insn & (1 << 22)) && !user) {
6037 6187
                    /* Restore CPSR from SPSR.  */
6038
                    gen_op_movl_T0_spsr();
6039
                    gen_op_movl_cpsr_T0(0xffffffff);
6188
                    tmp = load_cpu_field(spsr);
6189
                    gen_set_cpsr(tmp, 0xffffffff);
6190
                    dead_tmp(tmp);
6040 6191
                    s->is_jmp = DISAS_UPDATE;
6041 6192
                }
6042 6193
            }
......
6075 6226
            gen_set_condexec(s);
6076 6227
            gen_op_movl_T0_im((long)s->pc - 4);
6077 6228
            gen_set_pc_T0();
6078
            gen_op_undef_insn();
6229
            gen_exception(EXCP_UDEF);
6079 6230
            s->is_jmp = DISAS_JUMP;
6080 6231
            break;
6081 6232
        }
......
6186 6337
        if ((insn & (1 << 12)) == 0) {
6187 6338
            /* Second half of blx.  */
6188 6339
            offset = ((insn & 0x7ff) << 1);
6189
            gen_movl_T0_reg(s, 14);
6190
            gen_op_movl_T1_im(offset);
6191
            gen_op_addl_T0_T1();
6192
            gen_op_movl_T1_im(0xfffffffc);
6193
            gen_op_andl_T0_T1();
6340
            tmp = load_reg(s, 14);
6341
            tcg_gen_addi_i32(tmp, tmp, offset);
6342
            tcg_gen_andi_i32(tmp, tmp, 0xfffffffc);
6194 6343

  
6195 6344
            addr = (uint32_t)s->pc;
6196
            gen_op_movl_T1_im(addr | 1);
6197
            gen_movl_reg_T1(s, 14);
6198
            gen_bx(s);
6345
            tmp2 = new_tmp();
6346
            tcg_gen_movi_i32(tmp2, addr | 1);
6347
            store_reg(s, 14, tmp2);
6348
            gen_bx(s, tmp);
6199 6349
            return 0;
6200 6350
        }
6201 6351
        if (insn & (1 << 11)) {
6202 6352
            /* Second half of bl.  */
6203 6353
            offset = ((insn & 0x7ff) << 1) | 1;
6204
            gen_movl_T0_reg(s, 14);
6205
            gen_op_movl_T1_im(offset);
6206
            gen_op_addl_T0_T1();
6354
            tmp = load_reg(s, 14);
6355
            tcg_gen_addi_i32(tmp, tmp, offset);
6207 6356

  
6208 6357
            addr = (uint32_t)s->pc;
6209
            gen_op_movl_T1_im(addr | 1);
6210
            gen_movl_reg_T1(s, 14);
6211
            gen_bx(s);
6358
            tmp2 = new_tmp();
6359
            tcg_gen_movi_i32(tmp2, addr | 1);
6360
            store_reg(s, 14, tmp2);
6361
            gen_bx(s, tmp);
6212 6362
            return 0;
6213 6363
        }
6214 6364
        if ((s->pc & ~TARGET_PAGE_MASK) == 0) {
......
6388 6538
                    }
6389 6539
                    gen_movl_T0_reg(s, 14);
6390 6540
                    gen_ldst(stl, s);
6391
                    gen_op_movl_T0_cpsr();
6541
                    gen_helper_cpsr_read(cpu_T[0]);
6392 6542
                    gen_op_addl_T1_im(4);
6393 6543
                    gen_ldst(stl, s);
6394 6544
                    if (insn & (1 << 21)) {
......
6424 6574
                        /* Load.  */
6425 6575
                        gen_ldst(ldl, s);
6426 6576
                        if (i == 15) {
6427
                            gen_bx(s);
6577
                            gen_bx_T0(s);
6428 6578
                        } else {
6429 6579
                            gen_movl_reg_T0(s, i);
6430 6580
                        }
......
6527 6677
            op = ((insn >> 17) & 0x38) | ((insn >> 4) & 7);
6528 6678
            if (op < 4) {
6529 6679
                /* Saturating add/subtract.  */
6530
                gen_movl_T0_reg(s, rm);
6531
                gen_movl_T1_reg(s, rn);
6680
                tmp = load_reg(s, rn);
6681
                tmp2 = load_reg(s, rm);
6532 6682
                if (op & 2)
6533
                    gen_helper_double_saturate(cpu_T[1], cpu_T[1]);
6683
                    gen_helper_double_saturate(tmp, tmp);
6534 6684
                if (op & 1)
6535
                    gen_op_subl_T0_T1_saturate();
6685
                    gen_helper_sub_saturate(tmp, tmp2, tmp);
6536 6686
                else
6537
                    gen_op_addl_T0_T1_saturate();
6687
                    gen_helper_add_saturate(tmp, tmp, tmp2);
6688
                dead_tmp(tmp2);
6538 6689
            } else {
6539
                gen_movl_T0_reg(s, rn);
6690
                tmp = load_reg(s, rn);
6540 6691
                switch (op) {
6541 6692
                case 0x0a: /* rbit */
6542
                    gen_helper_rbit(cpu_T[0], cpu_T[0]);
6693
                    gen_helper_rbit(tmp, tmp);
6543 6694
                    break;
6544 6695
                case 0x08: /* rev */
6545
                    gen_op_rev_T0();
6696
                    tcg_gen_bswap_i32(tmp, tmp);
6546 6697
                    break;
6547 6698
                case 0x09: /* rev16 */
6548
                    gen_rev16(cpu_T[0]);
6699
                    gen_rev16(tmp);
6549 6700
                    break;
6550 6701
                case 0x0b: /* revsh */
6551
                    gen_revsh(cpu_T[0]);
6702
                    gen_revsh(tmp);
6552 6703
                    break;
6553 6704
                case 0x10: /* sel */
6554
                    gen_movl_T1_reg(s, rm);
6705
                    tmp2 = load_reg(s, rm);
6555 6706
                    tmp3 = new_tmp();
6556 6707
                    tcg_gen_ld_i32(tmp3, cpu_env, offsetof(CPUState, GE));
6557
                    gen_helper_sel_flags(cpu_T[0], tmp3, cpu_T[0], cpu_T[1]);
6708
                    gen_helper_sel_flags(tmp, tmp3, tmp, tmp2);
6558 6709
                    dead_tmp(tmp3);
6710
                    dead_tmp(tmp2);
6559 6711
                    break;
6560 6712
                case 0x18: /* clz */
6561
                    gen_helper_clz(cpu_T[0], cpu_T[0]);
6713
                    gen_helper_clz(tmp, tmp);
6562 6714
                    break;
6563 6715
                default:
6564 6716
                    goto illegal_op;
6565 6717
                }
6566 6718
            }
6567
            gen_movl_reg_T0(s, rd);
6719
            store_reg(s, rd, tmp);
6568 6720
            break;
6569 6721
        case 4: case 5: /* 32-bit multiply.  Sum of absolute differences.  */
6570 6722
            op = (insn >> 4) & 0xf;
6571
            gen_movl_T0_reg(s, rn);
6572
            gen_movl_T1_reg(s, rm);
6723
            tmp = load_reg(s, rn);
6724
            tmp2 = load_reg(s, rm);
6573 6725
            switch ((insn >> 20) & 7) {
6574 6726
            case 0: /* 32 x 32 -> 32 */
6575
                gen_op_mul_T0_T1();
6727
                tcg_gen_mul_i32(tmp, tmp, tmp2);
6728
                dead_tmp(tmp2);
6576 6729
                if (rs != 15) {
6577
                    gen_movl_T1_reg(s, rs);
6730
                    tmp2 = load_reg(s, rs);
6578 6731
                    if (op)
6579
                        gen_op_rsbl_T0_T1();
6732
                        tcg_gen_sub_i32(tmp, tmp2, tmp);
6580 6733
                    else
6581
                        gen_op_addl_T0_T1();
6734
                        tcg_gen_add_i32(tmp, tmp, tmp2);
6735
                    dead_tmp(tmp2);
6582 6736
                }
6583
                gen_movl_reg_T0(s, rd);
6584 6737
                break;
6585 6738
            case 1: /* 16 x 16 -> 32 */
6586
                gen_mulxy(op & 2, op & 1);
6739
                gen_mulxy(tmp, tmp2, op & 2, op & 1);
6740
                dead_tmp(tmp2);
6587 6741
                if (rs != 15) {
6588
                    gen_movl_T1_reg(s, rs);
6589
                    gen_op_addl_T0_T1_setq();
6742
                    tmp2 = load_reg(s, rs);
6743
                    gen_helper_add_setq(tmp, tmp, tmp2);
6744
                    dead_tmp(tmp2);
6590 6745
                }
6591
                gen_movl_reg_T0(s, rd);
6592 6746
                break;
6593 6747
            case 2: /* Dual multiply add.  */
6594 6748
            case 4: /* Dual multiply subtract.  */
6595 6749
                if (op)
6596
                    gen_swap_half(cpu_T[1]);
6597
                gen_smul_dual(cpu_T[0], cpu_T[1]);
6750
                    gen_swap_half(tmp2);
6751
                gen_smul_dual(tmp, tmp2);
6598 6752
                /* This addition cannot overflow.  */
6599 6753
                if (insn & (1 << 22)) {
6600
                    gen_op_subl_T0_T1();
6754
                    tcg_gen_sub_i32(tmp, tmp, tmp2);
6601 6755
                } else {
6602
                    gen_op_addl_T0_T1();
6756
                    tcg_gen_add_i32(tmp, tmp, tmp2);
6603 6757
                }
6758
                dead_tmp(tmp2);
6604 6759
                if (rs != 15)
6605 6760
                  {
6606
                    gen_movl_T1_reg(s, rs);
6607
                    gen_op_addl_T0_T1_setq();
6761
                    tmp2 = load_reg(s, rs);
6762
                    gen_helper_add_setq(tmp, tmp, tmp2);
6763
                    dead_tmp(tmp2);
6608 6764
                  }
6609
                gen_movl_reg_T0(s, rd);
6610 6765
                break;
6611 6766
            case 3: /* 32 * 16 -> 32msb */
6612 6767
                if (op)
6613
                    gen_op_sarl_T1_im(16);
6768
                    tcg_gen_sari_i32(tmp2, tmp2, 16);
6614 6769
                else
6615
                    gen_sxth(cpu_T[1]);
6616
                gen_op_imulw_T0_T1();
6770
                    gen_sxth(tmp2);
6771
                gen_imulw(tmp, tmp2);
6772
                dead_tmp(tmp2);
6617 6773
                if (rs != 15)
6618 6774
                  {
6619
                    gen_movl_T1_reg(s, rs);
6620
                    gen_op_addl_T0_T1_setq();
6775
                    tmp2 = load_reg(s, rs);
6776
                    gen_helper_add_setq(tmp, tmp, tmp2);
6777
                    dead_tmp(tmp2);
6621 6778
                  }
6622
                gen_movl_reg_T0(s, rd);
6623 6779
                break;
6624 6780
            case 5: case 6: /* 32 * 32 -> 32msb */
6625
                gen_op_imull_T0_T1();
6626
                if (insn & (1 << 5))
6627
                    gen_op_roundqd_T0_T1();
6628
                else
6629
                    gen_op_movl_T0_T1();
6781
                gen_imull(tmp, tmp2);
6782
                if (insn & (1 << 5)) {
6783
                    gen_roundqd(tmp, tmp2);
6784
                    dead_tmp(tmp2);
6785
                } else {
6786
                    dead_tmp(tmp);
6787
                    tmp = tmp2;
6788
                }
6630 6789
                if (rs != 15) {
6631
                    gen_movl_T1_reg(s, rs);
6790
                    tmp2 = load_reg(s, rs);
6632 6791
                    if (insn & (1 << 21)) {
6633
                        gen_op_addl_T0_T1();
6792
                        tcg_gen_add_i32(tmp, tmp, tmp2);
6634 6793
                    } else {
6635
                        gen_op_rsbl_T0_T1();
6794
                        tcg_gen_sub_i32(tmp, tmp2, tmp);
6636 6795
                    }
6796
                    dead_tmp(tmp2);
6637 6797
                }
6638
                gen_movl_reg_T0(s, rd);
6639 6798
                break;
6640 6799
            case 7: /* Unsigned sum of absolute differences.  */
6641
                gen_helper_usad8(cpu_T[0], cpu_T[0], cpu_T[1]);
6800
                gen_helper_usad8(tmp, tmp, tmp2);
6801
                dead_tmp(tmp2);
6642 6802
                if (rs != 15) {
6643
                    gen_movl_T1_reg(s, rs);
6644
                    gen_op_addl_T0_T1();
6803
                    tmp2 = load_reg(s, rs);
6804
                    tcg_gen_add_i32(tmp, tmp, tmp2);
6805
                    dead_tmp(tmp2);
6645 6806
                }
6646
                gen_movl_reg_T0(s, rd);
6647 6807
                break;
6648 6808
            }
6809
            store_reg(s, rd, tmp);
6649 6810
            break;
6650 6811
        case 6: case 7: /* 64-bit multiply, Divide.  */
6651 6812
            op = ((insn >> 4) & 0xf) | ((insn >> 16) & 0x70);
......
6681 6842
                } else {
6682 6843
                    if (op & 8) {
6683 6844
                        /* smlalxy */
6684
                        gen_mulxy(op & 2, op & 1);
6845
                        gen_mulxy(cpu_T[0], cpu_T[1], op & 2, op & 1);
6685 6846
                        gen_op_signbit_T1_T0();
6686 6847
                    } else {
6687 6848
                        /* Signed 64-bit multiply  */
......
6745 6906
                } else {
6746 6907
                    /* blx */
6747 6908
                    addr &= ~(uint32_t)2;
6748
                    gen_op_movl_T0_im(addr);
6749
                    gen_bx(s);
6909
                    gen_bx_im(s, addr);
6750 6910
                }
6751 6911
            } else if (((insn >> 23) & 7) == 7) {
6752 6912
                /* Misc control */
......
6822 6982
                        break;
6823 6983
                    case 4: /* bxj */
6824 6984
                        /* Trivial implementation equivalent to bx.  */
6825
                        gen_movl_T0_reg(s, rn);
6826
                        gen_bx(s);
6985
                        tmp = load_reg(s, rn);
6986
                        gen_bx(s, tmp);
6827 6987
                        break;
6828 6988
                    case 5: /* Exception return.  */
6829 6989
                        /* Unpredictable in user mode.  */
......
6832 6992
                        if (IS_M(env)) {
6833 6993
                            gen_op_v7m_mrs_T0(insn & 0xff);
6834 6994
                        } else {
6835
                            gen_op_movl_T0_cpsr();
6995
                            gen_helper_cpsr_read(cpu_T[0]);
6836 6996
                        }
6837 6997
                        gen_movl_reg_T0(s, rd);
6838 6998
                        break;
......
6840 7000
                        /* Not accessible in user mode.  */
6841 7001
                        if (IS_USER(s) || IS_M(env))
6842 7002
                            goto illegal_op;
6843
                        gen_op_movl_T0_spsr();
6844
                        gen_movl_reg_T0(s, rd);
7003
                        tmp = load_cpu_field(spsr);
7004
                        store_reg(s, rd, tmp);
6845 7005
                        break;
6846 7006
                    }
6847 7007
                }
......
6850 7010
                op = (insn >> 22) & 0xf;
6851 7011
                /* Generate a conditional jump to next instruction.  */
6852 7012
                s->condlabel = gen_new_label();
6853
                gen_test_cc[op ^ 1](s->condlabel);
7013
                gen_test_cc(op ^ 1, s->condlabel);
6854 7014
                s->condjmp = 1;
6855 7015

  
6856 7016
                /* offset[11:1] = insn[10:0] */
......
7095 7255
                default: goto illegal_op;
7096 7256
                }
7097 7257
                if (rs == 15) {
7098
                    gen_bx(s);
7258
                    gen_bx_T0(s);
7099 7259
                } else {
7100 7260
                    gen_movl_reg_T0(s, rs);
7101 7261
                }
......
7132 7292
    int32_t offset;
7133 7293
    int i;
7134 7294
    TCGv tmp;
7295
    TCGv tmp2;
7135 7296

  
7136 7297
    if (s->condexec_mask) {
7137 7298
        cond = s->condexec_cond;
7138 7299
        s->condlabel = gen_new_label();
7139
        gen_test_cc[cond ^ 1](s->condlabel);
7300
        gen_test_cc(cond ^ 1, s->condlabel);
7140 7301
        s->condjmp = 1;
7141 7302
    }
7142 7303

  
......
7254 7415
                    gen_op_movl_T1_im(val);
7255 7416
                    gen_movl_reg_T1(s, 14);
7256 7417
                }
7257
                gen_movl_T0_reg(s, rm);
7258
                gen_bx(s);
7418
                tmp = load_reg(s, rm);
7419
                gen_bx(s, tmp);
7259 7420
                break;
7260 7421
            }
7261 7422
            break;
......
7597 7758
            gen_movl_reg_T1(s, 13);
7598 7759
            /* set the new PC value */
7599 7760
            if ((insn & 0x0900) == 0x0900)
7600
                gen_bx(s);
7761
                gen_bx_T0(s);
7601 7762
            break;
7602 7763

  
7603 7764
        case 1: case 3: case 9: case 11: /* czb */
7604 7765
            rm = insn & 7;
7605
            gen_movl_T0_reg(s, rm);
7766
            tmp = load_reg(s, rm);
7767
            tmp2 = tcg_const_i32(0);
7606 7768
            s->condlabel = gen_new_label();
7607 7769
            s->condjmp = 1;
7608 7770
            if (insn & (1 << 11))
7609
                gen_op_testn_T0(s->condlabel);
7771
                tcg_gen_brcond_i32(TCG_COND_EQ, tmp, tmp2, s->condlabel);
7610 7772
            else
7611
                gen_op_test_T0(s->condlabel);
7612

  
7773
                tcg_gen_brcond_i32(TCG_COND_NE, tmp, tmp2, s->condlabel);
7774
            dead_tmp(tmp);
7613 7775
            offset = ((insn & 0xf8) >> 2) | (insn & 0x200) >> 3;
7614 7776
            val = (uint32_t)s->pc + 2;
7615 7777
            val += offset;
......
7631 7793
            gen_set_condexec(s);
7632 7794
            gen_op_movl_T0_im((long)s->pc - 2);
7633 7795
            gen_set_pc_T0();
7634
            gen_op_bkpt();
7796
            gen_exception(EXCP_BKPT);
7635 7797
            s->is_jmp = DISAS_JUMP;
7636 7798
            break;
7637 7799

  
......
7722 7884
        }
7723 7885
        /* generate a conditional jump to next instruction */
7724 7886
        s->condlabel = gen_new_label();
7725
        gen_test_cc[cond ^ 1](s->condlabel);
7887
        gen_test_cc(cond ^ 1, s->condlabel);
7726 7888
        s->condjmp = 1;
7727 7889
        gen_movl_T1_reg(s, 15);
7728 7890

  
......
7756 7918
    gen_set_condexec(s);
7757 7919
    gen_op_movl_T0_im((long)s->pc - 4);
7758 7920
    gen_set_pc_T0();
7759
    gen_op_undef_insn();
7921
    gen_exception(EXCP_UDEF);
7760 7922
    s->is_jmp = DISAS_JUMP;
7761 7923
    return;
7762 7924
illegal_op:
......
7764 7926
    gen_set_condexec(s);
7765 7927
    gen_op_movl_T0_im((long)s->pc - 2);
7766 7928
    gen_set_pc_T0();
7767
    gen_op_undef_insn();
7929
    gen_exception(EXCP_UDEF);
7768 7930
    s->is_jmp = DISAS_JUMP;
7769 7931
}
7770 7932

  
......
7814 7976
      {
7815 7977
        TCGv tmp = new_tmp();
7816 7978
        tcg_gen_movi_i32(tmp, 0);
7817
        tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, condexec_bits));
7818
        dead_tmp(tmp);
7979
        store_cpu_field(tmp, condexec_bits);
7819 7980
      }
7820 7981
    do {
7821 7982
#ifndef CONFIG_USER_ONLY
7822 7983
        if (dc->pc >= 0xfffffff0 && IS_M(env)) {
7823 7984
            /* We always get here via a jump, so know we are not in a
7824 7985
               conditional execution block.  */
7825
            gen_op_exception_exit();
7986
            gen_exception(EXCP_EXCEPTION_EXIT);
7826 7987
        }
7827 7988
#endif
7828 7989

  
......
7832 7993
                    gen_set_condexec(dc);
7833 7994
                    gen_op_movl_T0_im((long)dc->pc);
7834 7995
                    gen_set_pc_T0();
7835
                    gen_op_debug();
7996
                    gen_exception(EXCP_DEBUG);
7836 7997
                    dc->is_jmp = DISAS_JUMP;
7837 7998
                    /* Advance PC so that clearing the breakpoint will
7838 7999
                       invalidate this TB.  */
......
7897 8058
        if (dc->condjmp) {
7898 8059
            gen_set_condexec(dc);
7899 8060
            if (dc->is_jmp == DISAS_SWI) {
7900
                gen_op_swi();
8061
                gen_exception(EXCP_SWI);
7901 8062
            } else {
7902
                gen_op_debug();
8063
                gen_exception(EXCP_DEBUG);
7903 8064
            }
7904 8065
            gen_set_label(dc->condlabel);
7905 8066
        }
......
7910 8071
        }
7911 8072
        gen_set_condexec(dc);
7912 8073
        if (dc->is_jmp == DISAS_SWI && !dc->condjmp) {
7913
            gen_op_swi();
8074
            gen_exception(EXCP_SWI);
7914 8075
        } else {
7915 8076
            /* FIXME: Single stepping a WFI insn will not halt
7916 8077
               the CPU.  */
7917
            gen_op_debug();
8078
            gen_exception(EXCP_DEBUG);
7918 8079
        }
7919 8080
    } else {
7920 8081
        /* While branches must always occur at the end of an IT block,
......
7940 8101
            /* nothing more to generate */
7941 8102
            break;
7942 8103
        case DISAS_WFI:
7943
            gen_op_wfi();
8104
            gen_helper_wfi();
7944 8105
            break;
7945 8106
        case DISAS_SWI:
7946
            gen_op_swi();
8107
            gen_exception(EXCP_SWI);
7947 8108
            break;
7948 8109
        }
7949 8110
        if (dc->condjmp) {
