tcg/tcg-op.h @ 8cfd0495

/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "tcg.h"

int gen_new_label(void);
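
/* Each tcg_gen_opN* helper below appends one opcode to tcg_ctx.gen_opc_ptr
   and its N operands to tcg_ctx.gen_opparam_ptr, converting TCGv values to
   plain TCGArg indices with the GET_TCGV_* macros.  For example,
   tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg) emits one opcode followed by
   two parameters.  */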

static inline void tcg_gen_op0(TCGOpcode opc)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
}

static inline void tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 arg1)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
}

static inline void tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 arg1)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
}

static inline void tcg_gen_op1i(TCGOpcode opc, TCGArg arg1)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = arg1;
}

static inline void tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
}

static inline void tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
}

static inline void tcg_gen_op2i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGArg arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op2i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGArg arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op2ii(TCGOpcode opc, TCGArg arg1, TCGArg arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = arg1;
    *tcg_ctx.gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
}

static inline void tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
}

static inline void tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 arg1,
                                    TCGv_i32 arg2, TCGArg arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = arg3;
}

static inline void tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 arg1,
                                    TCGv_i64 arg2, TCGArg arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = arg3;
}

static inline void tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
                                       TCGv_ptr base, TCGArg offset)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(val);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_PTR(base);
    *tcg_ctx.gen_opparam_ptr++ = offset;
}

static inline void tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
                                       TCGv_ptr base, TCGArg offset)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_PTR(base);
    *tcg_ctx.gen_opparam_ptr++ = offset;
}

static inline void tcg_gen_qemu_ldst_op_i64_i32(TCGOpcode opc, TCGv_i64 val,
                                                TCGv_i32 addr, TCGArg mem_index)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(addr);
    *tcg_ctx.gen_opparam_ptr++ = mem_index;
}

static inline void tcg_gen_qemu_ldst_op_i64_i64(TCGOpcode opc, TCGv_i64 val,
                                                TCGv_i64 addr, TCGArg mem_index)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(addr);
    *tcg_ctx.gen_opparam_ptr++ = mem_index;
}

static inline void tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3, TCGv_i32 arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
}

static inline void tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3, TCGv_i64 arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
}

static inline void tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                    TCGv_i32 arg3, TCGArg arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                    TCGv_i64 arg3, TCGArg arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                     TCGArg arg3, TCGArg arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = arg3;
    *tcg_ctx.gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                     TCGArg arg3, TCGArg arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = arg3;
    *tcg_ctx.gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg5);
}

static inline void tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg5);
}

static inline void tcg_gen_op5i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                    TCGv_i32 arg3, TCGv_i32 arg4, TCGArg arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op5i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                    TCGv_i64 arg3, TCGv_i64 arg4, TCGArg arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 arg1,
                                     TCGv_i32 arg2, TCGv_i32 arg3,
                                     TCGArg arg4, TCGArg arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = arg4;
    *tcg_ctx.gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 arg1,
                                     TCGv_i64 arg2, TCGv_i64 arg3,
                                     TCGArg arg4, TCGArg arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = arg4;
    *tcg_ctx.gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5,
                                   TCGv_i32 arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg5);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg6);
}

static inline void tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5,
                                   TCGv_i64 arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg5);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg6);
}

static inline void tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                    TCGv_i32 arg3, TCGv_i32 arg4,
                                    TCGv_i32 arg5, TCGArg arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg5);
    *tcg_ctx.gen_opparam_ptr++ = arg6;
}

static inline void tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                    TCGv_i64 arg3, TCGv_i64 arg4,
                                    TCGv_i64 arg5, TCGArg arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg5);
    *tcg_ctx.gen_opparam_ptr++ = arg6;
}

static inline void tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 arg1,
                                     TCGv_i32 arg2, TCGv_i32 arg3,
                                     TCGv_i32 arg4, TCGArg arg5, TCGArg arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = arg5;
    *tcg_ctx.gen_opparam_ptr++ = arg6;
}

static inline void tcg_gen_op6ii_i64(TCGOpcode opc, TCGv_i64 arg1,
                                     TCGv_i64 arg2, TCGv_i64 arg3,
                                     TCGv_i64 arg4, TCGArg arg5, TCGArg arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = arg5;
    *tcg_ctx.gen_opparam_ptr++ = arg6;
}

static inline void gen_set_label(int n)
{
    tcg_gen_op1i(INDEX_op_set_label, n);
}

static inline void tcg_gen_br(int label)
{
    tcg_gen_op1i(INDEX_op_br, label);
}
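
/* Illustrative use of the label primitives (a sketch, not code from this
 * file): allocate a label, branch to it conditionally, then bind the label
 * where execution should resume.  "val" here stands for any TCGv_i32:
 *
 *     int done = gen_new_label();
 *     tcg_gen_brcondi_i32(TCG_COND_EQ, val, 0, done);
 *     ...                          // these ops are skipped when val == 0
 *     gen_set_label(done);
 */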

static inline void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (!TCGV_EQUAL_I32(ret, arg))
        tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
}

static inline void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_op2i_i32(INDEX_op_movi_i32, ret, arg);
}

/* A version of dh_sizemask from def-helper.h that doesn't rely on
   preprocessor magic.  */
static inline int tcg_gen_sizemask(int n, int is_64bit, int is_signed)
{
    return (is_64bit << n*2) | (is_signed << (n*2 + 1));
}
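
/* Example: a helper with a signed 32-bit return value (slot 0) and two
   signed 32-bit arguments (slots 1 and 2) builds its mask as
       sizemask = tcg_gen_sizemask(0, 0, 1)
                | tcg_gen_sizemask(1, 0, 1)
                | tcg_gen_sizemask(2, 0, 1);
   which is exactly how the division and remainder fallbacks below use it.  */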

/* helper calls */
static inline void tcg_gen_helperN(void *func, int flags, int sizemask,
                                   TCGArg ret, int nargs, TCGArg *args)
{
    TCGv_ptr fn;
    fn = tcg_const_ptr(func);
    tcg_gen_callN(&tcg_ctx, fn, flags, sizemask, ret,
                  nargs, args);
    tcg_temp_free_ptr(fn);
}

/* Note: Both tcg_gen_helper32() and tcg_gen_helper64() are currently
   reserved for helpers in tcg-runtime.c. These helpers all do not read
   globals and do not have side effects, hence the call to tcg_gen_callN()
   with TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS. This may need
   to be adjusted if these functions start to be used with other helpers. */
static inline void tcg_gen_helper32(void *func, int sizemask, TCGv_i32 ret,
                                    TCGv_i32 a, TCGv_i32 b)
{
    TCGv_ptr fn;
    TCGArg args[2];
    fn = tcg_const_ptr(func);
    args[0] = GET_TCGV_I32(a);
    args[1] = GET_TCGV_I32(b);
    tcg_gen_callN(&tcg_ctx, fn,
                  TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS,
                  sizemask, GET_TCGV_I32(ret), 2, args);
    tcg_temp_free_ptr(fn);
}

static inline void tcg_gen_helper64(void *func, int sizemask, TCGv_i64 ret,
                                    TCGv_i64 a, TCGv_i64 b)
{
    TCGv_ptr fn;
    TCGArg args[2];
    fn = tcg_const_ptr(func);
    args[0] = GET_TCGV_I64(a);
    args[1] = GET_TCGV_I64(b);
    tcg_gen_callN(&tcg_ctx, fn,
                  TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS,
                  sizemask, GET_TCGV_I64(ret), 2, args);
    tcg_temp_free_ptr(fn);
}
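
/* Both wrappers above materialize the helper's address as a pointer
   constant, pack the two operands into a TCGArg array and emit the call
   through tcg_gen_callN(); the division, remainder and 64-bit shift
   fallbacks below are built on them.  */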

/* 32 bit ops */

static inline void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
}

static inline void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
}

static inline void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
}

static inline void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
}

static inline void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
}

static inline void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
}

static inline void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg1);
    tcg_gen_sub_i32(ret, t0, arg2);
    tcg_temp_free_i32(t0);
}

static inline void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCGV_EQUAL_I32(arg1, arg2)) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
    }
}

static inline void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

static inline void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCGV_EQUAL_I32(arg1, arg2)) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
    }
}

static inline void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCGV_EQUAL_I32(arg1, arg2)) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
    }
}

static inline void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
}

static inline void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
}

static inline void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
}

static inline void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1,
                                      TCGv_i32 arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_index);
    }
}

static inline void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1,
                                       int32_t arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, label_index);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                                       TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}
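
/* setcond writes 1 to ret when the condition holds and 0 otherwise, which
   is why TCG_COND_ALWAYS and TCG_COND_NEVER reduce to constant moves.  */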

static inline void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                                        TCGv_i32 arg1, int32_t arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_setcond_i32(cond, ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
}

static inline void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_mul_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 0, 1);
        sizemask |= tcg_gen_sizemask(1, 0, 1);
        sizemask |= tcg_gen_sizemask(2, 0, 1);
        tcg_gen_helper32(tcg_helper_div_i32, sizemask, ret, arg1, arg2);
    }
}
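
/* Division picks the best available lowering: a native div op; the div2 op,
   whose dividend is a high:low pair (the high part being the sign extension
   produced by the sari above); or, failing both, a call to the
   tcg_helper_div_i32 runtime helper.  The rem/divu/remu generators below
   follow the same pattern, using zero for the high part in the unsigned
   cases.  */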

static inline void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 0, 1);
        sizemask |= tcg_gen_sizemask(1, 0, 1);
        sizemask |= tcg_gen_sizemask(2, 0, 1);
        tcg_gen_helper32(tcg_helper_rem_i32, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 0, 0);
        sizemask |= tcg_gen_sizemask(1, 0, 0);
        sizemask |= tcg_gen_sizemask(2, 0, 0);
        tcg_gen_helper32(tcg_helper_divu_i32, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 0, 0);
        sizemask |= tcg_gen_sizemask(1, 0, 0);
        sizemask |= tcg_gen_sizemask(2, 0, 0);
        tcg_gen_helper32(tcg_helper_remu_i32, sizemask, ret, arg1, arg2);
    }
}

#if TCG_TARGET_REG_BITS == 32

static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (!TCGV_EQUAL_I64(ret, arg)) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
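
/* On 32-bit hosts a TCGv_i64 is a pair of i32 temporaries reached through
   TCGV_LOW() and TCGV_HIGH(), so every 64-bit generator in this branch
   operates on the two halves separately, as the two functions above do.  */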

static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                  tcg_target_long offset)
{
    /* since arg2 and ret have different types, they cannot be the
       same temporary */
#ifdef TCG_TARGET_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                   tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                  tcg_target_long offset)
{
#ifdef TCG_TARGET_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op6_i32(INDEX_op_add2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
                    TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                    TCGV_HIGH(arg2));
    /* Allow the optimizer room to replace add2 with two moves.  */
    tcg_gen_op0(INDEX_op_nop);
}

static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op6_i32(INDEX_op_sub2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
                    TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                    TCGV_HIGH(arg2));
    /* Allow the optimizer room to replace sub2 with two moves.  */
    tcg_gen_op0(INDEX_op_nop);
}

static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
    tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}

static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
    tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}

static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
    tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}

/* XXX: use generic code when basic block handling is OK or CPU
   specific code (x86) */
static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_shl_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
}

static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_shr_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
}

static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_sar_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
}

static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1,
                                      TCGv_i64 arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                          TCGV_HIGH(arg2), cond, label_index);
    }
}

static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                                       TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(TCGV_LOW(ret), 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(TCGV_LOW(ret), 0);
    } else {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
    }
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, TCGV_LOW(t0), TCGV_HIGH(t0),
                        TCGV_LOW(arg1), TCGV_LOW(arg2));
        /* Allow the optimizer room to replace mulu2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_muluh_i32);
        tcg_gen_op3_i32(INDEX_op_mul_i32, TCGV_LOW(t0),
                        TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_op3_i32(INDEX_op_muluh_i32, TCGV_HIGH(t0),
                        TCGV_LOW(arg1), TCGV_LOW(arg2));
    }

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
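
/* The multiply above uses the usual double-word decomposition: with
   arg1 = aH:aL and arg2 = bH:bL,
       (arg1 * arg2) mod 2^64 = aL*bL + ((aL*bH + aH*bL) << 32),
   so mulu2/muluh produce the full 64-bit low product and only the two
   cross products need to be added into the high half.  */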
1064

    
1065
static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1066
{
1067
    int sizemask = 0;
1068
    /* Return value and both arguments are 64-bit and signed.  */
1069
    sizemask |= tcg_gen_sizemask(0, 1, 1);
1070
    sizemask |= tcg_gen_sizemask(1, 1, 1);
1071
    sizemask |= tcg_gen_sizemask(2, 1, 1);
1072

    
1073
    tcg_gen_helper64(tcg_helper_div_i64, sizemask, ret, arg1, arg2);
1074
}
1075

    
1076
static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1077
{
1078
    int sizemask = 0;
1079
    /* Return value and both arguments are 64-bit and signed.  */
1080
    sizemask |= tcg_gen_sizemask(0, 1, 1);
1081
    sizemask |= tcg_gen_sizemask(1, 1, 1);
1082
    sizemask |= tcg_gen_sizemask(2, 1, 1);
1083

    
1084
    tcg_gen_helper64(tcg_helper_rem_i64, sizemask, ret, arg1, arg2);
1085
}
1086

    
1087
static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1088
{
1089
    int sizemask = 0;
1090
    /* Return value and both arguments are 64-bit and unsigned.  */
1091
    sizemask |= tcg_gen_sizemask(0, 1, 0);
1092
    sizemask |= tcg_gen_sizemask(1, 1, 0);
1093
    sizemask |= tcg_gen_sizemask(2, 1, 0);
1094

    
1095
    tcg_gen_helper64(tcg_helper_divu_i64, sizemask, ret, arg1, arg2);
1096
}
1097

    
1098
static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1099
{
1100
    int sizemask = 0;
1101
    /* Return value and both arguments are 64-bit and unsigned.  */
1102
    sizemask |= tcg_gen_sizemask(0, 1, 0);
1103
    sizemask |= tcg_gen_sizemask(1, 1, 0);
1104
    sizemask |= tcg_gen_sizemask(2, 1, 0);
1105

    
1106
    tcg_gen_helper64(tcg_helper_remu_i64, sizemask, ret, arg1, arg2);
1107
}
1108

    
1109
#else
1110

    
1111
static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
1112
{
1113
    if (!TCGV_EQUAL_I64(ret, arg))
1114
        tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
1115
}
1116

    
1117
static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
1118
{
1119
    tcg_gen_op2i_i64(INDEX_op_movi_i64, ret, arg);
1120
}
1121

    
1122
static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
1123
                                    tcg_target_long offset)
1124
{
1125
    tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
1126
}
1127

    
1128
static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
1129
                                    tcg_target_long offset)
1130
{
1131
    tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
1132
}
1133

    
1134
static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
1135
                                     tcg_target_long offset)
1136
{
1137
    tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
1138
}
1139

    
1140
static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
1141
                                     tcg_target_long offset)
1142
{
1143
    tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
1144
}
1145

    
1146
static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
1147
                                     tcg_target_long offset)
1148
{
1149
    tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
1150
}
1151

    
1152
static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
1153
                                     tcg_target_long offset)
1154
{
1155
    tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
1156
}
1157

    
1158
static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
1159
{
1160
    tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
1161
}
1162

    
1163
static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
1164
                                   tcg_target_long offset)
1165
{
1166
    tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
1167
}
1168

    
1169
static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
1170
                                    tcg_target_long offset)
1171
{
1172
    tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
1173
}
1174

    
1175
static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
1176
                                    tcg_target_long offset)
1177
{
1178
    tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
1179
}
1180

    
1181
static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
1182
{
1183
    tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
1184
}
1185

    
1186
static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1187
{
1188
    tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
1189
}
1190

    
1191
static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1192
{
1193
    tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
1194
}
1195

    
1196
static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1197
{
1198
    if (TCGV_EQUAL_I64(arg1, arg2)) {
1199
        tcg_gen_mov_i64(ret, arg1);
1200
    } else {
1201
        tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
1202
    }
1203
}
1204

    
1205
static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1206
{
1207
    TCGv_i64 t0;
1208
    /* Some cases can be optimized here.  */
1209
    switch (arg2) {
1210
    case 0:
1211
        tcg_gen_movi_i64(ret, 0);
1212
        return;
1213
    case 0xffffffffffffffffull:
1214
        tcg_gen_mov_i64(ret, arg1);
1215
        return;
1216
    case 0xffull:
1217
        /* Don't recurse with tcg_gen_ext8u_i32.  */
1218
        if (TCG_TARGET_HAS_ext8u_i64) {
1219
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
1220
            return;
1221
        }
1222
        break;
1223
    case 0xffffu:
1224
        if (TCG_TARGET_HAS_ext16u_i64) {
1225
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
1226
            return;
1227
        }
1228
        break;
1229
    case 0xffffffffull:
1230
        if (TCG_TARGET_HAS_ext32u_i64) {
1231
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
1232
            return;
1233
        }
1234
        break;
1235
    }
1236
    t0 = tcg_const_i64(arg2);
1237
    tcg_gen_and_i64(ret, arg1, t0);
1238
    tcg_temp_free_i64(t0);
1239
}
1240

    
1241
static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1242
{
1243
    if (TCGV_EQUAL_I64(arg1, arg2)) {
1244
        tcg_gen_mov_i64(ret, arg1);
1245
    } else {
1246
        tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
1247
    }
1248
}
1249

    
1250
static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1251
{
1252
    /* Some cases can be optimized here.  */
1253
    if (arg2 == -1) {
1254
        tcg_gen_movi_i64(ret, -1);
1255
    } else if (arg2 == 0) {
1256
        tcg_gen_mov_i64(ret, arg1);
1257
    } else {
1258
        TCGv_i64 t0 = tcg_const_i64(arg2);
1259
        tcg_gen_or_i64(ret, arg1, t0);
1260
        tcg_temp_free_i64(t0);
1261
    }
1262
}
1263

    
1264
static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1265
{
1266
    if (TCGV_EQUAL_I64(arg1, arg2)) {
1267
        tcg_gen_movi_i64(ret, 0);
1268
    } else {
1269
        tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
1270
    }
1271
}
1272

    
1273
static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1274
{
1275
    /* Some cases can be optimized here.  */
1276
    if (arg2 == 0) {
1277
        tcg_gen_mov_i64(ret, arg1);
1278
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
1279
        /* Don't recurse with tcg_gen_not_i64.  */
1280
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
1281
    } else {
1282
        TCGv_i64 t0 = tcg_const_i64(arg2);
1283
        tcg_gen_xor_i64(ret, arg1, t0);
1284
        tcg_temp_free_i64(t0);
1285
    }
1286
}
1287

    
1288
static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1289
{
1290
    tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
1291
}
1292

    
1293
static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1294
{
1295
    if (arg2 == 0) {
1296
        tcg_gen_mov_i64(ret, arg1);
1297
    } else {
1298
        TCGv_i64 t0 = tcg_const_i64(arg2);
1299
        tcg_gen_shl_i64(ret, arg1, t0);
1300
        tcg_temp_free_i64(t0);
1301
    }
1302
}
1303

    
1304
static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1305
{
1306
    tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
1307
}
1308

    
1309
static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1310
{
1311
    if (arg2 == 0) {
1312
        tcg_gen_mov_i64(ret, arg1);
1313
    } else {
1314
        TCGv_i64 t0 = tcg_const_i64(arg2);
1315
        tcg_gen_shr_i64(ret, arg1, t0);
1316
        tcg_temp_free_i64(t0);
1317
    }
1318
}
1319

    
1320
static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1321
{
1322
    tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
1323
}
1324

    
1325
static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1326
{
1327
    if (arg2 == 0) {
1328
        tcg_gen_mov_i64(ret, arg1);
1329
    } else {
1330
        TCGv_i64 t0 = tcg_const_i64(arg2);
1331
        tcg_gen_sar_i64(ret, arg1, t0);
1332
        tcg_temp_free_i64(t0);
1333
    }
1334
}
1335

    
1336
static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1,
1337
                                      TCGv_i64 arg2, int label_index)
1338
{
1339
    if (cond == TCG_COND_ALWAYS) {
1340
        tcg_gen_br(label_index);
1341
    } else if (cond != TCG_COND_NEVER) {
1342
        tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond, label_index);
1343
    }
1344
}
1345

    
1346
static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
1347
                                       TCGv_i64 arg1, TCGv_i64 arg2)
1348
{
1349
    if (cond == TCG_COND_ALWAYS) {
1350
        tcg_gen_movi_i64(ret, 1);
1351
    } else if (cond == TCG_COND_NEVER) {
1352
        tcg_gen_movi_i64(ret, 0);
1353
    } else {
1354
        tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
1355
    }
1356
}
1357

    
1358
static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1359
{
1360
    tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
1361
}
1362

    
1363
static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1364
{
1365
    if (TCG_TARGET_HAS_div_i64) {
1366
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
1367
    } else if (TCG_TARGET_HAS_div2_i64) {
1368
        TCGv_i64 t0 = tcg_temp_new_i64();
1369
        tcg_gen_sari_i64(t0, arg1, 63);
1370
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
1371
        tcg_temp_free_i64(t0);
1372
    } else {
1373
        int sizemask = 0;
1374
        /* Return value and both arguments are 64-bit and signed.  */
1375
        sizemask |= tcg_gen_sizemask(0, 1, 1);
1376
        sizemask |= tcg_gen_sizemask(1, 1, 1);
1377
        sizemask |= tcg_gen_sizemask(2, 1, 1);
1378
        tcg_gen_helper64(tcg_helper_div_i64, sizemask, ret, arg1, arg2);
1379
    }
1380
}
1381

    
1382
static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1383
{
1384
    if (TCG_TARGET_HAS_rem_i64) {
1385
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
1386
    } else if (TCG_TARGET_HAS_div_i64) {
1387
        TCGv_i64 t0 = tcg_temp_new_i64();
1388
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
1389
        tcg_gen_mul_i64(t0, t0, arg2);
1390
        tcg_gen_sub_i64(ret, arg1, t0);
1391
        tcg_temp_free_i64(t0);
1392
    } else if (TCG_TARGET_HAS_div2_i64) {
1393
        TCGv_i64 t0 = tcg_temp_new_i64();
1394
        tcg_gen_sari_i64(t0, arg1, 63);
1395
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
1396
        tcg_temp_free_i64(t0);
1397
    } else {
1398
        int sizemask = 0;
1399
        /* Return value and both arguments are 64-bit and signed.  */
1400
        sizemask |= tcg_gen_sizemask(0, 1, 1);
1401
        sizemask |= tcg_gen_sizemask(1, 1, 1);
1402
        sizemask |= tcg_gen_sizemask(2, 1, 1);
1403
        tcg_gen_helper64(tcg_helper_rem_i64, sizemask, ret, arg1, arg2);
1404
    }
1405
}
1406

    
1407
static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1408
{
1409
    if (TCG_TARGET_HAS_div_i64) {
1410
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
1411
    } else if (TCG_TARGET_HAS_div2_i64) {
1412
        TCGv_i64 t0 = tcg_temp_new_i64();
1413
        tcg_gen_movi_i64(t0, 0);
1414
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
1415
        tcg_temp_free_i64(t0);
1416
    } else {
1417
        int sizemask = 0;
1418
        /* Return value and both arguments are 64-bit and unsigned.  */
1419
        sizemask |= tcg_gen_sizemask(0, 1, 0);
1420
        sizemask |= tcg_gen_sizemask(1, 1, 0);
1421
        sizemask |= tcg_gen_sizemask(2, 1, 0);
1422
        tcg_gen_helper64(tcg_helper_divu_i64, sizemask, ret, arg1, arg2);
1423
    }
1424
}
1425

    
1426
static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1427
{
1428
    if (TCG_TARGET_HAS_rem_i64) {
1429
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
1430
    } else if (TCG_TARGET_HAS_div_i64) {
1431
        TCGv_i64 t0 = tcg_temp_new_i64();
1432
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
1433
        tcg_gen_mul_i64(t0, t0, arg2);
1434
        tcg_gen_sub_i64(ret, arg1, t0);
1435
        tcg_temp_free_i64(t0);
1436
    } else if (TCG_TARGET_HAS_div2_i64) {
1437
        TCGv_i64 t0 = tcg_temp_new_i64();
1438
        tcg_gen_movi_i64(t0, 0);
1439
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
1440
        tcg_temp_free_i64(t0);
1441
    } else {
1442
        int sizemask = 0;
1443
        /* Return value and both arguments are 64-bit and unsigned.  */
1444
        sizemask |= tcg_gen_sizemask(0, 1, 0);
1445
        sizemask |= tcg_gen_sizemask(1, 1, 0);
1446
        sizemask |= tcg_gen_sizemask(2, 1, 0);
1447
        tcg_gen_helper64(tcg_helper_remu_i64, sizemask, ret, arg1, arg2);
1448
    }
1449
}
1450
#endif /* TCG_TARGET_REG_BITS == 32 */
1451

    
1452
static inline void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1453
{
1454
    /* some cases can be optimized here */
1455
    if (arg2 == 0) {
1456
        tcg_gen_mov_i64(ret, arg1);
1457
    } else {
1458
        TCGv_i64 t0 = tcg_const_i64(arg2);
1459
        tcg_gen_add_i64(ret, arg1, t0);
1460
        tcg_temp_free_i64(t0);
1461
    }
1462
}
1463

    
1464
static inline void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
1465
{
1466
    TCGv_i64 t0 = tcg_const_i64(arg1);
1467
    tcg_gen_sub_i64(ret, t0, arg2);
1468
    tcg_temp_free_i64(t0);
1469
}
1470

    
1471
static inline void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1472
{
1473
    /* some cases can be optimized here */
1474
    if (arg2 == 0) {
1475
        tcg_gen_mov_i64(ret, arg1);
1476
    } else {
1477
        TCGv_i64 t0 = tcg_const_i64(arg2);
1478
        tcg_gen_sub_i64(ret, arg1, t0);
1479
        tcg_temp_free_i64(t0);
1480
    }
1481
}
1482
static inline void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1,
1483
                                       int64_t arg2, int label_index)
1484
{
1485
    if (cond == TCG_COND_ALWAYS) {
1486
        tcg_gen_br(label_index);
1487
    } else if (cond != TCG_COND_NEVER) {
1488
        TCGv_i64 t0 = tcg_const_i64(arg2);
1489
        tcg_gen_brcond_i64(cond, arg1, t0, label_index);
1490
        tcg_temp_free_i64(t0);
1491
    }
1492
}
1493

    
1494
static inline void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
1495
                                        TCGv_i64 arg1, int64_t arg2)
1496
{
1497
    TCGv_i64 t0 = tcg_const_i64(arg2);
1498
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
1499
    tcg_temp_free_i64(t0);
1500
}
1501

    
1502
static inline void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1503
{
1504
    TCGv_i64 t0 = tcg_const_i64(arg2);
1505
    tcg_gen_mul_i64(ret, arg1, t0);
1506
    tcg_temp_free_i64(t0);
1507
}
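
/* Illustrative sketch, not part of the original header: the immediate
   compare-and-branch helpers above are normally paired with a forward
   label.  The function name is an example only; gen_new_label() is
   declared at the top of this header and gen_set_label() is assumed to
   be the usual label-emitting helper defined alongside it.  */
static inline void tcg_gen_example_clamp_negative_i64(TCGv_i64 val)
{
    int over = gen_new_label();                       /* forward label */
    tcg_gen_brcondi_i64(TCG_COND_GE, val, 0, over);   /* skip if val >= 0 */
    tcg_gen_movi_i64(val, 0);                         /* reached only if val < 0 */
    gen_set_label(over);                              /* branch target */
}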
1508

    
1509

    
1510
/***************************************/
1511
/* optional operations */
1512

    
1513
static inline void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
1514
{
1515
    if (TCG_TARGET_HAS_ext8s_i32) {
1516
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
1517
    } else {
1518
        tcg_gen_shli_i32(ret, arg, 24);
1519
        tcg_gen_sari_i32(ret, ret, 24);
1520
    }
1521
}
1522

    
1523
static inline void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
1524
{
1525
    if (TCG_TARGET_HAS_ext16s_i32) {
1526
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
1527
    } else {
1528
        tcg_gen_shli_i32(ret, arg, 16);
1529
        tcg_gen_sari_i32(ret, ret, 16);
1530
    }
1531
}
1532

    
1533
static inline void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
1534
{
1535
    if (TCG_TARGET_HAS_ext8u_i32) {
1536
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
1537
    } else {
1538
        tcg_gen_andi_i32(ret, arg, 0xffu);
1539
    }
1540
}
1541

    
1542
static inline void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
1543
{
1544
    if (TCG_TARGET_HAS_ext16u_i32) {
1545
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
1546
    } else {
1547
        tcg_gen_andi_i32(ret, arg, 0xffffu);
1548
    }
1549
}
1550

    
1551
/* Note: we assume the two high bytes are set to zero */
1552
static inline void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
1553
{
1554
    if (TCG_TARGET_HAS_bswap16_i32) {
1555
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
1556
    } else {
1557
        TCGv_i32 t0 = tcg_temp_new_i32();
1558
    
1559
        tcg_gen_ext8u_i32(t0, arg);
1560
        tcg_gen_shli_i32(t0, t0, 8);
1561
        tcg_gen_shri_i32(ret, arg, 8);
1562
        tcg_gen_or_i32(ret, ret, t0);
1563
        tcg_temp_free_i32(t0);
1564
    }
1565
}
1566

    
1567
static inline void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
1568
{
1569
    if (TCG_TARGET_HAS_bswap32_i32) {
1570
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
1571
    } else {
1572
        TCGv_i32 t0, t1;
1573
        t0 = tcg_temp_new_i32();
1574
        t1 = tcg_temp_new_i32();
1575
    
1576
        tcg_gen_shli_i32(t0, arg, 24);
1577
    
1578
        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
1579
        tcg_gen_shli_i32(t1, t1, 8);
1580
        tcg_gen_or_i32(t0, t0, t1);
1581
    
1582
        tcg_gen_shri_i32(t1, arg, 8);
1583
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
1584
        tcg_gen_or_i32(t0, t0, t1);
1585
    
1586
        tcg_gen_shri_i32(t1, arg, 24);
1587
        tcg_gen_or_i32(ret, t0, t1);
1588
        tcg_temp_free_i32(t0);
1589
        tcg_temp_free_i32(t1);
1590
    }
1591
}
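
/* Worked example for the fallback above: with arg = 0x11223344 the four
   terms are 0x44000000, 0x00330000, 0x00002200 and 0x00000011, which OR
   together into the byte-reversed value 0x44332211.  */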
1592

    
1593
#if TCG_TARGET_REG_BITS == 32
1594
static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
1595
{
1596
    tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1597
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1598
}
1599

    
1600
static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
1601
{
1602
    tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1603
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1604
}
1605

    
1606
static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
1607
{
1608
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1609
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1610
}
1611

    
1612
static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
1613
{
1614
    tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1615
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1616
}
1617

    
1618
static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
1619
{
1620
    tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1621
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1622
}
1623

    
1624
static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
1625
{
1626
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1627
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1628
}
1629

    
1630
static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
1631
{
1632
    tcg_gen_mov_i32(ret, TCGV_LOW(arg));
1633
}
1634

    
1635
static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1636
{
1637
    tcg_gen_mov_i32(TCGV_LOW(ret), arg);
1638
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1639
}
1640

    
1641
static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1642
{
1643
    tcg_gen_mov_i32(TCGV_LOW(ret), arg);
1644
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1645
}
1646

    
1647
/* Note: we assume the six high bytes are set to zero */
1648
static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
1649
{
1650
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1651
    tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1652
}
1653

    
1654
/* Note: we assume the four high bytes are set to zero */
1655
static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
1656
{
1657
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1658
    tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1659
}
1660

    
1661
static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
1662
{
1663
    TCGv_i32 t0, t1;
1664
    t0 = tcg_temp_new_i32();
1665
    t1 = tcg_temp_new_i32();
1666

    
1667
    tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
1668
    tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
1669
    tcg_gen_mov_i32(TCGV_LOW(ret), t1);
1670
    tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
1671
    tcg_temp_free_i32(t0);
1672
    tcg_temp_free_i32(t1);
1673
}
1674
#else
1675

    
1676
static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
1677
{
1678
    if (TCG_TARGET_HAS_ext8s_i64) {
1679
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
1680
    } else {
1681
        tcg_gen_shli_i64(ret, arg, 56);
1682
        tcg_gen_sari_i64(ret, ret, 56);
1683
    }
1684
}
1685

    
1686
static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
1687
{
1688
    if (TCG_TARGET_HAS_ext16s_i64) {
1689
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
1690
    } else {
1691
        tcg_gen_shli_i64(ret, arg, 48);
1692
        tcg_gen_sari_i64(ret, ret, 48);
1693
    }
1694
}
1695

    
1696
static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
1697
{
1698
    if (TCG_TARGET_HAS_ext32s_i64) {
1699
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
1700
    } else {
1701
        tcg_gen_shli_i64(ret, arg, 32);
1702
        tcg_gen_sari_i64(ret, ret, 32);
1703
    }
1704
}
1705

    
1706
static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
1707
{
1708
    if (TCG_TARGET_HAS_ext8u_i64) {
1709
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
1710
    } else {
1711
        tcg_gen_andi_i64(ret, arg, 0xffu);
1712
    }
1713
}
1714

    
1715
static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
1716
{
1717
    if (TCG_TARGET_HAS_ext16u_i64) {
1718
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
1719
    } else {
1720
        tcg_gen_andi_i64(ret, arg, 0xffffu);
1721
    }
1722
}
1723

    
1724
static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
1725
{
1726
    if (TCG_TARGET_HAS_ext32u_i64) {
1727
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
1728
    } else {
1729
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
1730
    }
1731
}
1732

    
1733
/* Note: we assume the target supports move between 32 and 64 bit
1734
   registers.  This will probably break MIPS64 targets.  */
1735
static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
1736
{
1737
    tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
1738
}
1739

    
1740
/* Note: we assume the target supports move between 32 and 64 bit
1741
   registers */
1742
static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1743
{
1744
    tcg_gen_ext32u_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
1745
}
1746

    
1747
/* Note: we assume the target supports move between 32 and 64 bit
1748
   registers */
1749
static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1750
{
1751
    tcg_gen_ext32s_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
1752
}
1753

    
1754
/* Note: we assume the six high bytes are set to zero */
1755
static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
1756
{
1757
    if (TCG_TARGET_HAS_bswap16_i64) {
1758
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
1759
    } else {
1760
        TCGv_i64 t0 = tcg_temp_new_i64();
1761

    
1762
        tcg_gen_ext8u_i64(t0, arg);
1763
        tcg_gen_shli_i64(t0, t0, 8);
1764
        tcg_gen_shri_i64(ret, arg, 8);
1765
        tcg_gen_or_i64(ret, ret, t0);
1766
        tcg_temp_free_i64(t0);
1767
    }
1768
}
1769

    
1770
/* Note: we assume the four high bytes are set to zero */
1771
static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
1772
{
1773
    if (TCG_TARGET_HAS_bswap32_i64) {
1774
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
1775
    } else {
1776
        TCGv_i64 t0, t1;
1777
        t0 = tcg_temp_new_i64();
1778
        t1 = tcg_temp_new_i64();
1779

    
1780
        tcg_gen_shli_i64(t0, arg, 24);
1781
        tcg_gen_ext32u_i64(t0, t0);
1782

    
1783
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
1784
        tcg_gen_shli_i64(t1, t1, 8);
1785
        tcg_gen_or_i64(t0, t0, t1);
1786

    
1787
        tcg_gen_shri_i64(t1, arg, 8);
1788
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
1789
        tcg_gen_or_i64(t0, t0, t1);
1790

    
1791
        tcg_gen_shri_i64(t1, arg, 24);
1792
        tcg_gen_or_i64(ret, t0, t1);
1793
        tcg_temp_free_i64(t0);
1794
        tcg_temp_free_i64(t1);
1795
    }
1796
}
1797

    
1798
static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
1799
{
1800
    if (TCG_TARGET_HAS_bswap64_i64) {
1801
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
1802
    } else {
1803
        TCGv_i64 t0 = tcg_temp_new_i64();
1804
        TCGv_i64 t1 = tcg_temp_new_i64();
1805
    
1806
        tcg_gen_shli_i64(t0, arg, 56);
1807
    
1808
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
1809
        tcg_gen_shli_i64(t1, t1, 40);
1810
        tcg_gen_or_i64(t0, t0, t1);
1811
    
1812
        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
1813
        tcg_gen_shli_i64(t1, t1, 24);
1814
        tcg_gen_or_i64(t0, t0, t1);
1815

    
1816
        tcg_gen_andi_i64(t1, arg, 0xff000000);
1817
        tcg_gen_shli_i64(t1, t1, 8);
1818
        tcg_gen_or_i64(t0, t0, t1);
1819

    
1820
        tcg_gen_shri_i64(t1, arg, 8);
1821
        tcg_gen_andi_i64(t1, t1, 0xff000000);
1822
        tcg_gen_or_i64(t0, t0, t1);
1823
    
1824
        tcg_gen_shri_i64(t1, arg, 24);
1825
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
1826
        tcg_gen_or_i64(t0, t0, t1);
1827

    
1828
        tcg_gen_shri_i64(t1, arg, 40);
1829
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
1830
        tcg_gen_or_i64(t0, t0, t1);
1831

    
1832
        tcg_gen_shri_i64(t1, arg, 56);
1833
        tcg_gen_or_i64(ret, t0, t1);
1834
        tcg_temp_free_i64(t0);
1835
        tcg_temp_free_i64(t1);
1836
    }
1837
}
1838

    
1839
#endif
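
/* Illustrative sketch, not part of the original header: a front end that
   widens a 32-bit value chooses the signed or unsigned extension
   explicitly.  The function name is an example only.  */
static inline void tcg_gen_example_widen_i32(TCGv_i64 ret, TCGv_i32 arg,
                                             int is_signed)
{
    if (is_signed) {
        tcg_gen_ext_i32_i64(ret, arg);    /* replicate bit 31 into the high half */
    } else {
        tcg_gen_extu_i32_i64(ret, arg);   /* clear the high 32 bits */
    }
}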
1840

    
1841
static inline void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
1842
{
1843
    if (TCG_TARGET_HAS_neg_i32) {
1844
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
1845
    } else {
1846
        TCGv_i32 t0 = tcg_const_i32(0);
1847
        tcg_gen_sub_i32(ret, t0, arg);
1848
        tcg_temp_free_i32(t0);
1849
    }
1850
}
1851

    
1852
static inline void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
1853
{
1854
    if (TCG_TARGET_HAS_neg_i64) {
1855
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
1856
    } else {
1857
        TCGv_i64 t0 = tcg_const_i64(0);
1858
        tcg_gen_sub_i64(ret, t0, arg);
1859
        tcg_temp_free_i64(t0);
1860
    }
1861
}
1862

    
1863
static inline void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
1864
{
1865
    if (TCG_TARGET_HAS_not_i32) {
1866
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
1867
    } else {
1868
        tcg_gen_xori_i32(ret, arg, -1);
1869
    }
1870
}
1871

    
1872
static inline void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1873
{
1874
#if TCG_TARGET_REG_BITS == 64
1875
    if (TCG_TARGET_HAS_not_i64) {
1876
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1877
    } else {
1878
        tcg_gen_xori_i64(ret, arg, -1);
1879
    }
1880
#else
1881
    tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1882
    tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1883
#endif
1884
}
1885

    
1886
static inline void tcg_gen_discard_i32(TCGv_i32 arg)
1887
{
1888
    tcg_gen_op1_i32(INDEX_op_discard, arg);
1889
}
1890

    
1891
static inline void tcg_gen_discard_i64(TCGv_i64 arg)
1892
{
1893
#if TCG_TARGET_REG_BITS == 32
1894
    tcg_gen_discard_i32(TCGV_LOW(arg));
1895
    tcg_gen_discard_i32(TCGV_HIGH(arg));
1896
#else
1897
    tcg_gen_op1_i64(INDEX_op_discard, arg);
1898
#endif
1899
}
1900

    
1901
static inline void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1902
{
1903
    if (TCG_TARGET_HAS_andc_i32) {
1904
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
1905
    } else {
1906
        TCGv_i32 t0 = tcg_temp_new_i32();
1907
        tcg_gen_not_i32(t0, arg2);
1908
        tcg_gen_and_i32(ret, arg1, t0);
1909
        tcg_temp_free_i32(t0);
1910
    }
1911
}
1912

    
1913
static inline void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1914
{
1915
#if TCG_TARGET_REG_BITS == 64
1916
    if (TCG_TARGET_HAS_andc_i64) {
1917
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1918
    } else {
1919
        TCGv_i64 t0 = tcg_temp_new_i64();
1920
        tcg_gen_not_i64(t0, arg2);
1921
        tcg_gen_and_i64(ret, arg1, t0);
1922
        tcg_temp_free_i64(t0);
1923
    }
1924
#else
1925
    tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1926
    tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1927
#endif
1928
}
1929

    
1930
static inline void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1931
{
1932
    if (TCG_TARGET_HAS_eqv_i32) {
1933
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
1934
    } else {
1935
        tcg_gen_xor_i32(ret, arg1, arg2);
1936
        tcg_gen_not_i32(ret, ret);
1937
    }
1938
}
1939

    
1940
static inline void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1941
{
1942
#if TCG_TARGET_REG_BITS == 64
1943
    if (TCG_TARGET_HAS_eqv_i64) {
1944
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1945
    } else {
1946
        tcg_gen_xor_i64(ret, arg1, arg2);
1947
        tcg_gen_not_i64(ret, ret);
1948
    }
1949
#else
1950
    tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1951
    tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1952
#endif
1953
}
1954

    
1955
static inline void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1956
{
1957
    if (TCG_TARGET_HAS_nand_i32) {
1958
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
1959
    } else {
1960
        tcg_gen_and_i32(ret, arg1, arg2);
1961
        tcg_gen_not_i32(ret, ret);
1962
    }
1963
}
1964

    
1965
static inline void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1966
{
1967
#if TCG_TARGET_REG_BITS == 64
1968
    if (TCG_TARGET_HAS_nand_i64) {
1969
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
1970
    } else {
1971
        tcg_gen_and_i64(ret, arg1, arg2);
1972
        tcg_gen_not_i64(ret, ret);
1973
    }
1974
#else
1975
    tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1976
    tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1977
#endif
1978
}
1979

    
1980
static inline void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1981
{
1982
    if (TCG_TARGET_HAS_nor_i32) {
1983
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
1984
    } else {
1985
        tcg_gen_or_i32(ret, arg1, arg2);
1986
        tcg_gen_not_i32(ret, ret);
1987
    }
1988
}
1989

    
1990
static inline void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1991
{
1992
#if TCG_TARGET_REG_BITS == 64
1993
    if (TCG_TARGET_HAS_nor_i64) {
1994
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
1995
    } else {
1996
        tcg_gen_or_i64(ret, arg1, arg2);
1997
        tcg_gen_not_i64(ret, ret);
1998
    }
1999
#else
2000
    tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2001
    tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2002
#endif
2003
}
2004

    
2005
static inline void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
2006
{
2007
    if (TCG_TARGET_HAS_orc_i32) {
2008
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
2009
    } else {
2010
        TCGv_i32 t0 = tcg_temp_new_i32();
2011
        tcg_gen_not_i32(t0, arg2);
2012
        tcg_gen_or_i32(ret, arg1, t0);
2013
        tcg_temp_free_i32(t0);
2014
    }
2015
}
2016

    
2017
static inline void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2018
{
2019
#if TCG_TARGET_REG_BITS == 64
2020
    if (TCG_TARGET_HAS_orc_i64) {
2021
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
2022
    } else {
2023
        TCGv_i64 t0 = tcg_temp_new_i64();
2024
        tcg_gen_not_i64(t0, arg2);
2025
        tcg_gen_or_i64(ret, arg1, t0);
2026
        tcg_temp_free_i64(t0);
2027
    }
2028
#else
2029
    tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2030
    tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2031
#endif
2032
}
2033

    
2034
static inline void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
2035
{
2036
    if (TCG_TARGET_HAS_rot_i32) {
2037
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
2038
    } else {
2039
        TCGv_i32 t0, t1;
2040

    
2041
        t0 = tcg_temp_new_i32();
2042
        t1 = tcg_temp_new_i32();
2043
        tcg_gen_shl_i32(t0, arg1, arg2);
2044
        tcg_gen_subfi_i32(t1, 32, arg2);
2045
        tcg_gen_shr_i32(t1, arg1, t1);
2046
        tcg_gen_or_i32(ret, t0, t1);
2047
        tcg_temp_free_i32(t0);
2048
        tcg_temp_free_i32(t1);
2049
    }
2050
}
2051

    
2052
static inline void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2053
{
2054
    if (TCG_TARGET_HAS_rot_i64) {
2055
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
2056
    } else {
2057
        TCGv_i64 t0, t1;
2058
        t0 = tcg_temp_new_i64();
2059
        t1 = tcg_temp_new_i64();
2060
        tcg_gen_shl_i64(t0, arg1, arg2);
2061
        tcg_gen_subfi_i64(t1, 64, arg2);
2062
        tcg_gen_shr_i64(t1, arg1, t1);
2063
        tcg_gen_or_i64(ret, t0, t1);
2064
        tcg_temp_free_i64(t0);
2065
        tcg_temp_free_i64(t1);
2066
    }
2067
}
2068

    
2069
static inline void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
2070
{
2071
    /* some cases can be optimized here */
2072
    if (arg2 == 0) {
2073
        tcg_gen_mov_i32(ret, arg1);
2074
    } else if (TCG_TARGET_HAS_rot_i32) {
2075
        TCGv_i32 t0 = tcg_const_i32(arg2);
2076
        tcg_gen_rotl_i32(ret, arg1, t0);
2077
        tcg_temp_free_i32(t0);
2078
    } else {
2079
        TCGv_i32 t0, t1;
2080
        t0 = tcg_temp_new_i32();
2081
        t1 = tcg_temp_new_i32();
2082
        tcg_gen_shli_i32(t0, arg1, arg2);
2083
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
2084
        tcg_gen_or_i32(ret, t0, t1);
2085
        tcg_temp_free_i32(t0);
2086
        tcg_temp_free_i32(t1);
2087
    }
2088
}
2089

    
2090
static inline void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2091
{
2092
    /* some cases can be optimized here */
2093
    if (arg2 == 0) {
2094
        tcg_gen_mov_i64(ret, arg1);
2095
    } else if (TCG_TARGET_HAS_rot_i64) {
2096
        TCGv_i64 t0 = tcg_const_i64(arg2);
2097
        tcg_gen_rotl_i64(ret, arg1, t0);
2098
        tcg_temp_free_i64(t0);
2099
    } else {
2100
        TCGv_i64 t0, t1;
2101
        t0 = tcg_temp_new_i64();
2102
        t1 = tcg_temp_new_i64();
2103
        tcg_gen_shli_i64(t0, arg1, arg2);
2104
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
2105
        tcg_gen_or_i64(ret, t0, t1);
2106
        tcg_temp_free_i64(t0);
2107
        tcg_temp_free_i64(t1);
2108
    }
2109
}
2110

    
2111
static inline void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
2112
{
2113
    if (TCG_TARGET_HAS_rot_i32) {
2114
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
2115
    } else {
2116
        TCGv_i32 t0, t1;
2117

    
2118
        t0 = tcg_temp_new_i32();
2119
        t1 = tcg_temp_new_i32();
2120
        tcg_gen_shr_i32(t0, arg1, arg2);
2121
        tcg_gen_subfi_i32(t1, 32, arg2);
2122
        tcg_gen_shl_i32(t1, arg1, t1);
2123
        tcg_gen_or_i32(ret, t0, t1);
2124
        tcg_temp_free_i32(t0);
2125
        tcg_temp_free_i32(t1);
2126
    }
2127
}
2128

    
2129
static inline void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2130
{
2131
    if (TCG_TARGET_HAS_rot_i64) {
2132
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
2133
    } else {
2134
        TCGv_i64 t0, t1;
2135
        t0 = tcg_temp_new_i64();
2136
        t1 = tcg_temp_new_i64();
2137
        tcg_gen_shr_i64(t0, arg1, arg2);
2138
        tcg_gen_subfi_i64(t1, 64, arg2);
2139
        tcg_gen_shl_i64(t1, arg1, t1);
2140
        tcg_gen_or_i64(ret, t0, t1);
2141
        tcg_temp_free_i64(t0);
2142
        tcg_temp_free_i64(t1);
2143
    }
2144
}
2145

    
2146
static inline void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
2147
{
2148
    /* some cases can be optimized here */
2149
    if (arg2 == 0) {
2150
        tcg_gen_mov_i32(ret, arg1);
2151
    } else {
2152
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
2153
    }
2154
}
2155

    
2156
static inline void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2157
{
2158
    /* some cases can be optimized here */
2159
    if (arg2 == 0) {
2160
        tcg_gen_mov_i64(ret, arg1);
2161
    } else {
2162
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2163
    }
2164
}
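
/* Rotation by a constant only needs one direction: a rotate right by c is
   emitted as a rotate left by the complementary amount, e.g. a 64-bit
   rotate right by 8 becomes a rotate left by 56.  */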
2165

    
2166
static inline void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1,
2167
                                       TCGv_i32 arg2, unsigned int ofs,
2168
                                       unsigned int len)
2169
{
2170
    uint32_t mask;
2171
    TCGv_i32 t1;
2172

    
2173
    tcg_debug_assert(ofs < 32);
2174
    tcg_debug_assert(len <= 32);
2175
    tcg_debug_assert(ofs + len <= 32);
2176

    
2177
    if (ofs == 0 && len == 32) {
2178
        tcg_gen_mov_i32(ret, arg2);
2179
        return;
2180
    }
2181
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
2182
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
2183
        return;
2184
    }
2185

    
2186
    mask = (1u << len) - 1;
2187
    t1 = tcg_temp_new_i32();
2188

    
2189
    if (ofs + len < 32) {
2190
        tcg_gen_andi_i32(t1, arg2, mask);
2191
        tcg_gen_shli_i32(t1, t1, ofs);
2192
    } else {
2193
        tcg_gen_shli_i32(t1, arg2, ofs);
2194
    }
2195
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
2196
    tcg_gen_or_i32(ret, ret, t1);
2197

    
2198
    tcg_temp_free_i32(t1);
2199
}
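
/* Worked example for the generic fallback above: depositing an 8-bit field
   at ofs = 8 uses mask = 0xff and computes
   ret = (arg1 & ~0x0000ff00) | ((arg2 & 0xff) << 8).  */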
2200

    
2201
static inline void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1,
2202
                                       TCGv_i64 arg2, unsigned int ofs,
2203
                                       unsigned int len)
2204
{
2205
    uint64_t mask;
2206
    TCGv_i64 t1;
2207

    
2208
    tcg_debug_assert(ofs < 64);
2209
    tcg_debug_assert(len <= 64);
2210
    tcg_debug_assert(ofs + len <= 64);
2211

    
2212
    if (ofs == 0 && len == 64) {
2213
        tcg_gen_mov_i64(ret, arg2);
2214
        return;
2215
    }
2216
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2217
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
2218
        return;
2219
    }
2220

    
2221
#if TCG_TARGET_REG_BITS == 32
2222
    if (ofs >= 32) {
2223
        tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
2224
                            TCGV_LOW(arg2), ofs - 32, len);
2225
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2226
        return;
2227
    }
2228
    if (ofs + len <= 32) {
2229
        tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
2230
                            TCGV_LOW(arg2), ofs, len);
2231
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2232
        return;
2233
    }
2234
#endif
2235

    
2236
    mask = (1ull << len) - 1;
2237
    t1 = tcg_temp_new_i64();
2238

    
2239
    if (ofs + len < 64) {
2240
        tcg_gen_andi_i64(t1, arg2, mask);
2241
        tcg_gen_shli_i64(t1, t1, ofs);
2242
    } else {
2243
        tcg_gen_shli_i64(t1, arg2, ofs);
2244
    }
2245
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
2246
    tcg_gen_or_i64(ret, ret, t1);
2247

    
2248
    tcg_temp_free_i64(t1);
2249
}
2250

    
2251
static inline void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low,
2252
                                          TCGv_i32 high)
2253
{
2254
#if TCG_TARGET_REG_BITS == 32
2255
    tcg_gen_mov_i32(TCGV_LOW(dest), low);
2256
    tcg_gen_mov_i32(TCGV_HIGH(dest), high);
2257
#else
2258
    TCGv_i64 tmp = tcg_temp_new_i64();
2259
    /* These extensions are only needed for type correctness.
2260
       We may be able to do better given target specific information.  */
2261
    tcg_gen_extu_i32_i64(tmp, high);
2262
    tcg_gen_extu_i32_i64(dest, low);
2263
    /* If deposit is available, use it.  Otherwise use the extra
2264
       knowledge that we have of the zero-extensions above.  */
2265
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
2266
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
2267
    } else {
2268
        tcg_gen_shli_i64(tmp, tmp, 32);
2269
        tcg_gen_or_i64(dest, dest, tmp);
2270
    }
2271
    tcg_temp_free_i64(tmp);
2272
#endif
2273
}
2274

    
2275
static inline void tcg_gen_concat32_i64(TCGv_i64 dest, TCGv_i64 low,
2276
                                        TCGv_i64 high)
2277
{
2278
    tcg_gen_deposit_i64(dest, low, high, 32, 32);
2279
}
2280

    
2281
static inline void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
2282
{
2283
#if TCG_TARGET_REG_BITS == 32
2284
    tcg_gen_mov_i32(lo, TCGV_LOW(arg));
2285
    tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
2286
#else
2287
    TCGv_i64 t0 = tcg_temp_new_i64();
2288
    tcg_gen_trunc_i64_i32(lo, arg);
2289
    tcg_gen_shri_i64(t0, arg, 32);
2290
    tcg_gen_trunc_i64_i32(hi, t0);
2291
    tcg_temp_free_i64(t0);
2292
#endif
2293
}
2294

    
2295
static inline void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
2296
{
2297
    tcg_gen_ext32u_i64(lo, arg);
2298
    tcg_gen_shri_i64(hi, arg, 32);
2299
}
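
/* Illustrative sketch, not part of the original header: extr32 and concat32
   pair up naturally when only one half of a 64-bit value changes.  The
   function name is an example only.  */
static inline void tcg_gen_example_bump_low32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGv_i64 lo = tcg_temp_new_i64();
    TCGv_i64 hi = tcg_temp_new_i64();

    tcg_gen_extr32_i64(lo, hi, arg);     /* lo = arg[31:0], hi = arg[63:32] */
    tcg_gen_addi_i64(lo, lo, 1);         /* modify the low half only */
    tcg_gen_concat32_i64(ret, lo, hi);   /* recombine; bits above 31 of lo are dropped */
    tcg_temp_free_i64(lo);
    tcg_temp_free_i64(hi);
}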
2300

    
2301
static inline void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret,
2302
                                       TCGv_i32 c1, TCGv_i32 c2,
2303
                                       TCGv_i32 v1, TCGv_i32 v2)
2304
{
2305
    if (TCG_TARGET_HAS_movcond_i32) {
2306
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
2307
    } else {
2308
        TCGv_i32 t0 = tcg_temp_new_i32();
2309
        TCGv_i32 t1 = tcg_temp_new_i32();
2310
        tcg_gen_setcond_i32(cond, t0, c1, c2);
2311
        tcg_gen_neg_i32(t0, t0);
2312
        tcg_gen_and_i32(t1, v1, t0);
2313
        tcg_gen_andc_i32(ret, v2, t0);
2314
        tcg_gen_or_i32(ret, ret, t1);
2315
        tcg_temp_free_i32(t0);
2316
        tcg_temp_free_i32(t1);
2317
    }
2318
}
2319

    
2320
static inline void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret,
2321
                                       TCGv_i64 c1, TCGv_i64 c2,
2322
                                       TCGv_i64 v1, TCGv_i64 v2)
2323
{
2324
#if TCG_TARGET_REG_BITS == 32
2325
    TCGv_i32 t0 = tcg_temp_new_i32();
2326
    TCGv_i32 t1 = tcg_temp_new_i32();
2327
    tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
2328
                     TCGV_LOW(c1), TCGV_HIGH(c1),
2329
                     TCGV_LOW(c2), TCGV_HIGH(c2), cond);
2330

    
2331
    if (TCG_TARGET_HAS_movcond_i32) {
2332
        tcg_gen_movi_i32(t1, 0);
2333
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
2334
                            TCGV_LOW(v1), TCGV_LOW(v2));
2335
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
2336
                            TCGV_HIGH(v1), TCGV_HIGH(v2));
2337
    } else {
2338
        tcg_gen_neg_i32(t0, t0);
2339

    
2340
        tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
2341
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
2342
        tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);
2343

    
2344
        tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
2345
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
2346
        tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
2347
    }
2348
    tcg_temp_free_i32(t0);
2349
    tcg_temp_free_i32(t1);
2350
#else
2351
    if (TCG_TARGET_HAS_movcond_i64) {
2352
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
2353
    } else {
2354
        TCGv_i64 t0 = tcg_temp_new_i64();
2355
        TCGv_i64 t1 = tcg_temp_new_i64();
2356
        tcg_gen_setcond_i64(cond, t0, c1, c2);
2357
        tcg_gen_neg_i64(t0, t0);
2358
        tcg_gen_and_i64(t1, v1, t0);
2359
        tcg_gen_andc_i64(ret, v2, t0);
2360
        tcg_gen_or_i64(ret, ret, t1);
2361
        tcg_temp_free_i64(t0);
2362
        tcg_temp_free_i64(t1);
2363
    }
2364
#endif
2365
}
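
/* Illustrative sketch, not part of the original header: movcond covers the
   usual select patterns directly, e.g. an unsigned maximum.  The function
   name is an example only.  */
static inline void tcg_gen_example_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    /* ret = (a > b unsigned) ? a : b */
    tcg_gen_movcond_i32(TCG_COND_GTU, ret, a, b, a, b);
}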
2366

    
2367
static inline void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
2368
                                    TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
2369
{
2370
    if (TCG_TARGET_HAS_add2_i32) {
2371
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
2372
        /* Allow the optimizer room to replace add2 with two moves.  */
2373
        tcg_gen_op0(INDEX_op_nop);
2374
    } else {
2375
        TCGv_i64 t0 = tcg_temp_new_i64();
2376
        TCGv_i64 t1 = tcg_temp_new_i64();
2377
        tcg_gen_concat_i32_i64(t0, al, ah);
2378
        tcg_gen_concat_i32_i64(t1, bl, bh);
2379
        tcg_gen_add_i64(t0, t0, t1);
2380
        tcg_gen_extr_i64_i32(rl, rh, t0);
2381
        tcg_temp_free_i64(t0);
2382
        tcg_temp_free_i64(t1);
2383
    }
2384
}
2385

    
2386
static inline void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
2387
                                    TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
2388
{
2389
    if (TCG_TARGET_HAS_sub2_i32) {
2390
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
2391
        /* Allow the optimizer room to replace sub2 with two moves.  */
2392
        tcg_gen_op0(INDEX_op_nop);
2393
    } else {
2394
        TCGv_i64 t0 = tcg_temp_new_i64();
2395
        TCGv_i64 t1 = tcg_temp_new_i64();
2396
        tcg_gen_concat_i32_i64(t0, al, ah);
2397
        tcg_gen_concat_i32_i64(t1, bl, bh);
2398
        tcg_gen_sub_i64(t0, t0, t1);
2399
        tcg_gen_extr_i64_i32(rl, rh, t0);
2400
        tcg_temp_free_i64(t0);
2401
        tcg_temp_free_i64(t1);
2402
    }
2403
}
2404

    
2405
static inline void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh,
2406
                                     TCGv_i32 arg1, TCGv_i32 arg2)
2407
{
2408
    if (TCG_TARGET_HAS_mulu2_i32) {
2409
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
2410
        /* Allow the optimizer room to replace mulu2 with two moves.  */
2411
        tcg_gen_op0(INDEX_op_nop);
2412
    } else if (TCG_TARGET_HAS_muluh_i32) {
2413
        TCGv_i32 t = tcg_temp_new_i32();
2414
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
2415
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
2416
        tcg_gen_mov_i32(rl, t);
2417
        tcg_temp_free_i32(t);
2418
    } else {
2419
        TCGv_i64 t0 = tcg_temp_new_i64();
2420
        TCGv_i64 t1 = tcg_temp_new_i64();
2421
        tcg_gen_extu_i32_i64(t0, arg1);
2422
        tcg_gen_extu_i32_i64(t1, arg2);
2423
        tcg_gen_mul_i64(t0, t0, t1);
2424
        tcg_gen_extr_i64_i32(rl, rh, t0);
2425
        tcg_temp_free_i64(t0);
2426
        tcg_temp_free_i64(t1);
2427
    }
2428
}
2429

    
2430
static inline void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh,
2431
                                     TCGv_i32 arg1, TCGv_i32 arg2)
2432
{
2433
    if (TCG_TARGET_HAS_muls2_i32) {
2434
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
2435
        /* Allow the optimizer room to replace muls2 with two moves.  */
2436
        tcg_gen_op0(INDEX_op_nop);
2437
    } else if (TCG_TARGET_HAS_mulsh_i32) {
2438
        TCGv_i32 t = tcg_temp_new_i32();
2439
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
2440
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
2441
        tcg_gen_mov_i32(rl, t);
2442
        tcg_temp_free_i32(t);
2443
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_mulu2_i32) {
2444
        TCGv_i32 t0 = tcg_temp_new_i32();
2445
        TCGv_i32 t1 = tcg_temp_new_i32();
2446
        TCGv_i32 t2 = tcg_temp_new_i32();
2447
        TCGv_i32 t3 = tcg_temp_new_i32();
2448
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, t0, t1, arg1, arg2);
2449
        /* Allow the optimizer room to replace mulu2 with two moves.  */
2450
        tcg_gen_op0(INDEX_op_nop);
2451
        /* Adjust for negative inputs.  */
2452
        tcg_gen_sari_i32(t2, arg1, 31);
2453
        tcg_gen_sari_i32(t3, arg2, 31);
2454
        tcg_gen_and_i32(t2, t2, arg2);
2455
        tcg_gen_and_i32(t3, t3, arg1);
2456
        tcg_gen_sub_i32(rh, t1, t2);
2457
        tcg_gen_sub_i32(rh, rh, t3);
2458
        tcg_gen_mov_i32(rl, t0);
2459
        tcg_temp_free_i32(t0);
2460
        tcg_temp_free_i32(t1);
2461
        tcg_temp_free_i32(t2);
2462
        tcg_temp_free_i32(t3);
2463
    } else {
2464
        TCGv_i64 t0 = tcg_temp_new_i64();
2465
        TCGv_i64 t1 = tcg_temp_new_i64();
2466
        tcg_gen_ext_i32_i64(t0, arg1);
2467
        tcg_gen_ext_i32_i64(t1, arg2);
2468
        tcg_gen_mul_i64(t0, t0, t1);
2469
        tcg_gen_extr_i64_i32(rl, rh, t0);
2470
        tcg_temp_free_i64(t0);
2471
        tcg_temp_free_i64(t1);
2472
    }
2473
}
2474

    
2475
static inline void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2476
                                    TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2477
{
2478
    if (TCG_TARGET_HAS_add2_i64) {
2479
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
2480
        /* Allow the optimizer room to replace add2 with two moves.  */
2481
        tcg_gen_op0(INDEX_op_nop);
2482
    } else {
2483
        TCGv_i64 t0 = tcg_temp_new_i64();
2484
        TCGv_i64 t1 = tcg_temp_new_i64();
2485
        tcg_gen_add_i64(t0, al, bl);
2486
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
2487
        tcg_gen_add_i64(rh, ah, bh);
2488
        tcg_gen_add_i64(rh, rh, t1);
2489
        tcg_gen_mov_i64(rl, t0);
2490
        tcg_temp_free_i64(t0);
2491
        tcg_temp_free_i64(t1);
2492
    }
2493
}
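
/* In the generic fallback above, the carry out of the low-half addition is
   recovered with an unsigned compare: the truncated sum al + bl wrapped
   around exactly when it is smaller than either addend, e.g.
   0xffffffffffffffff + 2 wraps to 1.  */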
2494

    
2495
static inline void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2496
                                    TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2497
{
2498
    if (TCG_TARGET_HAS_sub2_i64) {
2499
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
2500
        /* Allow the optimizer room to replace sub2 with two moves.  */
2501
        tcg_gen_op0(INDEX_op_nop);
2502
    } else {
2503
        TCGv_i64 t0 = tcg_temp_new_i64();
2504
        TCGv_i64 t1 = tcg_temp_new_i64();
2505
        tcg_gen_sub_i64(t0, al, bl);
2506
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
2507
        tcg_gen_sub_i64(rh, ah, bh);
2508
        tcg_gen_sub_i64(rh, rh, t1);
2509
        tcg_gen_mov_i64(rl, t0);
2510
        tcg_temp_free_i64(t0);
2511
        tcg_temp_free_i64(t1);
2512
    }
2513
}
2514

    
2515
static inline void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh,
2516
                                     TCGv_i64 arg1, TCGv_i64 arg2)
2517
{
2518
    if (TCG_TARGET_HAS_mulu2_i64) {
2519
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
2520
        /* Allow the optimizer room to replace mulu2 with two moves.  */
2521
        tcg_gen_op0(INDEX_op_nop);
2522
    } else if (TCG_TARGET_HAS_muluh_i64) {
2523
        TCGv_i64 t = tcg_temp_new_i64();
2524
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2525
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
2526
        tcg_gen_mov_i64(rl, t);
2527
        tcg_temp_free_i64(t);
2548
    } else {
2549
        TCGv_i64 t0 = tcg_temp_new_i64();
2550
        int sizemask = 0;
2551
        /* Return value and both arguments are 64-bit and unsigned.  */
2552
        sizemask |= tcg_gen_sizemask(0, 1, 0);
2553
        sizemask |= tcg_gen_sizemask(1, 1, 0);
2554
        sizemask |= tcg_gen_sizemask(2, 1, 0);
2555
        tcg_gen_mul_i64(t0, arg1, arg2);
2556
        tcg_gen_helper64(tcg_helper_muluh_i64, sizemask, rh, arg1, arg2);
2557
        tcg_gen_mov_i64(rl, t0);
2558
        tcg_temp_free_i64(t0);
2559
    }
2560
}
2561

    
2562
static inline void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh,
2563
                                     TCGv_i64 arg1, TCGv_i64 arg2)
2564
{
2565
    if (TCG_TARGET_HAS_muls2_i64) {
2566
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
2567
        /* Allow the optimizer room to replace muls2 with two moves.  */
2568
        tcg_gen_op0(INDEX_op_nop);
2569
    } else if (TCG_TARGET_HAS_mulsh_i64) {
2570
        TCGv_i64 t = tcg_temp_new_i64();
2571
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2572
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
2573
        tcg_gen_mov_i64(rl, t);
2574
        tcg_temp_free_i64(t);
2575
    } else {
2576
        TCGv_i64 t0 = tcg_temp_new_i64();
2577
        int sizemask = 0;
2578
        /* Return value and both arguments are 64-bit and signed.  */
2579
        sizemask |= tcg_gen_sizemask(0, 1, 1);
2580
        sizemask |= tcg_gen_sizemask(1, 1, 1);
2581
        sizemask |= tcg_gen_sizemask(2, 1, 1);
2582
        tcg_gen_mul_i64(t0, arg1, arg2);
2583
        tcg_gen_helper64(tcg_helper_mulsh_i64, sizemask, rh, arg1, arg2);
2584
        tcg_gen_mov_i64(rl, t0);
2585
        tcg_temp_free_i64(t0);
2586
    }
2587
}
2588

    
2589
/***************************************/
2590
/* QEMU specific operations. Their types depend on the QEMU CPU
2591
   type. */
2592
#ifndef TARGET_LONG_BITS
2593
#error must include QEMU headers
2594
#endif
2595

    
2596
#if TARGET_LONG_BITS == 32
2597
#define TCGv TCGv_i32
2598
#define tcg_temp_new() tcg_temp_new_i32()
2599
#define tcg_global_reg_new tcg_global_reg_new_i32
2600
#define tcg_global_mem_new tcg_global_mem_new_i32
2601
#define tcg_temp_local_new() tcg_temp_local_new_i32()
2602
#define tcg_temp_free tcg_temp_free_i32
2603
#define tcg_gen_qemu_ldst_op tcg_gen_op3i_i32
2604
#define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i32
2605
#define TCGV_UNUSED(x) TCGV_UNUSED_I32(x)
2606
#define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I32(x)
2607
#define TCGV_EQUAL(a, b) TCGV_EQUAL_I32(a, b)
2608
#else
2609
#define TCGv TCGv_i64
2610
#define tcg_temp_new() tcg_temp_new_i64()
2611
#define tcg_global_reg_new tcg_global_reg_new_i64
2612
#define tcg_global_mem_new tcg_global_mem_new_i64
2613
#define tcg_temp_local_new() tcg_temp_local_new_i64()
2614
#define tcg_temp_free tcg_temp_free_i64
2615
#define tcg_gen_qemu_ldst_op tcg_gen_op3i_i64
2616
#define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i64
2617
#define TCGV_UNUSED(x) TCGV_UNUSED_I64(x)
2618
#define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I64(x)
2619
#define TCGV_EQUAL(a, b) TCGV_EQUAL_I64(a, b)
2620
#endif
2621

    
2622
/* debug info: write the PC of the corresponding QEMU CPU instruction */
2623
static inline void tcg_gen_debug_insn_start(uint64_t pc)
2624
{
2625
    /* XXX: must really use a 32 bit size for TCGArg in all cases */
2626
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
2627
    tcg_gen_op2ii(INDEX_op_debug_insn_start, 
2628
                  (uint32_t)(pc), (uint32_t)(pc >> 32));
2629
#else
2630
    tcg_gen_op1i(INDEX_op_debug_insn_start, pc);
2631
#endif
2632
}
2633

    
2634
static inline void tcg_gen_exit_tb(uintptr_t val)
2635
{
2636
    tcg_gen_op1i(INDEX_op_exit_tb, val);
2637
}
2638

    
2639
static inline void tcg_gen_goto_tb(unsigned idx)
2640
{
2641
    /* We only support two chained exits.  */
2642
    tcg_debug_assert(idx <= 1);
2643
#ifdef CONFIG_DEBUG_TCG
2644
    /* Verify that we haven't seen this numbered exit before.  */
2645
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
2646
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
2647
#endif
2648
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
2649
}
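
/* Illustrative sketch, not part of the original header: a translator
   normally ends a directly-chainable block with goto_tb followed by
   exit_tb, tagging the exit value with the jump slot that was used.
   "tb_ptr" stands in for the address of the current TranslationBlock and
   the function name is an example only.  */
static inline void tcg_gen_example_goto_tb(unsigned idx, uintptr_t tb_ptr)
{
    tcg_gen_goto_tb(idx);            /* idx must be 0 or 1, see the assert above */
    tcg_gen_exit_tb(tb_ptr + idx);   /* return to the execution loop */
}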
2650

    
2651
#if TCG_TARGET_REG_BITS == 32
2652
static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
2653
{
2654
#if TARGET_LONG_BITS == 32
2655
    tcg_gen_op3i_i32(INDEX_op_qemu_ld8u, ret, addr, mem_index);
2656
#else
2657
    tcg_gen_op4i_i32(INDEX_op_qemu_ld8u, TCGV_LOW(ret), TCGV_LOW(addr),
2658
                     TCGV_HIGH(addr), mem_index);
2659
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2660
#endif
2661
}
2662

    
2663
static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
2664
{
2665
#if TARGET_LONG_BITS == 32
2666
    tcg_gen_op3i_i32(INDEX_op_qemu_ld8s, ret, addr, mem_index);
2667
#else
2668
    tcg_gen_op4i_i32(INDEX_op_qemu_ld8s, TCGV_LOW(ret), TCGV_LOW(addr),
2669
                     TCGV_HIGH(addr), mem_index);
2670
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2671
#endif
2672
}
2673

    
2674
static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
2675
{
2676
#if TARGET_LONG_BITS == 32
2677
    tcg_gen_op3i_i32(INDEX_op_qemu_ld16u, ret, addr, mem_index);
2678
#else
2679
    tcg_gen_op4i_i32(INDEX_op_qemu_ld16u, TCGV_LOW(ret), TCGV_LOW(addr),
2680
                     TCGV_HIGH(addr), mem_index);
2681
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2682
#endif
2683
}
2684

    
2685
static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
2686
{
2687
#if TARGET_LONG_BITS == 32
2688
    tcg_gen_op3i_i32(INDEX_op_qemu_ld16s, ret, addr, mem_index);
2689
#else
2690
    tcg_gen_op4i_i32(INDEX_op_qemu_ld16s, TCGV_LOW(ret), TCGV_LOW(addr),
2691
                     TCGV_HIGH(addr), mem_index);
2692
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2693
#endif
2694
}
2695

    
2696
static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
2697
{
2698
#if TARGET_LONG_BITS == 32
2699
    tcg_gen_op3i_i32(INDEX_op_qemu_ld32, ret, addr, mem_index);
2700
#else
2701
    tcg_gen_op4i_i32(INDEX_op_qemu_ld32, TCGV_LOW(ret), TCGV_LOW(addr),
2702
                     TCGV_HIGH(addr), mem_index);
2703
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2704
#endif
2705
}
2706

    
2707
static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
2708
{
2709
#if TARGET_LONG_BITS == 32
2710
    tcg_gen_op3i_i32(INDEX_op_qemu_ld32, ret, addr, mem_index);
2711
#else
2712
    tcg_gen_op4i_i32(INDEX_op_qemu_ld32, TCGV_LOW(ret), TCGV_LOW(addr),
2713
                     TCGV_HIGH(addr), mem_index);
2714
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2715
#endif
2716
}
2717

    
2718
static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
2719
{
2720
#if TARGET_LONG_BITS == 32
2721
    tcg_gen_op4i_i32(INDEX_op_qemu_ld64, TCGV_LOW(ret), TCGV_HIGH(ret), addr, mem_index);
2722
#else
2723
    tcg_gen_op5i_i32(INDEX_op_qemu_ld64, TCGV_LOW(ret), TCGV_HIGH(ret),
2724
                     TCGV_LOW(addr), TCGV_HIGH(addr), mem_index);
2725
#endif
2726
}
2727

    
2728
static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
2729
{
2730
#if TARGET_LONG_BITS == 32
2731
    tcg_gen_op3i_i32(INDEX_op_qemu_st8, arg, addr, mem_index);
2732
#else
2733
    tcg_gen_op4i_i32(INDEX_op_qemu_st8, TCGV_LOW(arg), TCGV_LOW(addr),
2734
                     TCGV_HIGH(addr), mem_index);
2735
#endif
2736
}
2737

    
2738
static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
2739
{
2740
#if TARGET_LONG_BITS == 32
2741
    tcg_gen_op3i_i32(INDEX_op_qemu_st16, arg, addr, mem_index);
2742
#else
2743
    tcg_gen_op4i_i32(INDEX_op_qemu_st16, TCGV_LOW(arg), TCGV_LOW(addr),
2744
                     TCGV_HIGH(addr), mem_index);
2745
#endif
2746
}
2747

    
2748
static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
2749
{
2750
#if TARGET_LONG_BITS == 32
2751
    tcg_gen_op3i_i32(INDEX_op_qemu_st32, arg, addr, mem_index);
2752
#else
2753
    tcg_gen_op4i_i32(INDEX_op_qemu_st32, TCGV_LOW(arg), TCGV_LOW(addr),
2754
                     TCGV_HIGH(addr), mem_index);
2755
#endif
2756
}
2757

    
2758
static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
2759
{
2760
#if TARGET_LONG_BITS == 32
2761
    tcg_gen_op4i_i32(INDEX_op_qemu_st64, TCGV_LOW(arg), TCGV_HIGH(arg), addr,
2762
                     mem_index);
2763
#else
2764
    tcg_gen_op5i_i32(INDEX_op_qemu_st64, TCGV_LOW(arg), TCGV_HIGH(arg),
2765
                     TCGV_LOW(addr), TCGV_HIGH(addr), mem_index);
2766
#endif
2767
}
2768

    
2769
#define tcg_gen_ld_ptr(R, A, O) tcg_gen_ld_i32(TCGV_PTR_TO_NAT(R), (A), (O))
2770
#define tcg_gen_discard_ptr(A) tcg_gen_discard_i32(TCGV_PTR_TO_NAT(A))
2771

    
2772
#else /* TCG_TARGET_REG_BITS == 32 */
2773

    
2774
static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
2775
{
2776
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8u, ret, addr, mem_index);
2777
}
2778

    
2779
static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
2780
{
2781
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8s, ret, addr, mem_index);
2782
}
2783

    
2784
static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
2785
{
2786
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16u, ret, addr, mem_index);
2787
}
2788

    
2789
static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
2790
{
2791
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16s, ret, addr, mem_index);
2792
}
2793

    
2794
static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
2795
{
2796
#if TARGET_LONG_BITS == 32
2797
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32, ret, addr, mem_index);
2798
#else
2799
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32u, ret, addr, mem_index);
2800
#endif
2801
}
2802

    
2803
static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
2804
{
2805
#if TARGET_LONG_BITS == 32
2806
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32, ret, addr, mem_index);
2807
#else
2808
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32s, ret, addr, mem_index);
2809
#endif
2810
}
2811

    
2812
static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
2813
{
2814
    tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_ld64, ret, addr, mem_index);
2815
}
2816

    
2817
static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
2818
{
2819
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_st8, arg, addr, mem_index);
2820
}
2821

    
2822
static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
2823
{
2824
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_st16, arg, addr, mem_index);
2825
}
2826

    
2827
static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
2828
{
2829
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_st32, arg, addr, mem_index);
2830
}
2831

    
2832
static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
2833
{
2834
    tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_st64, arg, addr, mem_index);
2835
}
2836

    
2837
#define tcg_gen_ld_ptr(R, A, O) tcg_gen_ld_i64(TCGV_PTR_TO_NAT(R), (A), (O))
2838
#define tcg_gen_discard_ptr(A) tcg_gen_discard_i64(TCGV_PTR_TO_NAT(A))
2839

    
2840
#endif /* TCG_TARGET_REG_BITS != 32 */
2841

    
2842
#if TARGET_LONG_BITS == 64
2843
#define tcg_gen_movi_tl tcg_gen_movi_i64
2844
#define tcg_gen_mov_tl tcg_gen_mov_i64
2845
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i64
2846
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i64
2847
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i64
2848
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i64
2849
#define tcg_gen_ld32u_tl tcg_gen_ld32u_i64
2850
#define tcg_gen_ld32s_tl tcg_gen_ld32s_i64
2851
#define tcg_gen_ld_tl tcg_gen_ld_i64
2852
#define tcg_gen_st8_tl tcg_gen_st8_i64
2853
#define tcg_gen_st16_tl tcg_gen_st16_i64
2854
#define tcg_gen_st32_tl tcg_gen_st32_i64
2855
#define tcg_gen_st_tl tcg_gen_st_i64
2856
#define tcg_gen_add_tl tcg_gen_add_i64
2857
#define tcg_gen_addi_tl tcg_gen_addi_i64
2858
#define tcg_gen_sub_tl tcg_gen_sub_i64
2859
#define tcg_gen_neg_tl tcg_gen_neg_i64
2860
#define tcg_gen_subfi_tl tcg_gen_subfi_i64
2861
#define tcg_gen_subi_tl tcg_gen_subi_i64
2862
#define tcg_gen_and_tl tcg_gen_and_i64
2863
#define tcg_gen_andi_tl tcg_gen_andi_i64
2864
#define tcg_gen_or_tl tcg_gen_or_i64
2865
#define tcg_gen_ori_tl tcg_gen_ori_i64
2866
#define tcg_gen_xor_tl tcg_gen_xor_i64
2867
#define tcg_gen_xori_tl tcg_gen_xori_i64
2868
#define tcg_gen_not_tl tcg_gen_not_i64
2869
#define tcg_gen_shl_tl tcg_gen_shl_i64
2870
#define tcg_gen_shli_tl tcg_gen_shli_i64
2871
#define tcg_gen_shr_tl tcg_gen_shr_i64
2872
#define tcg_gen_shri_tl tcg_gen_shri_i64
2873
#define tcg_gen_sar_tl tcg_gen_sar_i64
2874
#define tcg_gen_sari_tl tcg_gen_sari_i64
2875
#define tcg_gen_brcond_tl tcg_gen_brcond_i64
2876
#define tcg_gen_brcondi_tl tcg_gen_brcondi_i64
2877
#define tcg_gen_setcond_tl tcg_gen_setcond_i64
2878
#define tcg_gen_setcondi_tl tcg_gen_setcondi_i64
2879
#define tcg_gen_mul_tl tcg_gen_mul_i64
2880
#define tcg_gen_muli_tl tcg_gen_muli_i64
2881
#define tcg_gen_div_tl tcg_gen_div_i64
2882
#define tcg_gen_rem_tl tcg_gen_rem_i64
2883
#define tcg_gen_divu_tl tcg_gen_divu_i64
2884
#define tcg_gen_remu_tl tcg_gen_remu_i64
2885
#define tcg_gen_discard_tl tcg_gen_discard_i64
2886
#define tcg_gen_trunc_tl_i32 tcg_gen_trunc_i64_i32
2887
#define tcg_gen_trunc_i64_tl tcg_gen_mov_i64
2888
#define tcg_gen_extu_i32_tl tcg_gen_extu_i32_i64
2889
#define tcg_gen_ext_i32_tl tcg_gen_ext_i32_i64
2890
#define tcg_gen_extu_tl_i64 tcg_gen_mov_i64
2891
#define tcg_gen_ext_tl_i64 tcg_gen_mov_i64
2892
#define tcg_gen_ext8u_tl tcg_gen_ext8u_i64
2893
#define tcg_gen_ext8s_tl tcg_gen_ext8s_i64
2894
#define tcg_gen_ext16u_tl tcg_gen_ext16u_i64
2895
#define tcg_gen_ext16s_tl tcg_gen_ext16s_i64
2896
#define tcg_gen_ext32u_tl tcg_gen_ext32u_i64
2897
#define tcg_gen_ext32s_tl tcg_gen_ext32s_i64
2898
#define tcg_gen_bswap16_tl tcg_gen_bswap16_i64
2899
#define tcg_gen_bswap32_tl tcg_gen_bswap32_i64
2900
#define tcg_gen_bswap64_tl tcg_gen_bswap64_i64
2901
#define tcg_gen_concat_tl_i64 tcg_gen_concat32_i64
2902
#define tcg_gen_extr_i64_tl tcg_gen_extr32_i64
2903
#define tcg_gen_andc_tl tcg_gen_andc_i64
2904
#define tcg_gen_eqv_tl tcg_gen_eqv_i64
2905
#define tcg_gen_nand_tl tcg_gen_nand_i64
2906
#define tcg_gen_nor_tl tcg_gen_nor_i64
2907
#define tcg_gen_orc_tl tcg_gen_orc_i64
2908
#define tcg_gen_rotl_tl tcg_gen_rotl_i64
2909
#define tcg_gen_rotli_tl tcg_gen_rotli_i64
2910
#define tcg_gen_rotr_tl tcg_gen_rotr_i64
2911
#define tcg_gen_rotri_tl tcg_gen_rotri_i64
2912
#define tcg_gen_deposit_tl tcg_gen_deposit_i64
2913
#define tcg_const_tl tcg_const_i64
2914
#define tcg_const_local_tl tcg_const_local_i64
2915
#define tcg_gen_movcond_tl tcg_gen_movcond_i64
2916
#define tcg_gen_add2_tl tcg_gen_add2_i64
2917
#define tcg_gen_sub2_tl tcg_gen_sub2_i64
2918
#define tcg_gen_mulu2_tl tcg_gen_mulu2_i64
2919
#define tcg_gen_muls2_tl tcg_gen_muls2_i64
2920
#else
2921
#define tcg_gen_movi_tl tcg_gen_movi_i32
2922
#define tcg_gen_mov_tl tcg_gen_mov_i32
2923
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i32
2924
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i32
2925
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i32
2926
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i32
2927
#define tcg_gen_ld32u_tl tcg_gen_ld_i32
2928
#define tcg_gen_ld32s_tl tcg_gen_ld_i32
2929
#define tcg_gen_ld_tl tcg_gen_ld_i32
2930
#define tcg_gen_st8_tl tcg_gen_st8_i32
2931
#define tcg_gen_st16_tl tcg_gen_st16_i32
2932
#define tcg_gen_st32_tl tcg_gen_st_i32
2933
#define tcg_gen_st_tl tcg_gen_st_i32
2934
#define tcg_gen_add_tl tcg_gen_add_i32
2935
#define tcg_gen_addi_tl tcg_gen_addi_i32
2936
#define tcg_gen_sub_tl tcg_gen_sub_i32
2937
#define tcg_gen_neg_tl tcg_gen_neg_i32
2938
#define tcg_gen_subfi_tl tcg_gen_subfi_i32
2939
#define tcg_gen_subi_tl tcg_gen_subi_i32
2940
#define tcg_gen_and_tl tcg_gen_and_i32
2941
#define tcg_gen_andi_tl tcg_gen_andi_i32
2942
#define tcg_gen_or_tl tcg_gen_or_i32
2943
#define tcg_gen_ori_tl tcg_gen_ori_i32
2944
#define tcg_gen_xor_tl tcg_gen_xor_i32
2945
#define tcg_gen_xori_tl tcg_gen_xori_i32
2946
#define tcg_gen_not_tl tcg_gen_not_i32
2947
#define tcg_gen_shl_tl tcg_gen_shl_i32
2948
#define tcg_gen_shli_tl tcg_gen_shli_i32
2949
#define tcg_gen_shr_tl tcg_gen_shr_i32
2950
#define tcg_gen_shri_tl tcg_gen_shri_i32
2951
#define tcg_gen_sar_tl tcg_gen_sar_i32
2952
#define tcg_gen_sari_tl tcg_gen_sari_i32
2953
#define tcg_gen_brcond_tl tcg_gen_brcond_i32
2954
#define tcg_gen_brcondi_tl tcg_gen_brcondi_i32
2955
#define tcg_gen_setcond_tl tcg_gen_setcond_i32
2956
#define tcg_gen_setcondi_tl tcg_gen_setcondi_i32
2957
#define tcg_gen_mul_tl tcg_gen_mul_i32
2958
#define tcg_gen_muli_tl tcg_gen_muli_i32
2959
#define tcg_gen_div_tl tcg_gen_div_i32
2960
#define tcg_gen_rem_tl tcg_gen_rem_i32
2961
#define tcg_gen_divu_tl tcg_gen_divu_i32
2962
#define tcg_gen_remu_tl tcg_gen_remu_i32
2963
#define tcg_gen_discard_tl tcg_gen_discard_i32
2964
#define tcg_gen_trunc_tl_i32 tcg_gen_mov_i32
2965
#define tcg_gen_trunc_i64_tl tcg_gen_trunc_i64_i32
2966
#define tcg_gen_extu_i32_tl tcg_gen_mov_i32
2967
#define tcg_gen_ext_i32_tl tcg_gen_mov_i32
2968
#define tcg_gen_extu_tl_i64 tcg_gen_extu_i32_i64
2969
#define tcg_gen_ext_tl_i64 tcg_gen_ext_i32_i64
2970
#define tcg_gen_ext8u_tl tcg_gen_ext8u_i32
2971
#define tcg_gen_ext8s_tl tcg_gen_ext8s_i32
2972
#define tcg_gen_ext16u_tl tcg_gen_ext16u_i32
2973
#define tcg_gen_ext16s_tl tcg_gen_ext16s_i32
2974
#define tcg_gen_ext32u_tl tcg_gen_mov_i32
2975
#define tcg_gen_ext32s_tl tcg_gen_mov_i32
2976
#define tcg_gen_bswap16_tl tcg_gen_bswap16_i32
2977
#define tcg_gen_bswap32_tl tcg_gen_bswap32_i32
2978
#define tcg_gen_concat_tl_i64 tcg_gen_concat_i32_i64
2979
#define tcg_gen_extr_i64_tl tcg_gen_extr_i64_i32
2980
#define tcg_gen_andc_tl tcg_gen_andc_i32
2981
#define tcg_gen_eqv_tl tcg_gen_eqv_i32
2982
#define tcg_gen_nand_tl tcg_gen_nand_i32
2983
#define tcg_gen_nor_tl tcg_gen_nor_i32
2984
#define tcg_gen_orc_tl tcg_gen_orc_i32
2985
#define tcg_gen_rotl_tl tcg_gen_rotl_i32
2986
#define tcg_gen_rotli_tl tcg_gen_rotli_i32
2987
#define tcg_gen_rotr_tl tcg_gen_rotr_i32
2988
#define tcg_gen_rotri_tl tcg_gen_rotri_i32
2989
#define tcg_gen_deposit_tl tcg_gen_deposit_i32
2990
#define tcg_const_tl tcg_const_i32
2991
#define tcg_const_local_tl tcg_const_local_i32
2992
#define tcg_gen_movcond_tl tcg_gen_movcond_i32
2993
#define tcg_gen_add2_tl tcg_gen_add2_i32
2994
#define tcg_gen_sub2_tl tcg_gen_sub2_i32
2995
#define tcg_gen_mulu2_tl tcg_gen_mulu2_i32
2996
#define tcg_gen_muls2_tl tcg_gen_muls2_i32
2997
#endif
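
/* Illustrative sketch, not part of the original header: with the TCGv and
   *_tl aliases above, guest-width code can be written once for 32-bit and
   64-bit targets, e.g. a 32-bit read-modify-write of guest memory.  The
   function name is an example only; mem_index selects the MMU mode as in
   the qemu_ld/st helpers.  */
static inline void tcg_gen_example_increment_m32(TCGv addr, int mem_index)
{
    TCGv tmp = tcg_temp_new();

    tcg_gen_qemu_ld32u(tmp, addr, mem_index);   /* 32-bit load from guest addr */
    tcg_gen_addi_tl(tmp, tmp, 1);
    tcg_gen_qemu_st32(tmp, addr, mem_index);    /* store it back */
    tcg_temp_free(tmp);
}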
2998

    
2999
#if TCG_TARGET_REG_BITS == 32
3000
#define tcg_gen_add_ptr(R, A, B) tcg_gen_add_i32(TCGV_PTR_TO_NAT(R), \
3001
                                               TCGV_PTR_TO_NAT(A), \
3002
                                               TCGV_PTR_TO_NAT(B))
3003
#define tcg_gen_addi_ptr(R, A, B) tcg_gen_addi_i32(TCGV_PTR_TO_NAT(R), \
3004
                                                 TCGV_PTR_TO_NAT(A), (B))
3005
#define tcg_gen_ext_i32_ptr(R, A) tcg_gen_mov_i32(TCGV_PTR_TO_NAT(R), (A))
3006
#else /* TCG_TARGET_REG_BITS == 32 */
3007
#define tcg_gen_add_ptr(R, A, B) tcg_gen_add_i64(TCGV_PTR_TO_NAT(R), \
3008
                                               TCGV_PTR_TO_NAT(A), \
3009
                                               TCGV_PTR_TO_NAT(B))
3010
#define tcg_gen_addi_ptr(R, A, B) tcg_gen_addi_i64(TCGV_PTR_TO_NAT(R),   \
3011
                                                 TCGV_PTR_TO_NAT(A), (B))
3012
#define tcg_gen_ext_i32_ptr(R, A) tcg_gen_ext_i32_i64(TCGV_PTR_TO_NAT(R), (A))
3013
#endif /* TCG_TARGET_REG_BITS != 32 */