root / tcg / optimize.c @ 34b5d2c6
History | View | Annotate | Download (35.6 kB)
1 | 8f2e8c07 | Kirill Batuzov | /*
|
---|---|---|---|
2 | 8f2e8c07 | Kirill Batuzov | * Optimizations for Tiny Code Generator for QEMU
|
3 | 8f2e8c07 | Kirill Batuzov | *
|
4 | 8f2e8c07 | Kirill Batuzov | * Copyright (c) 2010 Samsung Electronics.
|
5 | 8f2e8c07 | Kirill Batuzov | * Contributed by Kirill Batuzov <batuzovk@ispras.ru>
|
6 | 8f2e8c07 | Kirill Batuzov | *
|
7 | 8f2e8c07 | Kirill Batuzov | * Permission is hereby granted, free of charge, to any person obtaining a copy
|
8 | 8f2e8c07 | Kirill Batuzov | * of this software and associated documentation files (the "Software"), to deal
|
9 | 8f2e8c07 | Kirill Batuzov | * in the Software without restriction, including without limitation the rights
|
10 | 8f2e8c07 | Kirill Batuzov | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 | 8f2e8c07 | Kirill Batuzov | * copies of the Software, and to permit persons to whom the Software is
|
12 | 8f2e8c07 | Kirill Batuzov | * furnished to do so, subject to the following conditions:
|
13 | 8f2e8c07 | Kirill Batuzov | *
|
14 | 8f2e8c07 | Kirill Batuzov | * The above copyright notice and this permission notice shall be included in
|
15 | 8f2e8c07 | Kirill Batuzov | * all copies or substantial portions of the Software.
|
16 | 8f2e8c07 | Kirill Batuzov | *
|
17 | 8f2e8c07 | Kirill Batuzov | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
18 | 8f2e8c07 | Kirill Batuzov | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
19 | 8f2e8c07 | Kirill Batuzov | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
20 | 8f2e8c07 | Kirill Batuzov | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
21 | 8f2e8c07 | Kirill Batuzov | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
22 | 8f2e8c07 | Kirill Batuzov | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
23 | 8f2e8c07 | Kirill Batuzov | * THE SOFTWARE.
|
24 | 8f2e8c07 | Kirill Batuzov | */
|
25 | 8f2e8c07 | Kirill Batuzov | |
26 | 8f2e8c07 | Kirill Batuzov | #include "config.h" |
27 | 8f2e8c07 | Kirill Batuzov | |
28 | 8f2e8c07 | Kirill Batuzov | #include <stdlib.h> |
29 | 8f2e8c07 | Kirill Batuzov | #include <stdio.h> |
30 | 8f2e8c07 | Kirill Batuzov | |
31 | 8f2e8c07 | Kirill Batuzov | #include "qemu-common.h" |
32 | 8f2e8c07 | Kirill Batuzov | #include "tcg-op.h" |
33 | 8f2e8c07 | Kirill Batuzov | |
/* Expand to the pair of case labels for the 32-bit and 64-bit variants
   of a size-generic TCG opcode, for use inside a switch on a TCGOpcode. */
#define CASE_OP_32_64(x)                                \
        glue(glue(case INDEX_op_, x), _i32):            \
        glue(glue(case INDEX_op_, x), _i64)
37 | 8f2e8c07 | Kirill Batuzov | |
/* What the optimizer currently knows about a temp's contents. */
typedef enum {
    TCG_TEMP_UNDEF = 0,     /* nothing known; 0 so zero-init means undef */
    TCG_TEMP_CONST,         /* temp holds the constant stored in 'val' */
    TCG_TEMP_COPY,          /* temp is a copy of the temps linked through
                               prev_copy/next_copy */
} tcg_temp_state;

/* Per-temp optimizer state, shadowing the temps of the TCGContext. */
struct tcg_temp_info {
    tcg_temp_state state;
    uint16_t prev_copy;     /* doubly linked circular list of copies */
    uint16_t next_copy;
    tcg_target_ulong val;   /* constant value when state == TCG_TEMP_CONST */
    tcg_target_ulong mask;  /* conservative mask of bits that may be set;
                               -1 means nothing known (see reset_temp) */
};

static struct tcg_temp_info temps[TCG_MAX_TEMPS];
53 | 22613af4 | Kirill Batuzov | |
54 | e590d4e6 | Aurelien Jarno | /* Reset TEMP's state to TCG_TEMP_UNDEF. If TEMP only had one copy, remove
|
55 | e590d4e6 | Aurelien Jarno | the copy flag from the left temp. */
|
56 | e590d4e6 | Aurelien Jarno | static void reset_temp(TCGArg temp) |
57 | 22613af4 | Kirill Batuzov | { |
58 | e590d4e6 | Aurelien Jarno | if (temps[temp].state == TCG_TEMP_COPY) {
|
59 | e590d4e6 | Aurelien Jarno | if (temps[temp].prev_copy == temps[temp].next_copy) {
|
60 | e590d4e6 | Aurelien Jarno | temps[temps[temp].next_copy].state = TCG_TEMP_UNDEF; |
61 | e590d4e6 | Aurelien Jarno | } else {
|
62 | e590d4e6 | Aurelien Jarno | temps[temps[temp].next_copy].prev_copy = temps[temp].prev_copy; |
63 | e590d4e6 | Aurelien Jarno | temps[temps[temp].prev_copy].next_copy = temps[temp].next_copy; |
64 | 22613af4 | Kirill Batuzov | } |
65 | 22613af4 | Kirill Batuzov | } |
66 | 48b56ce1 | Aurelien Jarno | temps[temp].state = TCG_TEMP_UNDEF; |
67 | 3a9d8b17 | Paolo Bonzini | temps[temp].mask = -1;
|
68 | 22613af4 | Kirill Batuzov | } |
69 | 22613af4 | Kirill Batuzov | |
70 | d193a14a | Paolo Bonzini | /* Reset all temporaries, given that there are NB_TEMPS of them. */
|
71 | d193a14a | Paolo Bonzini | static void reset_all_temps(int nb_temps) |
72 | d193a14a | Paolo Bonzini | { |
73 | d193a14a | Paolo Bonzini | int i;
|
74 | d193a14a | Paolo Bonzini | for (i = 0; i < nb_temps; i++) { |
75 | d193a14a | Paolo Bonzini | temps[i].state = TCG_TEMP_UNDEF; |
76 | 3a9d8b17 | Paolo Bonzini | temps[i].mask = -1;
|
77 | d193a14a | Paolo Bonzini | } |
78 | d193a14a | Paolo Bonzini | } |
79 | d193a14a | Paolo Bonzini | |
80 | fe0de7aa | Blue Swirl | static int op_bits(TCGOpcode op) |
81 | 22613af4 | Kirill Batuzov | { |
82 | 8399ad59 | Richard Henderson | const TCGOpDef *def = &tcg_op_defs[op];
|
83 | 8399ad59 | Richard Henderson | return def->flags & TCG_OPF_64BIT ? 64 : 32; |
84 | 22613af4 | Kirill Batuzov | } |
85 | 22613af4 | Kirill Batuzov | |
86 | fe0de7aa | Blue Swirl | static TCGOpcode op_to_movi(TCGOpcode op)
|
87 | 22613af4 | Kirill Batuzov | { |
88 | 22613af4 | Kirill Batuzov | switch (op_bits(op)) {
|
89 | 22613af4 | Kirill Batuzov | case 32: |
90 | 22613af4 | Kirill Batuzov | return INDEX_op_movi_i32;
|
91 | 22613af4 | Kirill Batuzov | case 64: |
92 | 22613af4 | Kirill Batuzov | return INDEX_op_movi_i64;
|
93 | 22613af4 | Kirill Batuzov | default:
|
94 | 22613af4 | Kirill Batuzov | fprintf(stderr, "op_to_movi: unexpected return value of "
|
95 | 22613af4 | Kirill Batuzov | "function op_bits.\n");
|
96 | 22613af4 | Kirill Batuzov | tcg_abort(); |
97 | 22613af4 | Kirill Batuzov | } |
98 | 22613af4 | Kirill Batuzov | } |
99 | 22613af4 | Kirill Batuzov | |
100 | e590d4e6 | Aurelien Jarno | static TCGArg find_better_copy(TCGContext *s, TCGArg temp)
|
101 | e590d4e6 | Aurelien Jarno | { |
102 | e590d4e6 | Aurelien Jarno | TCGArg i; |
103 | e590d4e6 | Aurelien Jarno | |
104 | e590d4e6 | Aurelien Jarno | /* If this is already a global, we can't do better. */
|
105 | e590d4e6 | Aurelien Jarno | if (temp < s->nb_globals) {
|
106 | e590d4e6 | Aurelien Jarno | return temp;
|
107 | e590d4e6 | Aurelien Jarno | } |
108 | e590d4e6 | Aurelien Jarno | |
109 | e590d4e6 | Aurelien Jarno | /* Search for a global first. */
|
110 | e590d4e6 | Aurelien Jarno | for (i = temps[temp].next_copy ; i != temp ; i = temps[i].next_copy) {
|
111 | e590d4e6 | Aurelien Jarno | if (i < s->nb_globals) {
|
112 | e590d4e6 | Aurelien Jarno | return i;
|
113 | e590d4e6 | Aurelien Jarno | } |
114 | e590d4e6 | Aurelien Jarno | } |
115 | e590d4e6 | Aurelien Jarno | |
116 | e590d4e6 | Aurelien Jarno | /* If it is a temp, search for a temp local. */
|
117 | e590d4e6 | Aurelien Jarno | if (!s->temps[temp].temp_local) {
|
118 | e590d4e6 | Aurelien Jarno | for (i = temps[temp].next_copy ; i != temp ; i = temps[i].next_copy) {
|
119 | e590d4e6 | Aurelien Jarno | if (s->temps[i].temp_local) {
|
120 | e590d4e6 | Aurelien Jarno | return i;
|
121 | e590d4e6 | Aurelien Jarno | } |
122 | e590d4e6 | Aurelien Jarno | } |
123 | e590d4e6 | Aurelien Jarno | } |
124 | e590d4e6 | Aurelien Jarno | |
125 | e590d4e6 | Aurelien Jarno | /* Failure to find a better representation, return the same temp. */
|
126 | e590d4e6 | Aurelien Jarno | return temp;
|
127 | e590d4e6 | Aurelien Jarno | } |
128 | e590d4e6 | Aurelien Jarno | |
129 | e590d4e6 | Aurelien Jarno | static bool temps_are_copies(TCGArg arg1, TCGArg arg2) |
130 | e590d4e6 | Aurelien Jarno | { |
131 | e590d4e6 | Aurelien Jarno | TCGArg i; |
132 | e590d4e6 | Aurelien Jarno | |
133 | e590d4e6 | Aurelien Jarno | if (arg1 == arg2) {
|
134 | e590d4e6 | Aurelien Jarno | return true; |
135 | e590d4e6 | Aurelien Jarno | } |
136 | e590d4e6 | Aurelien Jarno | |
137 | e590d4e6 | Aurelien Jarno | if (temps[arg1].state != TCG_TEMP_COPY
|
138 | e590d4e6 | Aurelien Jarno | || temps[arg2].state != TCG_TEMP_COPY) { |
139 | e590d4e6 | Aurelien Jarno | return false; |
140 | e590d4e6 | Aurelien Jarno | } |
141 | e590d4e6 | Aurelien Jarno | |
142 | e590d4e6 | Aurelien Jarno | for (i = temps[arg1].next_copy ; i != arg1 ; i = temps[i].next_copy) {
|
143 | e590d4e6 | Aurelien Jarno | if (i == arg2) {
|
144 | e590d4e6 | Aurelien Jarno | return true; |
145 | e590d4e6 | Aurelien Jarno | } |
146 | e590d4e6 | Aurelien Jarno | } |
147 | e590d4e6 | Aurelien Jarno | |
148 | e590d4e6 | Aurelien Jarno | return false; |
149 | e590d4e6 | Aurelien Jarno | } |
150 | e590d4e6 | Aurelien Jarno | |
/* Emit a mov DST, SRC through GEN_ARGS and record DST as a copy of SRC.
   SRC must not be a known constant (constants go through
   tcg_opt_gen_movi instead).  DST always inherits SRC's known-bits
   mask; it joins SRC's circular copy list only when both temps have
   the same type. */
static void tcg_opt_gen_mov(TCGContext *s, TCGArg *gen_args,
                            TCGArg dst, TCGArg src)
{
    reset_temp(dst);
    temps[dst].mask = temps[src].mask;
    assert(temps[src].state != TCG_TEMP_CONST);

    if (s->temps[src].type == s->temps[dst].type) {
        if (temps[src].state != TCG_TEMP_COPY) {
            /* SRC starts a new copy list containing only itself. */
            temps[src].state = TCG_TEMP_COPY;
            temps[src].next_copy = src;
            temps[src].prev_copy = src;
        }
        /* Insert DST into the circular list right after SRC. */
        temps[dst].state = TCG_TEMP_COPY;
        temps[dst].next_copy = temps[src].next_copy;
        temps[dst].prev_copy = src;
        temps[temps[dst].next_copy].prev_copy = dst;
        temps[src].next_copy = dst;
    }

    gen_args[0] = dst;
    gen_args[1] = src;
}
174 | 22613af4 | Kirill Batuzov | |
175 | e590d4e6 | Aurelien Jarno | static void tcg_opt_gen_movi(TCGArg *gen_args, TCGArg dst, TCGArg val) |
176 | 22613af4 | Kirill Batuzov | { |
177 | 3a9d8b17 | Paolo Bonzini | reset_temp(dst); |
178 | 3a9d8b17 | Paolo Bonzini | temps[dst].state = TCG_TEMP_CONST; |
179 | 3a9d8b17 | Paolo Bonzini | temps[dst].val = val; |
180 | 3a9d8b17 | Paolo Bonzini | temps[dst].mask = val; |
181 | 3a9d8b17 | Paolo Bonzini | gen_args[0] = dst;
|
182 | 3a9d8b17 | Paolo Bonzini | gen_args[1] = val;
|
183 | 22613af4 | Kirill Batuzov | } |
184 | 22613af4 | Kirill Batuzov | |
185 | fe0de7aa | Blue Swirl | static TCGOpcode op_to_mov(TCGOpcode op)
|
186 | 53108fb5 | Kirill Batuzov | { |
187 | 53108fb5 | Kirill Batuzov | switch (op_bits(op)) {
|
188 | 53108fb5 | Kirill Batuzov | case 32: |
189 | 53108fb5 | Kirill Batuzov | return INDEX_op_mov_i32;
|
190 | 53108fb5 | Kirill Batuzov | case 64: |
191 | 53108fb5 | Kirill Batuzov | return INDEX_op_mov_i64;
|
192 | 53108fb5 | Kirill Batuzov | default:
|
193 | 53108fb5 | Kirill Batuzov | fprintf(stderr, "op_to_mov: unexpected return value of "
|
194 | 53108fb5 | Kirill Batuzov | "function op_bits.\n");
|
195 | 53108fb5 | Kirill Batuzov | tcg_abort(); |
196 | 53108fb5 | Kirill Batuzov | } |
197 | 53108fb5 | Kirill Batuzov | } |
198 | 53108fb5 | Kirill Batuzov | |
199 | fe0de7aa | Blue Swirl | static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
|
200 | 53108fb5 | Kirill Batuzov | { |
201 | 03271524 | Richard Henderson | uint64_t l64, h64; |
202 | 03271524 | Richard Henderson | |
203 | 53108fb5 | Kirill Batuzov | switch (op) {
|
204 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(add): |
205 | 53108fb5 | Kirill Batuzov | return x + y;
|
206 | 53108fb5 | Kirill Batuzov | |
207 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(sub): |
208 | 53108fb5 | Kirill Batuzov | return x - y;
|
209 | 53108fb5 | Kirill Batuzov | |
210 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(mul): |
211 | 53108fb5 | Kirill Batuzov | return x * y;
|
212 | 53108fb5 | Kirill Batuzov | |
213 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(and): |
214 | 9a81090b | Kirill Batuzov | return x & y;
|
215 | 9a81090b | Kirill Batuzov | |
216 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(or): |
217 | 9a81090b | Kirill Batuzov | return x | y;
|
218 | 9a81090b | Kirill Batuzov | |
219 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(xor): |
220 | 9a81090b | Kirill Batuzov | return x ^ y;
|
221 | 9a81090b | Kirill Batuzov | |
222 | 55c0975c | Kirill Batuzov | case INDEX_op_shl_i32:
|
223 | 55c0975c | Kirill Batuzov | return (uint32_t)x << (uint32_t)y;
|
224 | 55c0975c | Kirill Batuzov | |
225 | 55c0975c | Kirill Batuzov | case INDEX_op_shl_i64:
|
226 | 55c0975c | Kirill Batuzov | return (uint64_t)x << (uint64_t)y;
|
227 | 55c0975c | Kirill Batuzov | |
228 | 55c0975c | Kirill Batuzov | case INDEX_op_shr_i32:
|
229 | 55c0975c | Kirill Batuzov | return (uint32_t)x >> (uint32_t)y;
|
230 | 55c0975c | Kirill Batuzov | |
231 | 55c0975c | Kirill Batuzov | case INDEX_op_shr_i64:
|
232 | 55c0975c | Kirill Batuzov | return (uint64_t)x >> (uint64_t)y;
|
233 | 55c0975c | Kirill Batuzov | |
234 | 55c0975c | Kirill Batuzov | case INDEX_op_sar_i32:
|
235 | 55c0975c | Kirill Batuzov | return (int32_t)x >> (int32_t)y;
|
236 | 55c0975c | Kirill Batuzov | |
237 | 55c0975c | Kirill Batuzov | case INDEX_op_sar_i64:
|
238 | 55c0975c | Kirill Batuzov | return (int64_t)x >> (int64_t)y;
|
239 | 55c0975c | Kirill Batuzov | |
240 | 55c0975c | Kirill Batuzov | case INDEX_op_rotr_i32:
|
241 | 25c4d9cc | Richard Henderson | x = ((uint32_t)x << (32 - y)) | ((uint32_t)x >> y);
|
242 | 55c0975c | Kirill Batuzov | return x;
|
243 | 55c0975c | Kirill Batuzov | |
244 | 55c0975c | Kirill Batuzov | case INDEX_op_rotr_i64:
|
245 | 25c4d9cc | Richard Henderson | x = ((uint64_t)x << (64 - y)) | ((uint64_t)x >> y);
|
246 | 55c0975c | Kirill Batuzov | return x;
|
247 | 55c0975c | Kirill Batuzov | |
248 | 55c0975c | Kirill Batuzov | case INDEX_op_rotl_i32:
|
249 | 25c4d9cc | Richard Henderson | x = ((uint32_t)x << y) | ((uint32_t)x >> (32 - y));
|
250 | 55c0975c | Kirill Batuzov | return x;
|
251 | 55c0975c | Kirill Batuzov | |
252 | 55c0975c | Kirill Batuzov | case INDEX_op_rotl_i64:
|
253 | 25c4d9cc | Richard Henderson | x = ((uint64_t)x << y) | ((uint64_t)x >> (64 - y));
|
254 | 55c0975c | Kirill Batuzov | return x;
|
255 | 25c4d9cc | Richard Henderson | |
256 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(not): |
257 | a640f031 | Kirill Batuzov | return ~x;
|
258 | 25c4d9cc | Richard Henderson | |
259 | cb25c80a | Richard Henderson | CASE_OP_32_64(neg): |
260 | cb25c80a | Richard Henderson | return -x;
|
261 | cb25c80a | Richard Henderson | |
262 | cb25c80a | Richard Henderson | CASE_OP_32_64(andc): |
263 | cb25c80a | Richard Henderson | return x & ~y;
|
264 | cb25c80a | Richard Henderson | |
265 | cb25c80a | Richard Henderson | CASE_OP_32_64(orc): |
266 | cb25c80a | Richard Henderson | return x | ~y;
|
267 | cb25c80a | Richard Henderson | |
268 | cb25c80a | Richard Henderson | CASE_OP_32_64(eqv): |
269 | cb25c80a | Richard Henderson | return ~(x ^ y);
|
270 | cb25c80a | Richard Henderson | |
271 | cb25c80a | Richard Henderson | CASE_OP_32_64(nand): |
272 | cb25c80a | Richard Henderson | return ~(x & y);
|
273 | cb25c80a | Richard Henderson | |
274 | cb25c80a | Richard Henderson | CASE_OP_32_64(nor): |
275 | cb25c80a | Richard Henderson | return ~(x | y);
|
276 | cb25c80a | Richard Henderson | |
277 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext8s): |
278 | a640f031 | Kirill Batuzov | return (int8_t)x;
|
279 | 25c4d9cc | Richard Henderson | |
280 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext16s): |
281 | a640f031 | Kirill Batuzov | return (int16_t)x;
|
282 | 25c4d9cc | Richard Henderson | |
283 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext8u): |
284 | a640f031 | Kirill Batuzov | return (uint8_t)x;
|
285 | 25c4d9cc | Richard Henderson | |
286 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext16u): |
287 | a640f031 | Kirill Batuzov | return (uint16_t)x;
|
288 | a640f031 | Kirill Batuzov | |
289 | a640f031 | Kirill Batuzov | case INDEX_op_ext32s_i64:
|
290 | a640f031 | Kirill Batuzov | return (int32_t)x;
|
291 | a640f031 | Kirill Batuzov | |
292 | a640f031 | Kirill Batuzov | case INDEX_op_ext32u_i64:
|
293 | a640f031 | Kirill Batuzov | return (uint32_t)x;
|
294 | a640f031 | Kirill Batuzov | |
295 | 03271524 | Richard Henderson | case INDEX_op_muluh_i32:
|
296 | 03271524 | Richard Henderson | return ((uint64_t)(uint32_t)x * (uint32_t)y) >> 32; |
297 | 03271524 | Richard Henderson | case INDEX_op_mulsh_i32:
|
298 | 03271524 | Richard Henderson | return ((int64_t)(int32_t)x * (int32_t)y) >> 32; |
299 | 03271524 | Richard Henderson | |
300 | 03271524 | Richard Henderson | case INDEX_op_muluh_i64:
|
301 | 03271524 | Richard Henderson | mulu64(&l64, &h64, x, y); |
302 | 03271524 | Richard Henderson | return h64;
|
303 | 03271524 | Richard Henderson | case INDEX_op_mulsh_i64:
|
304 | 03271524 | Richard Henderson | muls64(&l64, &h64, x, y); |
305 | 03271524 | Richard Henderson | return h64;
|
306 | 03271524 | Richard Henderson | |
307 | 01547f7f | Richard Henderson | case INDEX_op_div_i32:
|
308 | 01547f7f | Richard Henderson | /* Avoid crashing on divide by zero, otherwise undefined. */
|
309 | 01547f7f | Richard Henderson | return (int32_t)x / ((int32_t)y ? : 1); |
310 | 01547f7f | Richard Henderson | case INDEX_op_divu_i32:
|
311 | 01547f7f | Richard Henderson | return (uint32_t)x / ((uint32_t)y ? : 1); |
312 | 01547f7f | Richard Henderson | case INDEX_op_div_i64:
|
313 | 01547f7f | Richard Henderson | return (int64_t)x / ((int64_t)y ? : 1); |
314 | 01547f7f | Richard Henderson | case INDEX_op_divu_i64:
|
315 | 01547f7f | Richard Henderson | return (uint64_t)x / ((uint64_t)y ? : 1); |
316 | 01547f7f | Richard Henderson | |
317 | 01547f7f | Richard Henderson | case INDEX_op_rem_i32:
|
318 | 01547f7f | Richard Henderson | return (int32_t)x % ((int32_t)y ? : 1); |
319 | 01547f7f | Richard Henderson | case INDEX_op_remu_i32:
|
320 | 01547f7f | Richard Henderson | return (uint32_t)x % ((uint32_t)y ? : 1); |
321 | 01547f7f | Richard Henderson | case INDEX_op_rem_i64:
|
322 | 01547f7f | Richard Henderson | return (int64_t)x % ((int64_t)y ? : 1); |
323 | 01547f7f | Richard Henderson | case INDEX_op_remu_i64:
|
324 | 01547f7f | Richard Henderson | return (uint64_t)x % ((uint64_t)y ? : 1); |
325 | 01547f7f | Richard Henderson | |
326 | 53108fb5 | Kirill Batuzov | default:
|
327 | 53108fb5 | Kirill Batuzov | fprintf(stderr, |
328 | 53108fb5 | Kirill Batuzov | "Unrecognized operation %d in do_constant_folding.\n", op);
|
329 | 53108fb5 | Kirill Batuzov | tcg_abort(); |
330 | 53108fb5 | Kirill Batuzov | } |
331 | 53108fb5 | Kirill Batuzov | } |
332 | 53108fb5 | Kirill Batuzov | |
333 | fe0de7aa | Blue Swirl | static TCGArg do_constant_folding(TCGOpcode op, TCGArg x, TCGArg y)
|
334 | 53108fb5 | Kirill Batuzov | { |
335 | 53108fb5 | Kirill Batuzov | TCGArg res = do_constant_folding_2(op, x, y); |
336 | 53108fb5 | Kirill Batuzov | if (op_bits(op) == 32) { |
337 | 53108fb5 | Kirill Batuzov | res &= 0xffffffff;
|
338 | 53108fb5 | Kirill Batuzov | } |
339 | 53108fb5 | Kirill Batuzov | return res;
|
340 | 53108fb5 | Kirill Batuzov | } |
341 | 53108fb5 | Kirill Batuzov | |
342 | 9519da7e | Richard Henderson | static bool do_constant_folding_cond_32(uint32_t x, uint32_t y, TCGCond c) |
343 | 9519da7e | Richard Henderson | { |
344 | 9519da7e | Richard Henderson | switch (c) {
|
345 | 9519da7e | Richard Henderson | case TCG_COND_EQ:
|
346 | 9519da7e | Richard Henderson | return x == y;
|
347 | 9519da7e | Richard Henderson | case TCG_COND_NE:
|
348 | 9519da7e | Richard Henderson | return x != y;
|
349 | 9519da7e | Richard Henderson | case TCG_COND_LT:
|
350 | 9519da7e | Richard Henderson | return (int32_t)x < (int32_t)y;
|
351 | 9519da7e | Richard Henderson | case TCG_COND_GE:
|
352 | 9519da7e | Richard Henderson | return (int32_t)x >= (int32_t)y;
|
353 | 9519da7e | Richard Henderson | case TCG_COND_LE:
|
354 | 9519da7e | Richard Henderson | return (int32_t)x <= (int32_t)y;
|
355 | 9519da7e | Richard Henderson | case TCG_COND_GT:
|
356 | 9519da7e | Richard Henderson | return (int32_t)x > (int32_t)y;
|
357 | 9519da7e | Richard Henderson | case TCG_COND_LTU:
|
358 | 9519da7e | Richard Henderson | return x < y;
|
359 | 9519da7e | Richard Henderson | case TCG_COND_GEU:
|
360 | 9519da7e | Richard Henderson | return x >= y;
|
361 | 9519da7e | Richard Henderson | case TCG_COND_LEU:
|
362 | 9519da7e | Richard Henderson | return x <= y;
|
363 | 9519da7e | Richard Henderson | case TCG_COND_GTU:
|
364 | 9519da7e | Richard Henderson | return x > y;
|
365 | 9519da7e | Richard Henderson | default:
|
366 | 9519da7e | Richard Henderson | tcg_abort(); |
367 | 9519da7e | Richard Henderson | } |
368 | 9519da7e | Richard Henderson | } |
369 | 9519da7e | Richard Henderson | |
370 | 9519da7e | Richard Henderson | static bool do_constant_folding_cond_64(uint64_t x, uint64_t y, TCGCond c) |
371 | 9519da7e | Richard Henderson | { |
372 | 9519da7e | Richard Henderson | switch (c) {
|
373 | 9519da7e | Richard Henderson | case TCG_COND_EQ:
|
374 | 9519da7e | Richard Henderson | return x == y;
|
375 | 9519da7e | Richard Henderson | case TCG_COND_NE:
|
376 | 9519da7e | Richard Henderson | return x != y;
|
377 | 9519da7e | Richard Henderson | case TCG_COND_LT:
|
378 | 9519da7e | Richard Henderson | return (int64_t)x < (int64_t)y;
|
379 | 9519da7e | Richard Henderson | case TCG_COND_GE:
|
380 | 9519da7e | Richard Henderson | return (int64_t)x >= (int64_t)y;
|
381 | 9519da7e | Richard Henderson | case TCG_COND_LE:
|
382 | 9519da7e | Richard Henderson | return (int64_t)x <= (int64_t)y;
|
383 | 9519da7e | Richard Henderson | case TCG_COND_GT:
|
384 | 9519da7e | Richard Henderson | return (int64_t)x > (int64_t)y;
|
385 | 9519da7e | Richard Henderson | case TCG_COND_LTU:
|
386 | 9519da7e | Richard Henderson | return x < y;
|
387 | 9519da7e | Richard Henderson | case TCG_COND_GEU:
|
388 | 9519da7e | Richard Henderson | return x >= y;
|
389 | 9519da7e | Richard Henderson | case TCG_COND_LEU:
|
390 | 9519da7e | Richard Henderson | return x <= y;
|
391 | 9519da7e | Richard Henderson | case TCG_COND_GTU:
|
392 | 9519da7e | Richard Henderson | return x > y;
|
393 | 9519da7e | Richard Henderson | default:
|
394 | 9519da7e | Richard Henderson | tcg_abort(); |
395 | 9519da7e | Richard Henderson | } |
396 | 9519da7e | Richard Henderson | } |
397 | 9519da7e | Richard Henderson | |
398 | 9519da7e | Richard Henderson | static bool do_constant_folding_cond_eq(TCGCond c) |
399 | 9519da7e | Richard Henderson | { |
400 | 9519da7e | Richard Henderson | switch (c) {
|
401 | 9519da7e | Richard Henderson | case TCG_COND_GT:
|
402 | 9519da7e | Richard Henderson | case TCG_COND_LTU:
|
403 | 9519da7e | Richard Henderson | case TCG_COND_LT:
|
404 | 9519da7e | Richard Henderson | case TCG_COND_GTU:
|
405 | 9519da7e | Richard Henderson | case TCG_COND_NE:
|
406 | 9519da7e | Richard Henderson | return 0; |
407 | 9519da7e | Richard Henderson | case TCG_COND_GE:
|
408 | 9519da7e | Richard Henderson | case TCG_COND_GEU:
|
409 | 9519da7e | Richard Henderson | case TCG_COND_LE:
|
410 | 9519da7e | Richard Henderson | case TCG_COND_LEU:
|
411 | 9519da7e | Richard Henderson | case TCG_COND_EQ:
|
412 | 9519da7e | Richard Henderson | return 1; |
413 | 9519da7e | Richard Henderson | default:
|
414 | 9519da7e | Richard Henderson | tcg_abort(); |
415 | 9519da7e | Richard Henderson | } |
416 | 9519da7e | Richard Henderson | } |
417 | 9519da7e | Richard Henderson | |
418 | b336ceb6 | Aurelien Jarno | /* Return 2 if the condition can't be simplified, and the result
|
419 | b336ceb6 | Aurelien Jarno | of the condition (0 or 1) if it can */
|
420 | f8dd19e5 | Aurelien Jarno | static TCGArg do_constant_folding_cond(TCGOpcode op, TCGArg x,
|
421 | f8dd19e5 | Aurelien Jarno | TCGArg y, TCGCond c) |
422 | f8dd19e5 | Aurelien Jarno | { |
423 | b336ceb6 | Aurelien Jarno | if (temps[x].state == TCG_TEMP_CONST && temps[y].state == TCG_TEMP_CONST) {
|
424 | b336ceb6 | Aurelien Jarno | switch (op_bits(op)) {
|
425 | b336ceb6 | Aurelien Jarno | case 32: |
426 | 9519da7e | Richard Henderson | return do_constant_folding_cond_32(temps[x].val, temps[y].val, c);
|
427 | b336ceb6 | Aurelien Jarno | case 64: |
428 | 9519da7e | Richard Henderson | return do_constant_folding_cond_64(temps[x].val, temps[y].val, c);
|
429 | 0aed257f | Richard Henderson | default:
|
430 | 9519da7e | Richard Henderson | tcg_abort(); |
431 | b336ceb6 | Aurelien Jarno | } |
432 | 9519da7e | Richard Henderson | } else if (temps_are_copies(x, y)) { |
433 | 9519da7e | Richard Henderson | return do_constant_folding_cond_eq(c);
|
434 | b336ceb6 | Aurelien Jarno | } else if (temps[y].state == TCG_TEMP_CONST && temps[y].val == 0) { |
435 | b336ceb6 | Aurelien Jarno | switch (c) {
|
436 | f8dd19e5 | Aurelien Jarno | case TCG_COND_LTU:
|
437 | b336ceb6 | Aurelien Jarno | return 0; |
438 | f8dd19e5 | Aurelien Jarno | case TCG_COND_GEU:
|
439 | b336ceb6 | Aurelien Jarno | return 1; |
440 | b336ceb6 | Aurelien Jarno | default:
|
441 | b336ceb6 | Aurelien Jarno | return 2; |
442 | f8dd19e5 | Aurelien Jarno | } |
443 | b336ceb6 | Aurelien Jarno | } else {
|
444 | b336ceb6 | Aurelien Jarno | return 2; |
445 | f8dd19e5 | Aurelien Jarno | } |
446 | f8dd19e5 | Aurelien Jarno | } |
447 | f8dd19e5 | Aurelien Jarno | |
/* Return 2 if the condition can't be simplified, and the result
   of the condition (0 or 1) if it can.  Each operand is a 64-bit
   value split across two 32-bit temps: p[0] is the low half, p[1]
   the high half. */
static TCGArg do_constant_folding_cond2(TCGArg *p1, TCGArg *p2, TCGCond c)
{
    TCGArg al = p1[0], ah = p1[1];
    TCGArg bl = p2[0], bh = p2[1];

    if (temps[bl].state == TCG_TEMP_CONST
        && temps[bh].state == TCG_TEMP_CONST) {
        /* Reassemble the constant right-hand side. */
        uint64_t b = ((uint64_t)temps[bh].val << 32) | (uint32_t)temps[bl].val;

        if (temps[al].state == TCG_TEMP_CONST
            && temps[ah].state == TCG_TEMP_CONST) {
            /* Both sides constant: fold as a plain 64-bit compare. */
            uint64_t a;
            a = ((uint64_t)temps[ah].val << 32) | (uint32_t)temps[al].val;
            return do_constant_folding_cond_64(a, b, c);
        }
        if (b == 0) {
            /* Unsigned comparison against 0 is known regardless of a. */
            switch (c) {
            case TCG_COND_LTU:
                return 0;
            case TCG_COND_GEU:
                return 1;
            default:
                break;
            }
        }
    }
    /* Pairwise copies mean the operands are equal. */
    if (temps_are_copies(al, bl) && temps_are_copies(ah, bh)) {
        return do_constant_folding_cond_eq(c);
    }
    return 2;
}
481 | 6c4382f8 | Richard Henderson | |
482 | 24c9ae4e | Richard Henderson | static bool swap_commutative(TCGArg dest, TCGArg *p1, TCGArg *p2) |
483 | 24c9ae4e | Richard Henderson | { |
484 | 24c9ae4e | Richard Henderson | TCGArg a1 = *p1, a2 = *p2; |
485 | 24c9ae4e | Richard Henderson | int sum = 0; |
486 | 24c9ae4e | Richard Henderson | sum += temps[a1].state == TCG_TEMP_CONST; |
487 | 24c9ae4e | Richard Henderson | sum -= temps[a2].state == TCG_TEMP_CONST; |
488 | 24c9ae4e | Richard Henderson | |
489 | 24c9ae4e | Richard Henderson | /* Prefer the constant in second argument, and then the form
|
490 | 24c9ae4e | Richard Henderson | op a, a, b, which is better handled on non-RISC hosts. */
|
491 | 24c9ae4e | Richard Henderson | if (sum > 0 || (sum == 0 && dest == a2)) { |
492 | 24c9ae4e | Richard Henderson | *p1 = a2; |
493 | 24c9ae4e | Richard Henderson | *p2 = a1; |
494 | 24c9ae4e | Richard Henderson | return true; |
495 | 24c9ae4e | Richard Henderson | } |
496 | 24c9ae4e | Richard Henderson | return false; |
497 | 24c9ae4e | Richard Henderson | } |
498 | 24c9ae4e | Richard Henderson | |
499 | 0bfcb865 | Richard Henderson | static bool swap_commutative2(TCGArg *p1, TCGArg *p2) |
500 | 0bfcb865 | Richard Henderson | { |
501 | 0bfcb865 | Richard Henderson | int sum = 0; |
502 | 0bfcb865 | Richard Henderson | sum += temps[p1[0]].state == TCG_TEMP_CONST;
|
503 | 0bfcb865 | Richard Henderson | sum += temps[p1[1]].state == TCG_TEMP_CONST;
|
504 | 0bfcb865 | Richard Henderson | sum -= temps[p2[0]].state == TCG_TEMP_CONST;
|
505 | 0bfcb865 | Richard Henderson | sum -= temps[p2[1]].state == TCG_TEMP_CONST;
|
506 | 0bfcb865 | Richard Henderson | if (sum > 0) { |
507 | 0bfcb865 | Richard Henderson | TCGArg t; |
508 | 0bfcb865 | Richard Henderson | t = p1[0], p1[0] = p2[0], p2[0] = t; |
509 | 0bfcb865 | Richard Henderson | t = p1[1], p1[1] = p2[1], p2[1] = t; |
510 | 0bfcb865 | Richard Henderson | return true; |
511 | 0bfcb865 | Richard Henderson | } |
512 | 0bfcb865 | Richard Henderson | return false; |
513 | 0bfcb865 | Richard Henderson | } |
514 | 0bfcb865 | Richard Henderson | |
515 | 22613af4 | Kirill Batuzov | /* Propagate constants and copies, fold constant expressions. */
|
516 | 8f2e8c07 | Kirill Batuzov | static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr,
|
517 | 8f2e8c07 | Kirill Batuzov | TCGArg *args, TCGOpDef *tcg_op_defs) |
518 | 8f2e8c07 | Kirill Batuzov | { |
519 | fe0de7aa | Blue Swirl | int i, nb_ops, op_index, nb_temps, nb_globals, nb_call_args;
|
520 | 633f6502 | Paolo Bonzini | tcg_target_ulong mask, affected; |
521 | fe0de7aa | Blue Swirl | TCGOpcode op; |
522 | 8f2e8c07 | Kirill Batuzov | const TCGOpDef *def;
|
523 | 8f2e8c07 | Kirill Batuzov | TCGArg *gen_args; |
524 | 53108fb5 | Kirill Batuzov | TCGArg tmp; |
525 | 5d8f5363 | Richard Henderson | |
526 | 22613af4 | Kirill Batuzov | /* Array VALS has an element for each temp.
|
527 | 22613af4 | Kirill Batuzov | If this temp holds a constant then its value is kept in VALS' element.
|
528 | e590d4e6 | Aurelien Jarno | If this temp is a copy of other ones then the other copies are
|
529 | e590d4e6 | Aurelien Jarno | available through the doubly linked circular list. */
|
530 | 8f2e8c07 | Kirill Batuzov | |
531 | 8f2e8c07 | Kirill Batuzov | nb_temps = s->nb_temps; |
532 | 8f2e8c07 | Kirill Batuzov | nb_globals = s->nb_globals; |
533 | d193a14a | Paolo Bonzini | reset_all_temps(nb_temps); |
534 | 8f2e8c07 | Kirill Batuzov | |
535 | 92414b31 | Evgeny Voevodin | nb_ops = tcg_opc_ptr - s->gen_opc_buf; |
536 | 8f2e8c07 | Kirill Batuzov | gen_args = args; |
537 | 8f2e8c07 | Kirill Batuzov | for (op_index = 0; op_index < nb_ops; op_index++) { |
538 | 92414b31 | Evgeny Voevodin | op = s->gen_opc_buf[op_index]; |
539 | 8f2e8c07 | Kirill Batuzov | def = &tcg_op_defs[op]; |
540 | 22613af4 | Kirill Batuzov | /* Do copy propagation */
|
541 | 1ff8c541 | Aurelien Jarno | if (op == INDEX_op_call) {
|
542 | 1ff8c541 | Aurelien Jarno | int nb_oargs = args[0] >> 16; |
543 | 1ff8c541 | Aurelien Jarno | int nb_iargs = args[0] & 0xffff; |
544 | 1ff8c541 | Aurelien Jarno | for (i = nb_oargs + 1; i < nb_oargs + nb_iargs + 1; i++) { |
545 | 1ff8c541 | Aurelien Jarno | if (temps[args[i]].state == TCG_TEMP_COPY) {
|
546 | 1ff8c541 | Aurelien Jarno | args[i] = find_better_copy(s, args[i]); |
547 | 1ff8c541 | Aurelien Jarno | } |
548 | 1ff8c541 | Aurelien Jarno | } |
549 | 1ff8c541 | Aurelien Jarno | } else {
|
550 | 22613af4 | Kirill Batuzov | for (i = def->nb_oargs; i < def->nb_oargs + def->nb_iargs; i++) {
|
551 | 22613af4 | Kirill Batuzov | if (temps[args[i]].state == TCG_TEMP_COPY) {
|
552 | e590d4e6 | Aurelien Jarno | args[i] = find_better_copy(s, args[i]); |
553 | 22613af4 | Kirill Batuzov | } |
554 | 22613af4 | Kirill Batuzov | } |
555 | 22613af4 | Kirill Batuzov | } |
556 | 22613af4 | Kirill Batuzov | |
557 | 53108fb5 | Kirill Batuzov | /* For commutative operations make constant second argument */
|
558 | 53108fb5 | Kirill Batuzov | switch (op) {
|
559 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(add): |
560 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(mul): |
561 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(and): |
562 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(or): |
563 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(xor): |
564 | cb25c80a | Richard Henderson | CASE_OP_32_64(eqv): |
565 | cb25c80a | Richard Henderson | CASE_OP_32_64(nand): |
566 | cb25c80a | Richard Henderson | CASE_OP_32_64(nor): |
567 | 03271524 | Richard Henderson | CASE_OP_32_64(muluh): |
568 | 03271524 | Richard Henderson | CASE_OP_32_64(mulsh): |
569 | 24c9ae4e | Richard Henderson | swap_commutative(args[0], &args[1], &args[2]); |
570 | 53108fb5 | Kirill Batuzov | break;
|
571 | 65a7cce1 | Aurelien Jarno | CASE_OP_32_64(brcond): |
572 | 24c9ae4e | Richard Henderson | if (swap_commutative(-1, &args[0], &args[1])) { |
573 | 65a7cce1 | Aurelien Jarno | args[2] = tcg_swap_cond(args[2]); |
574 | 65a7cce1 | Aurelien Jarno | } |
575 | 65a7cce1 | Aurelien Jarno | break;
|
576 | 65a7cce1 | Aurelien Jarno | CASE_OP_32_64(setcond): |
577 | 24c9ae4e | Richard Henderson | if (swap_commutative(args[0], &args[1], &args[2])) { |
578 | 65a7cce1 | Aurelien Jarno | args[3] = tcg_swap_cond(args[3]); |
579 | 65a7cce1 | Aurelien Jarno | } |
580 | 65a7cce1 | Aurelien Jarno | break;
|
581 | fa01a208 | Richard Henderson | CASE_OP_32_64(movcond): |
582 | 24c9ae4e | Richard Henderson | if (swap_commutative(-1, &args[1], &args[2])) { |
583 | 24c9ae4e | Richard Henderson | args[5] = tcg_swap_cond(args[5]); |
584 | 5d8f5363 | Richard Henderson | } |
585 | 5d8f5363 | Richard Henderson | /* For movcond, we canonicalize the "false" input reg to match
|
586 | 5d8f5363 | Richard Henderson | the destination reg so that the tcg backend can implement
|
587 | 5d8f5363 | Richard Henderson | a "move if true" operation. */
|
588 | 24c9ae4e | Richard Henderson | if (swap_commutative(args[0], &args[4], &args[3])) { |
589 | 24c9ae4e | Richard Henderson | args[5] = tcg_invert_cond(args[5]); |
590 | fa01a208 | Richard Henderson | } |
591 | 1e484e61 | Richard Henderson | break;
|
592 | d7156f7c | Richard Henderson | CASE_OP_32_64(add2): |
593 | 1e484e61 | Richard Henderson | swap_commutative(args[0], &args[2], &args[4]); |
594 | 1e484e61 | Richard Henderson | swap_commutative(args[1], &args[3], &args[5]); |
595 | 1e484e61 | Richard Henderson | break;
|
596 | d7156f7c | Richard Henderson | CASE_OP_32_64(mulu2): |
597 | 4d3203fd | Richard Henderson | CASE_OP_32_64(muls2): |
598 | 1414968a | Richard Henderson | swap_commutative(args[0], &args[2], &args[3]); |
599 | 1414968a | Richard Henderson | break;
|
600 | 0bfcb865 | Richard Henderson | case INDEX_op_brcond2_i32:
|
601 | 0bfcb865 | Richard Henderson | if (swap_commutative2(&args[0], &args[2])) { |
602 | 0bfcb865 | Richard Henderson | args[4] = tcg_swap_cond(args[4]); |
603 | 0bfcb865 | Richard Henderson | } |
604 | 0bfcb865 | Richard Henderson | break;
|
605 | 0bfcb865 | Richard Henderson | case INDEX_op_setcond2_i32:
|
606 | 0bfcb865 | Richard Henderson | if (swap_commutative2(&args[1], &args[3])) { |
607 | 0bfcb865 | Richard Henderson | args[5] = tcg_swap_cond(args[5]); |
608 | 0bfcb865 | Richard Henderson | } |
609 | 0bfcb865 | Richard Henderson | break;
|
610 | 53108fb5 | Kirill Batuzov | default:
|
611 | 53108fb5 | Kirill Batuzov | break;
|
612 | 53108fb5 | Kirill Batuzov | } |
613 | 53108fb5 | Kirill Batuzov | |
614 | 2d497542 | Richard Henderson | /* Simplify expressions for "shift/rot r, 0, a => movi r, 0",
|
615 | 2d497542 | Richard Henderson | and "sub r, 0, a => neg r, a" case. */
|
616 | 01ee5282 | Aurelien Jarno | switch (op) {
|
617 | 01ee5282 | Aurelien Jarno | CASE_OP_32_64(shl): |
618 | 01ee5282 | Aurelien Jarno | CASE_OP_32_64(shr): |
619 | 01ee5282 | Aurelien Jarno | CASE_OP_32_64(sar): |
620 | 01ee5282 | Aurelien Jarno | CASE_OP_32_64(rotl): |
621 | 01ee5282 | Aurelien Jarno | CASE_OP_32_64(rotr): |
622 | 01ee5282 | Aurelien Jarno | if (temps[args[1]].state == TCG_TEMP_CONST |
623 | 01ee5282 | Aurelien Jarno | && temps[args[1]].val == 0) { |
624 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = op_to_movi(op); |
625 | e590d4e6 | Aurelien Jarno | tcg_opt_gen_movi(gen_args, args[0], 0); |
626 | 01ee5282 | Aurelien Jarno | args += 3;
|
627 | 01ee5282 | Aurelien Jarno | gen_args += 2;
|
628 | 01ee5282 | Aurelien Jarno | continue;
|
629 | 01ee5282 | Aurelien Jarno | } |
630 | 01ee5282 | Aurelien Jarno | break;
|
631 | 2d497542 | Richard Henderson | CASE_OP_32_64(sub): |
632 | 2d497542 | Richard Henderson | { |
633 | 2d497542 | Richard Henderson | TCGOpcode neg_op; |
634 | 2d497542 | Richard Henderson | bool have_neg;
|
635 | 2d497542 | Richard Henderson | |
636 | 2d497542 | Richard Henderson | if (temps[args[2]].state == TCG_TEMP_CONST) { |
637 | 2d497542 | Richard Henderson | /* Proceed with possible constant folding. */
|
638 | 2d497542 | Richard Henderson | break;
|
639 | 2d497542 | Richard Henderson | } |
640 | 2d497542 | Richard Henderson | if (op == INDEX_op_sub_i32) {
|
641 | 2d497542 | Richard Henderson | neg_op = INDEX_op_neg_i32; |
642 | 2d497542 | Richard Henderson | have_neg = TCG_TARGET_HAS_neg_i32; |
643 | 2d497542 | Richard Henderson | } else {
|
644 | 2d497542 | Richard Henderson | neg_op = INDEX_op_neg_i64; |
645 | 2d497542 | Richard Henderson | have_neg = TCG_TARGET_HAS_neg_i64; |
646 | 2d497542 | Richard Henderson | } |
647 | 2d497542 | Richard Henderson | if (!have_neg) {
|
648 | 2d497542 | Richard Henderson | break;
|
649 | 2d497542 | Richard Henderson | } |
650 | 2d497542 | Richard Henderson | if (temps[args[1]].state == TCG_TEMP_CONST |
651 | 2d497542 | Richard Henderson | && temps[args[1]].val == 0) { |
652 | 2d497542 | Richard Henderson | s->gen_opc_buf[op_index] = neg_op; |
653 | 2d497542 | Richard Henderson | reset_temp(args[0]);
|
654 | 2d497542 | Richard Henderson | gen_args[0] = args[0]; |
655 | 2d497542 | Richard Henderson | gen_args[1] = args[2]; |
656 | 2d497542 | Richard Henderson | args += 3;
|
657 | 2d497542 | Richard Henderson | gen_args += 2;
|
658 | 2d497542 | Richard Henderson | continue;
|
659 | 2d497542 | Richard Henderson | } |
660 | 2d497542 | Richard Henderson | } |
661 | 2d497542 | Richard Henderson | break;
|
662 | 01ee5282 | Aurelien Jarno | default:
|
663 | 01ee5282 | Aurelien Jarno | break;
|
664 | 01ee5282 | Aurelien Jarno | } |
665 | 01ee5282 | Aurelien Jarno | |
666 | 56e49438 | Aurelien Jarno | /* Simplify expression for "op r, a, 0 => mov r, a" cases */
|
667 | 53108fb5 | Kirill Batuzov | switch (op) {
|
668 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(add): |
669 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(sub): |
670 | 55c0975c | Kirill Batuzov | CASE_OP_32_64(shl): |
671 | 55c0975c | Kirill Batuzov | CASE_OP_32_64(shr): |
672 | 55c0975c | Kirill Batuzov | CASE_OP_32_64(sar): |
673 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(rotl): |
674 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(rotr): |
675 | 38ee188b | Aurelien Jarno | CASE_OP_32_64(or): |
676 | 38ee188b | Aurelien Jarno | CASE_OP_32_64(xor): |
677 | 53108fb5 | Kirill Batuzov | if (temps[args[1]].state == TCG_TEMP_CONST) { |
678 | 53108fb5 | Kirill Batuzov | /* Proceed with possible constant folding. */
|
679 | 53108fb5 | Kirill Batuzov | break;
|
680 | 53108fb5 | Kirill Batuzov | } |
681 | 53108fb5 | Kirill Batuzov | if (temps[args[2]].state == TCG_TEMP_CONST |
682 | 53108fb5 | Kirill Batuzov | && temps[args[2]].val == 0) { |
683 | e590d4e6 | Aurelien Jarno | if (temps_are_copies(args[0], args[1])) { |
684 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_nop; |
685 | 53108fb5 | Kirill Batuzov | } else {
|
686 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = op_to_mov(op); |
687 | b80bb016 | Aurelien Jarno | tcg_opt_gen_mov(s, gen_args, args[0], args[1]); |
688 | 53108fb5 | Kirill Batuzov | gen_args += 2;
|
689 | 53108fb5 | Kirill Batuzov | } |
690 | fedc0da2 | Aurelien Jarno | args += 3;
|
691 | 53108fb5 | Kirill Batuzov | continue;
|
692 | 53108fb5 | Kirill Batuzov | } |
693 | 53108fb5 | Kirill Batuzov | break;
|
694 | 56e49438 | Aurelien Jarno | default:
|
695 | 56e49438 | Aurelien Jarno | break;
|
696 | 56e49438 | Aurelien Jarno | } |
697 | 56e49438 | Aurelien Jarno | |
698 | 3a9d8b17 | Paolo Bonzini | /* Simplify using known-zero bits */
|
699 | 3a9d8b17 | Paolo Bonzini | mask = -1;
|
700 | 633f6502 | Paolo Bonzini | affected = -1;
|
701 | 3a9d8b17 | Paolo Bonzini | switch (op) {
|
702 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(ext8s): |
703 | 3a9d8b17 | Paolo Bonzini | if ((temps[args[1]].mask & 0x80) != 0) { |
704 | 3a9d8b17 | Paolo Bonzini | break;
|
705 | 3a9d8b17 | Paolo Bonzini | } |
706 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(ext8u): |
707 | 3a9d8b17 | Paolo Bonzini | mask = 0xff;
|
708 | 3a9d8b17 | Paolo Bonzini | goto and_const;
|
709 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(ext16s): |
710 | 3a9d8b17 | Paolo Bonzini | if ((temps[args[1]].mask & 0x8000) != 0) { |
711 | 3a9d8b17 | Paolo Bonzini | break;
|
712 | 3a9d8b17 | Paolo Bonzini | } |
713 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(ext16u): |
714 | 3a9d8b17 | Paolo Bonzini | mask = 0xffff;
|
715 | 3a9d8b17 | Paolo Bonzini | goto and_const;
|
716 | 3a9d8b17 | Paolo Bonzini | case INDEX_op_ext32s_i64:
|
717 | 3a9d8b17 | Paolo Bonzini | if ((temps[args[1]].mask & 0x80000000) != 0) { |
718 | 3a9d8b17 | Paolo Bonzini | break;
|
719 | 3a9d8b17 | Paolo Bonzini | } |
720 | 3a9d8b17 | Paolo Bonzini | case INDEX_op_ext32u_i64:
|
721 | 3a9d8b17 | Paolo Bonzini | mask = 0xffffffffU;
|
722 | 3a9d8b17 | Paolo Bonzini | goto and_const;
|
723 | 3a9d8b17 | Paolo Bonzini | |
724 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(and): |
725 | 3a9d8b17 | Paolo Bonzini | mask = temps[args[2]].mask;
|
726 | 3a9d8b17 | Paolo Bonzini | if (temps[args[2]].state == TCG_TEMP_CONST) { |
727 | 3a9d8b17 | Paolo Bonzini | and_const:
|
728 | 633f6502 | Paolo Bonzini | affected = temps[args[1]].mask & ~mask;
|
729 | 3a9d8b17 | Paolo Bonzini | } |
730 | 3a9d8b17 | Paolo Bonzini | mask = temps[args[1]].mask & mask;
|
731 | 3a9d8b17 | Paolo Bonzini | break;
|
732 | 3a9d8b17 | Paolo Bonzini | |
733 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(sar): |
734 | 3a9d8b17 | Paolo Bonzini | if (temps[args[2]].state == TCG_TEMP_CONST) { |
735 | 3a9d8b17 | Paolo Bonzini | mask = ((tcg_target_long)temps[args[1]].mask
|
736 | 3a9d8b17 | Paolo Bonzini | >> temps[args[2]].val);
|
737 | 3a9d8b17 | Paolo Bonzini | } |
738 | 3a9d8b17 | Paolo Bonzini | break;
|
739 | 3a9d8b17 | Paolo Bonzini | |
740 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(shr): |
741 | 3a9d8b17 | Paolo Bonzini | if (temps[args[2]].state == TCG_TEMP_CONST) { |
742 | 3a9d8b17 | Paolo Bonzini | mask = temps[args[1]].mask >> temps[args[2]].val; |
743 | 3a9d8b17 | Paolo Bonzini | } |
744 | 3a9d8b17 | Paolo Bonzini | break;
|
745 | 3a9d8b17 | Paolo Bonzini | |
746 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(shl): |
747 | 3a9d8b17 | Paolo Bonzini | if (temps[args[2]].state == TCG_TEMP_CONST) { |
748 | 3a9d8b17 | Paolo Bonzini | mask = temps[args[1]].mask << temps[args[2]].val; |
749 | 3a9d8b17 | Paolo Bonzini | } |
750 | 3a9d8b17 | Paolo Bonzini | break;
|
751 | 3a9d8b17 | Paolo Bonzini | |
752 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(neg): |
753 | 3a9d8b17 | Paolo Bonzini | /* Set to 1 all bits to the left of the rightmost. */
|
754 | 3a9d8b17 | Paolo Bonzini | mask = -(temps[args[1]].mask & -temps[args[1]].mask); |
755 | 3a9d8b17 | Paolo Bonzini | break;
|
756 | 3a9d8b17 | Paolo Bonzini | |
757 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(deposit): |
758 | 3a9d8b17 | Paolo Bonzini | tmp = ((1ull << args[4]) - 1); |
759 | 3a9d8b17 | Paolo Bonzini | mask = ((temps[args[1]].mask & ~(tmp << args[3])) |
760 | 3a9d8b17 | Paolo Bonzini | | ((temps[args[2]].mask & tmp) << args[3])); |
761 | 3a9d8b17 | Paolo Bonzini | break;
|
762 | 3a9d8b17 | Paolo Bonzini | |
763 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(or): |
764 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(xor): |
765 | 3a9d8b17 | Paolo Bonzini | mask = temps[args[1]].mask | temps[args[2]].mask; |
766 | 3a9d8b17 | Paolo Bonzini | break;
|
767 | 3a9d8b17 | Paolo Bonzini | |
768 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(setcond): |
769 | 3a9d8b17 | Paolo Bonzini | mask = 1;
|
770 | 3a9d8b17 | Paolo Bonzini | break;
|
771 | 3a9d8b17 | Paolo Bonzini | |
772 | 3a9d8b17 | Paolo Bonzini | CASE_OP_32_64(movcond): |
773 | 3a9d8b17 | Paolo Bonzini | mask = temps[args[3]].mask | temps[args[4]].mask; |
774 | 3a9d8b17 | Paolo Bonzini | break;
|
775 | 3a9d8b17 | Paolo Bonzini | |
776 | 3a9d8b17 | Paolo Bonzini | default:
|
777 | 3a9d8b17 | Paolo Bonzini | break;
|
778 | 3a9d8b17 | Paolo Bonzini | } |
779 | 3a9d8b17 | Paolo Bonzini | |
780 | 633f6502 | Paolo Bonzini | if (mask == 0) { |
781 | 633f6502 | Paolo Bonzini | assert(def->nb_oargs == 1);
|
782 | 633f6502 | Paolo Bonzini | s->gen_opc_buf[op_index] = op_to_movi(op); |
783 | 633f6502 | Paolo Bonzini | tcg_opt_gen_movi(gen_args, args[0], 0); |
784 | 633f6502 | Paolo Bonzini | args += def->nb_oargs + def->nb_iargs + def->nb_cargs; |
785 | 633f6502 | Paolo Bonzini | gen_args += 2;
|
786 | 633f6502 | Paolo Bonzini | continue;
|
787 | 633f6502 | Paolo Bonzini | } |
788 | 633f6502 | Paolo Bonzini | if (affected == 0) { |
789 | 633f6502 | Paolo Bonzini | assert(def->nb_oargs == 1);
|
790 | 633f6502 | Paolo Bonzini | if (temps_are_copies(args[0], args[1])) { |
791 | 633f6502 | Paolo Bonzini | s->gen_opc_buf[op_index] = INDEX_op_nop; |
792 | 633f6502 | Paolo Bonzini | } else if (temps[args[1]].state != TCG_TEMP_CONST) { |
793 | 633f6502 | Paolo Bonzini | s->gen_opc_buf[op_index] = op_to_mov(op); |
794 | 633f6502 | Paolo Bonzini | tcg_opt_gen_mov(s, gen_args, args[0], args[1]); |
795 | 633f6502 | Paolo Bonzini | gen_args += 2;
|
796 | 633f6502 | Paolo Bonzini | } else {
|
797 | 633f6502 | Paolo Bonzini | s->gen_opc_buf[op_index] = op_to_movi(op); |
798 | 633f6502 | Paolo Bonzini | tcg_opt_gen_movi(gen_args, args[0], temps[args[1]].val); |
799 | 633f6502 | Paolo Bonzini | gen_args += 2;
|
800 | 633f6502 | Paolo Bonzini | } |
801 | 633f6502 | Paolo Bonzini | args += def->nb_iargs + 1;
|
802 | 633f6502 | Paolo Bonzini | continue;
|
803 | 633f6502 | Paolo Bonzini | } |
804 | 633f6502 | Paolo Bonzini | |
805 | 56e49438 | Aurelien Jarno | /* Simplify expression for "op r, a, 0 => movi r, 0" cases */
|
806 | 56e49438 | Aurelien Jarno | switch (op) {
|
807 | 61251c0c | Aurelien Jarno | CASE_OP_32_64(and): |
808 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(mul): |
809 | 03271524 | Richard Henderson | CASE_OP_32_64(muluh): |
810 | 03271524 | Richard Henderson | CASE_OP_32_64(mulsh): |
811 | 53108fb5 | Kirill Batuzov | if ((temps[args[2]].state == TCG_TEMP_CONST |
812 | 53108fb5 | Kirill Batuzov | && temps[args[2]].val == 0)) { |
813 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = op_to_movi(op); |
814 | e590d4e6 | Aurelien Jarno | tcg_opt_gen_movi(gen_args, args[0], 0); |
815 | 53108fb5 | Kirill Batuzov | args += 3;
|
816 | 53108fb5 | Kirill Batuzov | gen_args += 2;
|
817 | 53108fb5 | Kirill Batuzov | continue;
|
818 | 53108fb5 | Kirill Batuzov | } |
819 | 53108fb5 | Kirill Batuzov | break;
|
820 | 56e49438 | Aurelien Jarno | default:
|
821 | 56e49438 | Aurelien Jarno | break;
|
822 | 56e49438 | Aurelien Jarno | } |
823 | 56e49438 | Aurelien Jarno | |
824 | 56e49438 | Aurelien Jarno | /* Simplify expression for "op r, a, a => mov r, a" cases */
|
825 | 56e49438 | Aurelien Jarno | switch (op) {
|
826 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(or): |
827 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(and): |
828 | 0aba1c73 | Aurelien Jarno | if (temps_are_copies(args[1], args[2])) { |
829 | e590d4e6 | Aurelien Jarno | if (temps_are_copies(args[0], args[1])) { |
830 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_nop; |
831 | 9a81090b | Kirill Batuzov | } else {
|
832 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = op_to_mov(op); |
833 | b80bb016 | Aurelien Jarno | tcg_opt_gen_mov(s, gen_args, args[0], args[1]); |
834 | 9a81090b | Kirill Batuzov | gen_args += 2;
|
835 | 9a81090b | Kirill Batuzov | } |
836 | fedc0da2 | Aurelien Jarno | args += 3;
|
837 | 9a81090b | Kirill Batuzov | continue;
|
838 | 9a81090b | Kirill Batuzov | } |
839 | 9a81090b | Kirill Batuzov | break;
|
840 | fe0de7aa | Blue Swirl | default:
|
841 | fe0de7aa | Blue Swirl | break;
|
842 | 53108fb5 | Kirill Batuzov | } |
843 | 53108fb5 | Kirill Batuzov | |
844 | 3c94193e | Aurelien Jarno | /* Simplify expression for "op r, a, a => movi r, 0" cases */
|
845 | 3c94193e | Aurelien Jarno | switch (op) {
|
846 | 3c94193e | Aurelien Jarno | CASE_OP_32_64(sub): |
847 | 3c94193e | Aurelien Jarno | CASE_OP_32_64(xor): |
848 | 3c94193e | Aurelien Jarno | if (temps_are_copies(args[1], args[2])) { |
849 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = op_to_movi(op); |
850 | 3c94193e | Aurelien Jarno | tcg_opt_gen_movi(gen_args, args[0], 0); |
851 | 3c94193e | Aurelien Jarno | gen_args += 2;
|
852 | 3c94193e | Aurelien Jarno | args += 3;
|
853 | 3c94193e | Aurelien Jarno | continue;
|
854 | 3c94193e | Aurelien Jarno | } |
855 | 3c94193e | Aurelien Jarno | break;
|
856 | 3c94193e | Aurelien Jarno | default:
|
857 | 3c94193e | Aurelien Jarno | break;
|
858 | 3c94193e | Aurelien Jarno | } |
859 | 3c94193e | Aurelien Jarno | |
860 | 22613af4 | Kirill Batuzov | /* Propagate constants through copy operations and do constant
|
861 | 22613af4 | Kirill Batuzov | folding. Constants will be substituted to arguments by register
|
862 | 22613af4 | Kirill Batuzov | allocator where needed and possible. Also detect copies. */
|
863 | 8f2e8c07 | Kirill Batuzov | switch (op) {
|
864 | 22613af4 | Kirill Batuzov | CASE_OP_32_64(mov): |
865 | e590d4e6 | Aurelien Jarno | if (temps_are_copies(args[0], args[1])) { |
866 | 22613af4 | Kirill Batuzov | args += 2;
|
867 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_nop; |
868 | 22613af4 | Kirill Batuzov | break;
|
869 | 22613af4 | Kirill Batuzov | } |
870 | 22613af4 | Kirill Batuzov | if (temps[args[1]].state != TCG_TEMP_CONST) { |
871 | b80bb016 | Aurelien Jarno | tcg_opt_gen_mov(s, gen_args, args[0], args[1]); |
872 | 22613af4 | Kirill Batuzov | gen_args += 2;
|
873 | 22613af4 | Kirill Batuzov | args += 2;
|
874 | 22613af4 | Kirill Batuzov | break;
|
875 | 22613af4 | Kirill Batuzov | } |
876 | 22613af4 | Kirill Batuzov | /* Source argument is constant. Rewrite the operation and
|
877 | 22613af4 | Kirill Batuzov | let movi case handle it. */
|
878 | 22613af4 | Kirill Batuzov | op = op_to_movi(op); |
879 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = op; |
880 | 22613af4 | Kirill Batuzov | args[1] = temps[args[1]].val; |
881 | 22613af4 | Kirill Batuzov | /* fallthrough */
|
882 | 22613af4 | Kirill Batuzov | CASE_OP_32_64(movi): |
883 | e590d4e6 | Aurelien Jarno | tcg_opt_gen_movi(gen_args, args[0], args[1]); |
884 | 22613af4 | Kirill Batuzov | gen_args += 2;
|
885 | 22613af4 | Kirill Batuzov | args += 2;
|
886 | 22613af4 | Kirill Batuzov | break;
|
887 | 6e14e91b | Richard Henderson | |
888 | a640f031 | Kirill Batuzov | CASE_OP_32_64(not): |
889 | cb25c80a | Richard Henderson | CASE_OP_32_64(neg): |
890 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext8s): |
891 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext8u): |
892 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext16s): |
893 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext16u): |
894 | a640f031 | Kirill Batuzov | case INDEX_op_ext32s_i64:
|
895 | a640f031 | Kirill Batuzov | case INDEX_op_ext32u_i64:
|
896 | a640f031 | Kirill Batuzov | if (temps[args[1]].state == TCG_TEMP_CONST) { |
897 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = op_to_movi(op); |
898 | a640f031 | Kirill Batuzov | tmp = do_constant_folding(op, temps[args[1]].val, 0); |
899 | e590d4e6 | Aurelien Jarno | tcg_opt_gen_movi(gen_args, args[0], tmp);
|
900 | 6e14e91b | Richard Henderson | gen_args += 2;
|
901 | 6e14e91b | Richard Henderson | args += 2;
|
902 | 6e14e91b | Richard Henderson | break;
|
903 | a640f031 | Kirill Batuzov | } |
904 | 6e14e91b | Richard Henderson | goto do_default;
|
905 | 6e14e91b | Richard Henderson | |
906 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(add): |
907 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(sub): |
908 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(mul): |
909 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(or): |
910 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(and): |
911 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(xor): |
912 | 55c0975c | Kirill Batuzov | CASE_OP_32_64(shl): |
913 | 55c0975c | Kirill Batuzov | CASE_OP_32_64(shr): |
914 | 55c0975c | Kirill Batuzov | CASE_OP_32_64(sar): |
915 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(rotl): |
916 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(rotr): |
917 | cb25c80a | Richard Henderson | CASE_OP_32_64(andc): |
918 | cb25c80a | Richard Henderson | CASE_OP_32_64(orc): |
919 | cb25c80a | Richard Henderson | CASE_OP_32_64(eqv): |
920 | cb25c80a | Richard Henderson | CASE_OP_32_64(nand): |
921 | cb25c80a | Richard Henderson | CASE_OP_32_64(nor): |
922 | 03271524 | Richard Henderson | CASE_OP_32_64(muluh): |
923 | 03271524 | Richard Henderson | CASE_OP_32_64(mulsh): |
924 | 01547f7f | Richard Henderson | CASE_OP_32_64(div): |
925 | 01547f7f | Richard Henderson | CASE_OP_32_64(divu): |
926 | 01547f7f | Richard Henderson | CASE_OP_32_64(rem): |
927 | 01547f7f | Richard Henderson | CASE_OP_32_64(remu): |
928 | 53108fb5 | Kirill Batuzov | if (temps[args[1]].state == TCG_TEMP_CONST |
929 | 53108fb5 | Kirill Batuzov | && temps[args[2]].state == TCG_TEMP_CONST) {
|
930 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = op_to_movi(op); |
931 | 53108fb5 | Kirill Batuzov | tmp = do_constant_folding(op, temps[args[1]].val,
|
932 | 53108fb5 | Kirill Batuzov | temps[args[2]].val);
|
933 | e590d4e6 | Aurelien Jarno | tcg_opt_gen_movi(gen_args, args[0], tmp);
|
934 | 53108fb5 | Kirill Batuzov | gen_args += 2;
|
935 | 6e14e91b | Richard Henderson | args += 3;
|
936 | 6e14e91b | Richard Henderson | break;
|
937 | 53108fb5 | Kirill Batuzov | } |
938 | 6e14e91b | Richard Henderson | goto do_default;
|
939 | 6e14e91b | Richard Henderson | |
940 | 7ef55fc9 | Aurelien Jarno | CASE_OP_32_64(deposit): |
941 | 7ef55fc9 | Aurelien Jarno | if (temps[args[1]].state == TCG_TEMP_CONST |
942 | 7ef55fc9 | Aurelien Jarno | && temps[args[2]].state == TCG_TEMP_CONST) {
|
943 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = op_to_movi(op); |
944 | 7ef55fc9 | Aurelien Jarno | tmp = ((1ull << args[4]) - 1); |
945 | 7ef55fc9 | Aurelien Jarno | tmp = (temps[args[1]].val & ~(tmp << args[3])) |
946 | 7ef55fc9 | Aurelien Jarno | | ((temps[args[2]].val & tmp) << args[3]); |
947 | 7ef55fc9 | Aurelien Jarno | tcg_opt_gen_movi(gen_args, args[0], tmp);
|
948 | 7ef55fc9 | Aurelien Jarno | gen_args += 2;
|
949 | 6e14e91b | Richard Henderson | args += 5;
|
950 | 6e14e91b | Richard Henderson | break;
|
951 | 7ef55fc9 | Aurelien Jarno | } |
952 | 6e14e91b | Richard Henderson | goto do_default;
|
953 | 6e14e91b | Richard Henderson | |
954 | f8dd19e5 | Aurelien Jarno | CASE_OP_32_64(setcond): |
955 | b336ceb6 | Aurelien Jarno | tmp = do_constant_folding_cond(op, args[1], args[2], args[3]); |
956 | b336ceb6 | Aurelien Jarno | if (tmp != 2) { |
957 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = op_to_movi(op); |
958 | e590d4e6 | Aurelien Jarno | tcg_opt_gen_movi(gen_args, args[0], tmp);
|
959 | f8dd19e5 | Aurelien Jarno | gen_args += 2;
|
960 | 6e14e91b | Richard Henderson | args += 4;
|
961 | 6e14e91b | Richard Henderson | break;
|
962 | f8dd19e5 | Aurelien Jarno | } |
963 | 6e14e91b | Richard Henderson | goto do_default;
|
964 | 6e14e91b | Richard Henderson | |
965 | fbeaa26c | Aurelien Jarno | CASE_OP_32_64(brcond): |
966 | b336ceb6 | Aurelien Jarno | tmp = do_constant_folding_cond(op, args[0], args[1], args[2]); |
967 | b336ceb6 | Aurelien Jarno | if (tmp != 2) { |
968 | b336ceb6 | Aurelien Jarno | if (tmp) {
|
969 | d193a14a | Paolo Bonzini | reset_all_temps(nb_temps); |
970 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_br; |
971 | fbeaa26c | Aurelien Jarno | gen_args[0] = args[3]; |
972 | fbeaa26c | Aurelien Jarno | gen_args += 1;
|
973 | fbeaa26c | Aurelien Jarno | } else {
|
974 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_nop; |
975 | fbeaa26c | Aurelien Jarno | } |
976 | 6e14e91b | Richard Henderson | args += 4;
|
977 | 6e14e91b | Richard Henderson | break;
|
978 | fbeaa26c | Aurelien Jarno | } |
979 | 6e14e91b | Richard Henderson | goto do_default;
|
980 | 6e14e91b | Richard Henderson | |
981 | fa01a208 | Richard Henderson | CASE_OP_32_64(movcond): |
982 | b336ceb6 | Aurelien Jarno | tmp = do_constant_folding_cond(op, args[1], args[2], args[5]); |
983 | b336ceb6 | Aurelien Jarno | if (tmp != 2) { |
984 | e590d4e6 | Aurelien Jarno | if (temps_are_copies(args[0], args[4-tmp])) { |
985 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_nop; |
986 | fa01a208 | Richard Henderson | } else if (temps[args[4-tmp]].state == TCG_TEMP_CONST) { |
987 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = op_to_movi(op); |
988 | e590d4e6 | Aurelien Jarno | tcg_opt_gen_movi(gen_args, args[0], temps[args[4-tmp]].val); |
989 | fa01a208 | Richard Henderson | gen_args += 2;
|
990 | fa01a208 | Richard Henderson | } else {
|
991 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = op_to_mov(op); |
992 | e590d4e6 | Aurelien Jarno | tcg_opt_gen_mov(s, gen_args, args[0], args[4-tmp]); |
993 | fa01a208 | Richard Henderson | gen_args += 2;
|
994 | fa01a208 | Richard Henderson | } |
995 | 6e14e91b | Richard Henderson | args += 6;
|
996 | 6e14e91b | Richard Henderson | break;
|
997 | fa01a208 | Richard Henderson | } |
998 | 6e14e91b | Richard Henderson | goto do_default;
|
999 | 212c328d | Richard Henderson | |
1000 | 212c328d | Richard Henderson | case INDEX_op_add2_i32:
|
1001 | 212c328d | Richard Henderson | case INDEX_op_sub2_i32:
|
1002 | 212c328d | Richard Henderson | if (temps[args[2]].state == TCG_TEMP_CONST |
1003 | 212c328d | Richard Henderson | && temps[args[3]].state == TCG_TEMP_CONST
|
1004 | 212c328d | Richard Henderson | && temps[args[4]].state == TCG_TEMP_CONST
|
1005 | 212c328d | Richard Henderson | && temps[args[5]].state == TCG_TEMP_CONST) {
|
1006 | 212c328d | Richard Henderson | uint32_t al = temps[args[2]].val;
|
1007 | 212c328d | Richard Henderson | uint32_t ah = temps[args[3]].val;
|
1008 | 212c328d | Richard Henderson | uint32_t bl = temps[args[4]].val;
|
1009 | 212c328d | Richard Henderson | uint32_t bh = temps[args[5]].val;
|
1010 | 212c328d | Richard Henderson | uint64_t a = ((uint64_t)ah << 32) | al;
|
1011 | 212c328d | Richard Henderson | uint64_t b = ((uint64_t)bh << 32) | bl;
|
1012 | 212c328d | Richard Henderson | TCGArg rl, rh; |
1013 | 212c328d | Richard Henderson | |
1014 | 212c328d | Richard Henderson | if (op == INDEX_op_add2_i32) {
|
1015 | 212c328d | Richard Henderson | a += b; |
1016 | 212c328d | Richard Henderson | } else {
|
1017 | 212c328d | Richard Henderson | a -= b; |
1018 | 212c328d | Richard Henderson | } |
1019 | 212c328d | Richard Henderson | |
1020 | 212c328d | Richard Henderson | /* We emit the extra nop when we emit the add2/sub2. */
|
1021 | 92414b31 | Evgeny Voevodin | assert(s->gen_opc_buf[op_index + 1] == INDEX_op_nop);
|
1022 | 212c328d | Richard Henderson | |
1023 | 212c328d | Richard Henderson | rl = args[0];
|
1024 | 212c328d | Richard Henderson | rh = args[1];
|
1025 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_movi_i32; |
1026 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[++op_index] = INDEX_op_movi_i32; |
1027 | 212c328d | Richard Henderson | tcg_opt_gen_movi(&gen_args[0], rl, (uint32_t)a);
|
1028 | 212c328d | Richard Henderson | tcg_opt_gen_movi(&gen_args[2], rh, (uint32_t)(a >> 32)); |
1029 | 212c328d | Richard Henderson | gen_args += 4;
|
1030 | 212c328d | Richard Henderson | args += 6;
|
1031 | 212c328d | Richard Henderson | break;
|
1032 | 212c328d | Richard Henderson | } |
1033 | 212c328d | Richard Henderson | goto do_default;
|
1034 | 1414968a | Richard Henderson | |
1035 | 1414968a | Richard Henderson | case INDEX_op_mulu2_i32:
|
1036 | 1414968a | Richard Henderson | if (temps[args[2]].state == TCG_TEMP_CONST |
1037 | 1414968a | Richard Henderson | && temps[args[3]].state == TCG_TEMP_CONST) {
|
1038 | 1414968a | Richard Henderson | uint32_t a = temps[args[2]].val;
|
1039 | 1414968a | Richard Henderson | uint32_t b = temps[args[3]].val;
|
1040 | 1414968a | Richard Henderson | uint64_t r = (uint64_t)a * b; |
1041 | 1414968a | Richard Henderson | TCGArg rl, rh; |
1042 | 1414968a | Richard Henderson | |
1043 | 1414968a | Richard Henderson | /* We emit the extra nop when we emit the mulu2. */
|
1044 | 92414b31 | Evgeny Voevodin | assert(s->gen_opc_buf[op_index + 1] == INDEX_op_nop);
|
1045 | 1414968a | Richard Henderson | |
1046 | 1414968a | Richard Henderson | rl = args[0];
|
1047 | 1414968a | Richard Henderson | rh = args[1];
|
1048 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_movi_i32; |
1049 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[++op_index] = INDEX_op_movi_i32; |
1050 | 1414968a | Richard Henderson | tcg_opt_gen_movi(&gen_args[0], rl, (uint32_t)r);
|
1051 | 1414968a | Richard Henderson | tcg_opt_gen_movi(&gen_args[2], rh, (uint32_t)(r >> 32)); |
1052 | 1414968a | Richard Henderson | gen_args += 4;
|
1053 | 1414968a | Richard Henderson | args += 4;
|
1054 | 1414968a | Richard Henderson | break;
|
1055 | 1414968a | Richard Henderson | } |
1056 | 1414968a | Richard Henderson | goto do_default;
|
1057 | 6e14e91b | Richard Henderson | |
1058 | bc1473ef | Richard Henderson | case INDEX_op_brcond2_i32:
|
1059 | 6c4382f8 | Richard Henderson | tmp = do_constant_folding_cond2(&args[0], &args[2], args[4]); |
1060 | 6c4382f8 | Richard Henderson | if (tmp != 2) { |
1061 | 6c4382f8 | Richard Henderson | if (tmp) {
|
1062 | d193a14a | Paolo Bonzini | reset_all_temps(nb_temps); |
1063 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_br; |
1064 | 6c4382f8 | Richard Henderson | gen_args[0] = args[5]; |
1065 | 6c4382f8 | Richard Henderson | gen_args += 1;
|
1066 | 6c4382f8 | Richard Henderson | } else {
|
1067 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_nop; |
1068 | 6c4382f8 | Richard Henderson | } |
1069 | 6c4382f8 | Richard Henderson | } else if ((args[4] == TCG_COND_LT || args[4] == TCG_COND_GE) |
1070 | 6c4382f8 | Richard Henderson | && temps[args[2]].state == TCG_TEMP_CONST
|
1071 | 6c4382f8 | Richard Henderson | && temps[args[3]].state == TCG_TEMP_CONST
|
1072 | 6c4382f8 | Richard Henderson | && temps[args[2]].val == 0 |
1073 | 6c4382f8 | Richard Henderson | && temps[args[3]].val == 0) { |
1074 | 6c4382f8 | Richard Henderson | /* Simplify LT/GE comparisons vs zero to a single compare
|
1075 | 6c4382f8 | Richard Henderson | vs the high word of the input. */
|
1076 | d193a14a | Paolo Bonzini | reset_all_temps(nb_temps); |
1077 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_brcond_i32; |
1078 | bc1473ef | Richard Henderson | gen_args[0] = args[1]; |
1079 | bc1473ef | Richard Henderson | gen_args[1] = args[3]; |
1080 | bc1473ef | Richard Henderson | gen_args[2] = args[4]; |
1081 | bc1473ef | Richard Henderson | gen_args[3] = args[5]; |
1082 | bc1473ef | Richard Henderson | gen_args += 4;
|
1083 | 6c4382f8 | Richard Henderson | } else {
|
1084 | 6c4382f8 | Richard Henderson | goto do_default;
|
1085 | bc1473ef | Richard Henderson | } |
1086 | 6c4382f8 | Richard Henderson | args += 6;
|
1087 | 6c4382f8 | Richard Henderson | break;
|
1088 | bc1473ef | Richard Henderson | |
1089 | bc1473ef | Richard Henderson | case INDEX_op_setcond2_i32:
|
1090 | 6c4382f8 | Richard Henderson | tmp = do_constant_folding_cond2(&args[1], &args[3], args[5]); |
1091 | 6c4382f8 | Richard Henderson | if (tmp != 2) { |
1092 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_movi_i32; |
1093 | 6c4382f8 | Richard Henderson | tcg_opt_gen_movi(gen_args, args[0], tmp);
|
1094 | 6c4382f8 | Richard Henderson | gen_args += 2;
|
1095 | 6c4382f8 | Richard Henderson | } else if ((args[5] == TCG_COND_LT || args[5] == TCG_COND_GE) |
1096 | 6c4382f8 | Richard Henderson | && temps[args[3]].state == TCG_TEMP_CONST
|
1097 | 6c4382f8 | Richard Henderson | && temps[args[4]].state == TCG_TEMP_CONST
|
1098 | 6c4382f8 | Richard Henderson | && temps[args[3]].val == 0 |
1099 | 6c4382f8 | Richard Henderson | && temps[args[4]].val == 0) { |
1100 | 6c4382f8 | Richard Henderson | /* Simplify LT/GE comparisons vs zero to a single compare
|
1101 | 6c4382f8 | Richard Henderson | vs the high word of the input. */
|
1102 | 92414b31 | Evgeny Voevodin | s->gen_opc_buf[op_index] = INDEX_op_setcond_i32; |
1103 | 66e61b55 | Aurelien Jarno | reset_temp(args[0]);
|
1104 | bc1473ef | Richard Henderson | gen_args[0] = args[0]; |
1105 | bc1473ef | Richard Henderson | gen_args[1] = args[2]; |
1106 | bc1473ef | Richard Henderson | gen_args[2] = args[4]; |
1107 | bc1473ef | Richard Henderson | gen_args[3] = args[5]; |
1108 | bc1473ef | Richard Henderson | gen_args += 4;
|
1109 | 6c4382f8 | Richard Henderson | } else {
|
1110 | 6c4382f8 | Richard Henderson | goto do_default;
|
1111 | bc1473ef | Richard Henderson | } |
1112 | 6c4382f8 | Richard Henderson | args += 6;
|
1113 | 6c4382f8 | Richard Henderson | break;
|
1114 | bc1473ef | Richard Henderson | |
1115 | 8f2e8c07 | Kirill Batuzov | case INDEX_op_call:
|
1116 | 22613af4 | Kirill Batuzov | nb_call_args = (args[0] >> 16) + (args[0] & 0xffff); |
1117 | 78505279 | Aurelien Jarno | if (!(args[nb_call_args + 1] & (TCG_CALL_NO_READ_GLOBALS | |
1118 | 78505279 | Aurelien Jarno | TCG_CALL_NO_WRITE_GLOBALS))) { |
1119 | 22613af4 | Kirill Batuzov | for (i = 0; i < nb_globals; i++) { |
1120 | e590d4e6 | Aurelien Jarno | reset_temp(i); |
1121 | 22613af4 | Kirill Batuzov | } |
1122 | 22613af4 | Kirill Batuzov | } |
1123 | 22613af4 | Kirill Batuzov | for (i = 0; i < (args[0] >> 16); i++) { |
1124 | e590d4e6 | Aurelien Jarno | reset_temp(args[i + 1]);
|
1125 | 22613af4 | Kirill Batuzov | } |
1126 | 22613af4 | Kirill Batuzov | i = nb_call_args + 3;
|
1127 | 8f2e8c07 | Kirill Batuzov | while (i) {
|
1128 | 8f2e8c07 | Kirill Batuzov | *gen_args = *args; |
1129 | 8f2e8c07 | Kirill Batuzov | args++; |
1130 | 8f2e8c07 | Kirill Batuzov | gen_args++; |
1131 | 8f2e8c07 | Kirill Batuzov | i--; |
1132 | 8f2e8c07 | Kirill Batuzov | } |
1133 | 8f2e8c07 | Kirill Batuzov | break;
|
1134 | 6e14e91b | Richard Henderson | |
1135 | 8f2e8c07 | Kirill Batuzov | default:
|
1136 | 6e14e91b | Richard Henderson | do_default:
|
1137 | 6e14e91b | Richard Henderson | /* Default case: we know nothing about operation (or were unable
|
1138 | 6e14e91b | Richard Henderson | to compute the operation result) so no propagation is done.
|
1139 | 6e14e91b | Richard Henderson | We trash everything if the operation is the end of a basic
|
1140 | 3a9d8b17 | Paolo Bonzini | block, otherwise we only trash the output args. "mask" is
|
1141 | 3a9d8b17 | Paolo Bonzini | the non-zero bits mask for the first output arg. */
|
1142 | a2550660 | Aurelien Jarno | if (def->flags & TCG_OPF_BB_END) {
|
1143 | d193a14a | Paolo Bonzini | reset_all_temps(nb_temps); |
1144 | a2550660 | Aurelien Jarno | } else {
|
1145 | a2550660 | Aurelien Jarno | for (i = 0; i < def->nb_oargs; i++) { |
1146 | e590d4e6 | Aurelien Jarno | reset_temp(args[i]); |
1147 | a2550660 | Aurelien Jarno | } |
1148 | 22613af4 | Kirill Batuzov | } |
1149 | 8f2e8c07 | Kirill Batuzov | for (i = 0; i < def->nb_args; i++) { |
1150 | 8f2e8c07 | Kirill Batuzov | gen_args[i] = args[i]; |
1151 | 8f2e8c07 | Kirill Batuzov | } |
1152 | 8f2e8c07 | Kirill Batuzov | args += def->nb_args; |
1153 | 8f2e8c07 | Kirill Batuzov | gen_args += def->nb_args; |
1154 | 8f2e8c07 | Kirill Batuzov | break;
|
1155 | 8f2e8c07 | Kirill Batuzov | } |
1156 | 8f2e8c07 | Kirill Batuzov | } |
1157 | 8f2e8c07 | Kirill Batuzov | |
1158 | 8f2e8c07 | Kirill Batuzov | return gen_args;
|
1159 | 8f2e8c07 | Kirill Batuzov | } |
1160 | 8f2e8c07 | Kirill Batuzov | |
1161 | 8f2e8c07 | Kirill Batuzov | TCGArg *tcg_optimize(TCGContext *s, uint16_t *tcg_opc_ptr, |
1162 | 8f2e8c07 | Kirill Batuzov | TCGArg *args, TCGOpDef *tcg_op_defs) |
1163 | 8f2e8c07 | Kirill Batuzov | { |
1164 | 8f2e8c07 | Kirill Batuzov | TCGArg *res; |
1165 | 8f2e8c07 | Kirill Batuzov | res = tcg_constant_folding(s, tcg_opc_ptr, args, tcg_op_defs); |
1166 | 8f2e8c07 | Kirill Batuzov | return res;
|
1167 | 8f2e8c07 | Kirill Batuzov | } |