root / tcg / optimize.c @ fa01a208
History | View | Annotate | Download (22.7 kB)
1 | 8f2e8c07 | Kirill Batuzov | /*
|
---|---|---|---|
2 | 8f2e8c07 | Kirill Batuzov | * Optimizations for Tiny Code Generator for QEMU
|
3 | 8f2e8c07 | Kirill Batuzov | *
|
4 | 8f2e8c07 | Kirill Batuzov | * Copyright (c) 2010 Samsung Electronics.
|
5 | 8f2e8c07 | Kirill Batuzov | * Contributed by Kirill Batuzov <batuzovk@ispras.ru>
|
6 | 8f2e8c07 | Kirill Batuzov | *
|
7 | 8f2e8c07 | Kirill Batuzov | * Permission is hereby granted, free of charge, to any person obtaining a copy
|
8 | 8f2e8c07 | Kirill Batuzov | * of this software and associated documentation files (the "Software"), to deal
|
9 | 8f2e8c07 | Kirill Batuzov | * in the Software without restriction, including without limitation the rights
|
10 | 8f2e8c07 | Kirill Batuzov | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 | 8f2e8c07 | Kirill Batuzov | * copies of the Software, and to permit persons to whom the Software is
|
12 | 8f2e8c07 | Kirill Batuzov | * furnished to do so, subject to the following conditions:
|
13 | 8f2e8c07 | Kirill Batuzov | *
|
14 | 8f2e8c07 | Kirill Batuzov | * The above copyright notice and this permission notice shall be included in
|
15 | 8f2e8c07 | Kirill Batuzov | * all copies or substantial portions of the Software.
|
16 | 8f2e8c07 | Kirill Batuzov | *
|
17 | 8f2e8c07 | Kirill Batuzov | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
18 | 8f2e8c07 | Kirill Batuzov | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
19 | 8f2e8c07 | Kirill Batuzov | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
20 | 8f2e8c07 | Kirill Batuzov | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
21 | 8f2e8c07 | Kirill Batuzov | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
22 | 8f2e8c07 | Kirill Batuzov | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
23 | 8f2e8c07 | Kirill Batuzov | * THE SOFTWARE.
|
24 | 8f2e8c07 | Kirill Batuzov | */
|
25 | 8f2e8c07 | Kirill Batuzov | |
26 | 8f2e8c07 | Kirill Batuzov | #include "config.h" |
27 | 8f2e8c07 | Kirill Batuzov | |
28 | 8f2e8c07 | Kirill Batuzov | #include <stdlib.h> |
29 | 8f2e8c07 | Kirill Batuzov | #include <stdio.h> |
30 | 8f2e8c07 | Kirill Batuzov | |
31 | 8f2e8c07 | Kirill Batuzov | #include "qemu-common.h" |
32 | 8f2e8c07 | Kirill Batuzov | #include "tcg-op.h" |
33 | 8f2e8c07 | Kirill Batuzov | |
/* Expand to the pair of case labels for the 32-bit and 64-bit variants of
   opcode X, e.g. CASE_OP_32_64(add) expands to
   "case INDEX_op_add_i32: case INDEX_op_add_i64".  */
#define CASE_OP_32_64(x)                        \
        glue(glue(case INDEX_op_, x), _i32):    \
        glue(glue(case INDEX_op_, x), _i64)
37 | 8f2e8c07 | Kirill Batuzov | |
/* Analysis state of a single temp during constant/copy propagation.  */
typedef enum {
    TCG_TEMP_UNDEF = 0,  /* nothing known yet (initial, zeroed state) */
    TCG_TEMP_CONST,      /* temp holds a known constant; see .val */
    TCG_TEMP_COPY,       /* temp is a copy; .val is the representative */
    TCG_TEMP_HAS_COPY,   /* temp is the representative of a copy class */
    TCG_TEMP_ANY         /* temp holds an unknown run-time value */
} tcg_temp_state;

struct tcg_temp_info {
    tcg_temp_state state;
    uint16_t prev_copy;   /* previous temp in the circular copy list */
    uint16_t next_copy;   /* next temp in the circular copy list */
    tcg_target_ulong val; /* constant value (CONST) or representative (COPY) */
};

/* Per-temp analysis state, indexed by temp number; (re)initialized by
   tcg_constant_folding for each translation.  */
static struct tcg_temp_info temps[TCG_MAX_TEMPS];
54 | 22613af4 | Kirill Batuzov | |
/* Reset TEMP's state to TCG_TEMP_ANY.  If TEMP was a representative of some
   class of equivalent temp's, a new representative should be chosen in this
   class. */
static void reset_temp(TCGArg temp, int nb_temps, int nb_globals)
{
    int i;
    TCGArg new_base = (TCGArg)-1;
    if (temps[temp].state == TCG_TEMP_HAS_COPY) {
        /* TEMP was the representative: walk its circular copy list and
           promote the first non-global member to representative.  Globals
           are skipped here (only temps may lead a class).  */
        for (i = temps[temp].next_copy; i != temp; i = temps[i].next_copy) {
            if (i >= nb_globals) {
                temps[i].state = TCG_TEMP_HAS_COPY;
                new_base = i;
                break;
            }
        }
        /* Second pass: repoint every remaining member at the new
           representative, or dissolve the class if none was found.  */
        for (i = temps[temp].next_copy; i != temp; i = temps[i].next_copy) {
            if (new_base == (TCGArg)-1) {
                temps[i].state = TCG_TEMP_ANY;
            } else {
                temps[i].val = new_base;
            }
        }
        /* Unlink TEMP from the circular list.  */
        temps[temps[temp].next_copy].prev_copy = temps[temp].prev_copy;
        temps[temps[temp].prev_copy].next_copy = temps[temp].next_copy;
    } else if (temps[temp].state == TCG_TEMP_COPY) {
        /* TEMP was an ordinary member: just unlink it and remember its
           representative so we can check for a now-singleton class.  */
        temps[temps[temp].next_copy].prev_copy = temps[temp].prev_copy;
        temps[temps[temp].prev_copy].next_copy = temps[temp].next_copy;
        new_base = temps[temp].val;
    }
    temps[temp].state = TCG_TEMP_ANY;
    /* If the (old or new) representative is now alone in its list, the
       class is trivial: drop its HAS_COPY status too.  */
    if (new_base != (TCGArg)-1 && temps[new_base].next_copy == new_base) {
        temps[new_base].state = TCG_TEMP_ANY;
    }
}
89 | 22613af4 | Kirill Batuzov | |
90 | fe0de7aa | Blue Swirl | static int op_bits(TCGOpcode op) |
91 | 22613af4 | Kirill Batuzov | { |
92 | 8399ad59 | Richard Henderson | const TCGOpDef *def = &tcg_op_defs[op];
|
93 | 8399ad59 | Richard Henderson | return def->flags & TCG_OPF_64BIT ? 64 : 32; |
94 | 22613af4 | Kirill Batuzov | } |
95 | 22613af4 | Kirill Batuzov | |
96 | fe0de7aa | Blue Swirl | static TCGOpcode op_to_movi(TCGOpcode op)
|
97 | 22613af4 | Kirill Batuzov | { |
98 | 22613af4 | Kirill Batuzov | switch (op_bits(op)) {
|
99 | 22613af4 | Kirill Batuzov | case 32: |
100 | 22613af4 | Kirill Batuzov | return INDEX_op_movi_i32;
|
101 | 22613af4 | Kirill Batuzov | case 64: |
102 | 22613af4 | Kirill Batuzov | return INDEX_op_movi_i64;
|
103 | 22613af4 | Kirill Batuzov | default:
|
104 | 22613af4 | Kirill Batuzov | fprintf(stderr, "op_to_movi: unexpected return value of "
|
105 | 22613af4 | Kirill Batuzov | "function op_bits.\n");
|
106 | 22613af4 | Kirill Batuzov | tcg_abort(); |
107 | 22613af4 | Kirill Batuzov | } |
108 | 22613af4 | Kirill Batuzov | } |
109 | 22613af4 | Kirill Batuzov | |
/* Emit the two arguments of a "mov DST, SRC" into GEN_ARGS and record DST
   as a copy of SRC.  DST's previous state is discarded first.  Only temps
   (src >= nb_globals) are tracked as copy sources; globals are emitted
   without updating the copy lists.  */
static void tcg_opt_gen_mov(TCGArg *gen_args, TCGArg dst, TCGArg src,
                            int nb_temps, int nb_globals)
{
    reset_temp(dst, nb_temps, nb_globals);
    /* Callers must have forwarded SRC to its representative already.  */
    assert(temps[src].state != TCG_TEMP_COPY);
    if (src >= nb_globals) {
        assert(temps[src].state != TCG_TEMP_CONST);
        if (temps[src].state != TCG_TEMP_HAS_COPY) {
            /* SRC had no copies: start a new singleton class with SRC as
               its own neighbor in the circular list.  */
            temps[src].state = TCG_TEMP_HAS_COPY;
            temps[src].next_copy = src;
            temps[src].prev_copy = src;
        }
        /* Insert DST into the circular list directly after SRC.  */
        temps[dst].state = TCG_TEMP_COPY;
        temps[dst].val = src;
        temps[dst].next_copy = temps[src].next_copy;
        temps[dst].prev_copy = src;
        temps[temps[dst].next_copy].prev_copy = dst;
        temps[src].next_copy = dst;
    }
    gen_args[0] = dst;
    gen_args[1] = src;
}
132 | 22613af4 | Kirill Batuzov | |
133 | 22613af4 | Kirill Batuzov | static void tcg_opt_gen_movi(TCGArg *gen_args, TCGArg dst, TCGArg val, |
134 | 22613af4 | Kirill Batuzov | int nb_temps, int nb_globals) |
135 | 22613af4 | Kirill Batuzov | { |
136 | 22613af4 | Kirill Batuzov | reset_temp(dst, nb_temps, nb_globals); |
137 | 22613af4 | Kirill Batuzov | temps[dst].state = TCG_TEMP_CONST; |
138 | 22613af4 | Kirill Batuzov | temps[dst].val = val; |
139 | 22613af4 | Kirill Batuzov | gen_args[0] = dst;
|
140 | 22613af4 | Kirill Batuzov | gen_args[1] = val;
|
141 | 22613af4 | Kirill Batuzov | } |
142 | 22613af4 | Kirill Batuzov | |
143 | fe0de7aa | Blue Swirl | static TCGOpcode op_to_mov(TCGOpcode op)
|
144 | 53108fb5 | Kirill Batuzov | { |
145 | 53108fb5 | Kirill Batuzov | switch (op_bits(op)) {
|
146 | 53108fb5 | Kirill Batuzov | case 32: |
147 | 53108fb5 | Kirill Batuzov | return INDEX_op_mov_i32;
|
148 | 53108fb5 | Kirill Batuzov | case 64: |
149 | 53108fb5 | Kirill Batuzov | return INDEX_op_mov_i64;
|
150 | 53108fb5 | Kirill Batuzov | default:
|
151 | 53108fb5 | Kirill Batuzov | fprintf(stderr, "op_to_mov: unexpected return value of "
|
152 | 53108fb5 | Kirill Batuzov | "function op_bits.\n");
|
153 | 53108fb5 | Kirill Batuzov | tcg_abort(); |
154 | 53108fb5 | Kirill Batuzov | } |
155 | 53108fb5 | Kirill Batuzov | } |
156 | 53108fb5 | Kirill Batuzov | |
157 | fe0de7aa | Blue Swirl | static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
|
158 | 53108fb5 | Kirill Batuzov | { |
159 | 53108fb5 | Kirill Batuzov | switch (op) {
|
160 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(add): |
161 | 53108fb5 | Kirill Batuzov | return x + y;
|
162 | 53108fb5 | Kirill Batuzov | |
163 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(sub): |
164 | 53108fb5 | Kirill Batuzov | return x - y;
|
165 | 53108fb5 | Kirill Batuzov | |
166 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(mul): |
167 | 53108fb5 | Kirill Batuzov | return x * y;
|
168 | 53108fb5 | Kirill Batuzov | |
169 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(and): |
170 | 9a81090b | Kirill Batuzov | return x & y;
|
171 | 9a81090b | Kirill Batuzov | |
172 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(or): |
173 | 9a81090b | Kirill Batuzov | return x | y;
|
174 | 9a81090b | Kirill Batuzov | |
175 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(xor): |
176 | 9a81090b | Kirill Batuzov | return x ^ y;
|
177 | 9a81090b | Kirill Batuzov | |
178 | 55c0975c | Kirill Batuzov | case INDEX_op_shl_i32:
|
179 | 55c0975c | Kirill Batuzov | return (uint32_t)x << (uint32_t)y;
|
180 | 55c0975c | Kirill Batuzov | |
181 | 55c0975c | Kirill Batuzov | case INDEX_op_shl_i64:
|
182 | 55c0975c | Kirill Batuzov | return (uint64_t)x << (uint64_t)y;
|
183 | 55c0975c | Kirill Batuzov | |
184 | 55c0975c | Kirill Batuzov | case INDEX_op_shr_i32:
|
185 | 55c0975c | Kirill Batuzov | return (uint32_t)x >> (uint32_t)y;
|
186 | 55c0975c | Kirill Batuzov | |
187 | 55c0975c | Kirill Batuzov | case INDEX_op_shr_i64:
|
188 | 55c0975c | Kirill Batuzov | return (uint64_t)x >> (uint64_t)y;
|
189 | 55c0975c | Kirill Batuzov | |
190 | 55c0975c | Kirill Batuzov | case INDEX_op_sar_i32:
|
191 | 55c0975c | Kirill Batuzov | return (int32_t)x >> (int32_t)y;
|
192 | 55c0975c | Kirill Batuzov | |
193 | 55c0975c | Kirill Batuzov | case INDEX_op_sar_i64:
|
194 | 55c0975c | Kirill Batuzov | return (int64_t)x >> (int64_t)y;
|
195 | 55c0975c | Kirill Batuzov | |
196 | 55c0975c | Kirill Batuzov | case INDEX_op_rotr_i32:
|
197 | 25c4d9cc | Richard Henderson | x = ((uint32_t)x << (32 - y)) | ((uint32_t)x >> y);
|
198 | 55c0975c | Kirill Batuzov | return x;
|
199 | 55c0975c | Kirill Batuzov | |
200 | 55c0975c | Kirill Batuzov | case INDEX_op_rotr_i64:
|
201 | 25c4d9cc | Richard Henderson | x = ((uint64_t)x << (64 - y)) | ((uint64_t)x >> y);
|
202 | 55c0975c | Kirill Batuzov | return x;
|
203 | 55c0975c | Kirill Batuzov | |
204 | 55c0975c | Kirill Batuzov | case INDEX_op_rotl_i32:
|
205 | 25c4d9cc | Richard Henderson | x = ((uint32_t)x << y) | ((uint32_t)x >> (32 - y));
|
206 | 55c0975c | Kirill Batuzov | return x;
|
207 | 55c0975c | Kirill Batuzov | |
208 | 55c0975c | Kirill Batuzov | case INDEX_op_rotl_i64:
|
209 | 25c4d9cc | Richard Henderson | x = ((uint64_t)x << y) | ((uint64_t)x >> (64 - y));
|
210 | 55c0975c | Kirill Batuzov | return x;
|
211 | 25c4d9cc | Richard Henderson | |
212 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(not): |
213 | a640f031 | Kirill Batuzov | return ~x;
|
214 | 25c4d9cc | Richard Henderson | |
215 | cb25c80a | Richard Henderson | CASE_OP_32_64(neg): |
216 | cb25c80a | Richard Henderson | return -x;
|
217 | cb25c80a | Richard Henderson | |
218 | cb25c80a | Richard Henderson | CASE_OP_32_64(andc): |
219 | cb25c80a | Richard Henderson | return x & ~y;
|
220 | cb25c80a | Richard Henderson | |
221 | cb25c80a | Richard Henderson | CASE_OP_32_64(orc): |
222 | cb25c80a | Richard Henderson | return x | ~y;
|
223 | cb25c80a | Richard Henderson | |
224 | cb25c80a | Richard Henderson | CASE_OP_32_64(eqv): |
225 | cb25c80a | Richard Henderson | return ~(x ^ y);
|
226 | cb25c80a | Richard Henderson | |
227 | cb25c80a | Richard Henderson | CASE_OP_32_64(nand): |
228 | cb25c80a | Richard Henderson | return ~(x & y);
|
229 | cb25c80a | Richard Henderson | |
230 | cb25c80a | Richard Henderson | CASE_OP_32_64(nor): |
231 | cb25c80a | Richard Henderson | return ~(x | y);
|
232 | cb25c80a | Richard Henderson | |
233 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext8s): |
234 | a640f031 | Kirill Batuzov | return (int8_t)x;
|
235 | 25c4d9cc | Richard Henderson | |
236 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext16s): |
237 | a640f031 | Kirill Batuzov | return (int16_t)x;
|
238 | 25c4d9cc | Richard Henderson | |
239 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext8u): |
240 | a640f031 | Kirill Batuzov | return (uint8_t)x;
|
241 | 25c4d9cc | Richard Henderson | |
242 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext16u): |
243 | a640f031 | Kirill Batuzov | return (uint16_t)x;
|
244 | a640f031 | Kirill Batuzov | |
245 | a640f031 | Kirill Batuzov | case INDEX_op_ext32s_i64:
|
246 | a640f031 | Kirill Batuzov | return (int32_t)x;
|
247 | a640f031 | Kirill Batuzov | |
248 | a640f031 | Kirill Batuzov | case INDEX_op_ext32u_i64:
|
249 | a640f031 | Kirill Batuzov | return (uint32_t)x;
|
250 | a640f031 | Kirill Batuzov | |
251 | 53108fb5 | Kirill Batuzov | default:
|
252 | 53108fb5 | Kirill Batuzov | fprintf(stderr, |
253 | 53108fb5 | Kirill Batuzov | "Unrecognized operation %d in do_constant_folding.\n", op);
|
254 | 53108fb5 | Kirill Batuzov | tcg_abort(); |
255 | 53108fb5 | Kirill Batuzov | } |
256 | 53108fb5 | Kirill Batuzov | } |
257 | 53108fb5 | Kirill Batuzov | |
258 | fe0de7aa | Blue Swirl | static TCGArg do_constant_folding(TCGOpcode op, TCGArg x, TCGArg y)
|
259 | 53108fb5 | Kirill Batuzov | { |
260 | 53108fb5 | Kirill Batuzov | TCGArg res = do_constant_folding_2(op, x, y); |
261 | 53108fb5 | Kirill Batuzov | if (op_bits(op) == 32) { |
262 | 53108fb5 | Kirill Batuzov | res &= 0xffffffff;
|
263 | 53108fb5 | Kirill Batuzov | } |
264 | 53108fb5 | Kirill Batuzov | return res;
|
265 | 53108fb5 | Kirill Batuzov | } |
266 | 53108fb5 | Kirill Batuzov | |
267 | f8dd19e5 | Aurelien Jarno | static TCGArg do_constant_folding_cond(TCGOpcode op, TCGArg x,
|
268 | f8dd19e5 | Aurelien Jarno | TCGArg y, TCGCond c) |
269 | f8dd19e5 | Aurelien Jarno | { |
270 | f8dd19e5 | Aurelien Jarno | switch (op_bits(op)) {
|
271 | f8dd19e5 | Aurelien Jarno | case 32: |
272 | f8dd19e5 | Aurelien Jarno | switch (c) {
|
273 | f8dd19e5 | Aurelien Jarno | case TCG_COND_EQ:
|
274 | f8dd19e5 | Aurelien Jarno | return (uint32_t)x == (uint32_t)y;
|
275 | f8dd19e5 | Aurelien Jarno | case TCG_COND_NE:
|
276 | f8dd19e5 | Aurelien Jarno | return (uint32_t)x != (uint32_t)y;
|
277 | f8dd19e5 | Aurelien Jarno | case TCG_COND_LT:
|
278 | f8dd19e5 | Aurelien Jarno | return (int32_t)x < (int32_t)y;
|
279 | f8dd19e5 | Aurelien Jarno | case TCG_COND_GE:
|
280 | f8dd19e5 | Aurelien Jarno | return (int32_t)x >= (int32_t)y;
|
281 | f8dd19e5 | Aurelien Jarno | case TCG_COND_LE:
|
282 | f8dd19e5 | Aurelien Jarno | return (int32_t)x <= (int32_t)y;
|
283 | f8dd19e5 | Aurelien Jarno | case TCG_COND_GT:
|
284 | f8dd19e5 | Aurelien Jarno | return (int32_t)x > (int32_t)y;
|
285 | f8dd19e5 | Aurelien Jarno | case TCG_COND_LTU:
|
286 | f8dd19e5 | Aurelien Jarno | return (uint32_t)x < (uint32_t)y;
|
287 | f8dd19e5 | Aurelien Jarno | case TCG_COND_GEU:
|
288 | f8dd19e5 | Aurelien Jarno | return (uint32_t)x >= (uint32_t)y;
|
289 | f8dd19e5 | Aurelien Jarno | case TCG_COND_LEU:
|
290 | f8dd19e5 | Aurelien Jarno | return (uint32_t)x <= (uint32_t)y;
|
291 | f8dd19e5 | Aurelien Jarno | case TCG_COND_GTU:
|
292 | f8dd19e5 | Aurelien Jarno | return (uint32_t)x > (uint32_t)y;
|
293 | f8dd19e5 | Aurelien Jarno | } |
294 | f8dd19e5 | Aurelien Jarno | break;
|
295 | f8dd19e5 | Aurelien Jarno | case 64: |
296 | f8dd19e5 | Aurelien Jarno | switch (c) {
|
297 | f8dd19e5 | Aurelien Jarno | case TCG_COND_EQ:
|
298 | f8dd19e5 | Aurelien Jarno | return (uint64_t)x == (uint64_t)y;
|
299 | f8dd19e5 | Aurelien Jarno | case TCG_COND_NE:
|
300 | f8dd19e5 | Aurelien Jarno | return (uint64_t)x != (uint64_t)y;
|
301 | f8dd19e5 | Aurelien Jarno | case TCG_COND_LT:
|
302 | f8dd19e5 | Aurelien Jarno | return (int64_t)x < (int64_t)y;
|
303 | f8dd19e5 | Aurelien Jarno | case TCG_COND_GE:
|
304 | f8dd19e5 | Aurelien Jarno | return (int64_t)x >= (int64_t)y;
|
305 | f8dd19e5 | Aurelien Jarno | case TCG_COND_LE:
|
306 | f8dd19e5 | Aurelien Jarno | return (int64_t)x <= (int64_t)y;
|
307 | f8dd19e5 | Aurelien Jarno | case TCG_COND_GT:
|
308 | f8dd19e5 | Aurelien Jarno | return (int64_t)x > (int64_t)y;
|
309 | f8dd19e5 | Aurelien Jarno | case TCG_COND_LTU:
|
310 | f8dd19e5 | Aurelien Jarno | return (uint64_t)x < (uint64_t)y;
|
311 | f8dd19e5 | Aurelien Jarno | case TCG_COND_GEU:
|
312 | f8dd19e5 | Aurelien Jarno | return (uint64_t)x >= (uint64_t)y;
|
313 | f8dd19e5 | Aurelien Jarno | case TCG_COND_LEU:
|
314 | f8dd19e5 | Aurelien Jarno | return (uint64_t)x <= (uint64_t)y;
|
315 | f8dd19e5 | Aurelien Jarno | case TCG_COND_GTU:
|
316 | f8dd19e5 | Aurelien Jarno | return (uint64_t)x > (uint64_t)y;
|
317 | f8dd19e5 | Aurelien Jarno | } |
318 | f8dd19e5 | Aurelien Jarno | break;
|
319 | f8dd19e5 | Aurelien Jarno | } |
320 | f8dd19e5 | Aurelien Jarno | |
321 | f8dd19e5 | Aurelien Jarno | fprintf(stderr, |
322 | f8dd19e5 | Aurelien Jarno | "Unrecognized bitness %d or condition %d in "
|
323 | f8dd19e5 | Aurelien Jarno | "do_constant_folding_cond.\n", op_bits(op), c);
|
324 | f8dd19e5 | Aurelien Jarno | tcg_abort(); |
325 | f8dd19e5 | Aurelien Jarno | } |
326 | f8dd19e5 | Aurelien Jarno | |
327 | f8dd19e5 | Aurelien Jarno | |
328 | 22613af4 | Kirill Batuzov | /* Propagate constants and copies, fold constant expressions. */
|
329 | 8f2e8c07 | Kirill Batuzov | static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr,
|
330 | 8f2e8c07 | Kirill Batuzov | TCGArg *args, TCGOpDef *tcg_op_defs) |
331 | 8f2e8c07 | Kirill Batuzov | { |
332 | fe0de7aa | Blue Swirl | int i, nb_ops, op_index, nb_temps, nb_globals, nb_call_args;
|
333 | fe0de7aa | Blue Swirl | TCGOpcode op; |
334 | 8f2e8c07 | Kirill Batuzov | const TCGOpDef *def;
|
335 | 8f2e8c07 | Kirill Batuzov | TCGArg *gen_args; |
336 | 53108fb5 | Kirill Batuzov | TCGArg tmp; |
337 | 22613af4 | Kirill Batuzov | /* Array VALS has an element for each temp.
|
338 | 22613af4 | Kirill Batuzov | If this temp holds a constant then its value is kept in VALS' element.
|
339 | 22613af4 | Kirill Batuzov | If this temp is a copy of other ones then this equivalence class'
|
340 | 22613af4 | Kirill Batuzov | representative is kept in VALS' element.
|
341 | 22613af4 | Kirill Batuzov | If this temp is neither copy nor constant then corresponding VALS'
|
342 | 22613af4 | Kirill Batuzov | element is unused. */
|
343 | 8f2e8c07 | Kirill Batuzov | |
344 | 8f2e8c07 | Kirill Batuzov | nb_temps = s->nb_temps; |
345 | 8f2e8c07 | Kirill Batuzov | nb_globals = s->nb_globals; |
346 | 22613af4 | Kirill Batuzov | memset(temps, 0, nb_temps * sizeof(struct tcg_temp_info)); |
347 | 8f2e8c07 | Kirill Batuzov | |
348 | 8f2e8c07 | Kirill Batuzov | nb_ops = tcg_opc_ptr - gen_opc_buf; |
349 | 8f2e8c07 | Kirill Batuzov | gen_args = args; |
350 | 8f2e8c07 | Kirill Batuzov | for (op_index = 0; op_index < nb_ops; op_index++) { |
351 | 8f2e8c07 | Kirill Batuzov | op = gen_opc_buf[op_index]; |
352 | 8f2e8c07 | Kirill Batuzov | def = &tcg_op_defs[op]; |
353 | 22613af4 | Kirill Batuzov | /* Do copy propagation */
|
354 | 22613af4 | Kirill Batuzov | if (!(def->flags & (TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS))) {
|
355 | 22613af4 | Kirill Batuzov | assert(op != INDEX_op_call); |
356 | 22613af4 | Kirill Batuzov | for (i = def->nb_oargs; i < def->nb_oargs + def->nb_iargs; i++) {
|
357 | 22613af4 | Kirill Batuzov | if (temps[args[i]].state == TCG_TEMP_COPY) {
|
358 | 22613af4 | Kirill Batuzov | args[i] = temps[args[i]].val; |
359 | 22613af4 | Kirill Batuzov | } |
360 | 22613af4 | Kirill Batuzov | } |
361 | 22613af4 | Kirill Batuzov | } |
362 | 22613af4 | Kirill Batuzov | |
363 | 53108fb5 | Kirill Batuzov | /* For commutative operations make constant second argument */
|
364 | 53108fb5 | Kirill Batuzov | switch (op) {
|
365 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(add): |
366 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(mul): |
367 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(and): |
368 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(or): |
369 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(xor): |
370 | cb25c80a | Richard Henderson | CASE_OP_32_64(eqv): |
371 | cb25c80a | Richard Henderson | CASE_OP_32_64(nand): |
372 | cb25c80a | Richard Henderson | CASE_OP_32_64(nor): |
373 | 53108fb5 | Kirill Batuzov | if (temps[args[1]].state == TCG_TEMP_CONST) { |
374 | 53108fb5 | Kirill Batuzov | tmp = args[1];
|
375 | 53108fb5 | Kirill Batuzov | args[1] = args[2]; |
376 | 53108fb5 | Kirill Batuzov | args[2] = tmp;
|
377 | 53108fb5 | Kirill Batuzov | } |
378 | 53108fb5 | Kirill Batuzov | break;
|
379 | 65a7cce1 | Aurelien Jarno | CASE_OP_32_64(brcond): |
380 | 65a7cce1 | Aurelien Jarno | if (temps[args[0]].state == TCG_TEMP_CONST |
381 | 65a7cce1 | Aurelien Jarno | && temps[args[1]].state != TCG_TEMP_CONST) {
|
382 | 65a7cce1 | Aurelien Jarno | tmp = args[0];
|
383 | 65a7cce1 | Aurelien Jarno | args[0] = args[1]; |
384 | 65a7cce1 | Aurelien Jarno | args[1] = tmp;
|
385 | 65a7cce1 | Aurelien Jarno | args[2] = tcg_swap_cond(args[2]); |
386 | 65a7cce1 | Aurelien Jarno | } |
387 | 65a7cce1 | Aurelien Jarno | break;
|
388 | 65a7cce1 | Aurelien Jarno | CASE_OP_32_64(setcond): |
389 | 65a7cce1 | Aurelien Jarno | if (temps[args[1]].state == TCG_TEMP_CONST |
390 | 65a7cce1 | Aurelien Jarno | && temps[args[2]].state != TCG_TEMP_CONST) {
|
391 | 65a7cce1 | Aurelien Jarno | tmp = args[1];
|
392 | 65a7cce1 | Aurelien Jarno | args[1] = args[2]; |
393 | 65a7cce1 | Aurelien Jarno | args[2] = tmp;
|
394 | 65a7cce1 | Aurelien Jarno | args[3] = tcg_swap_cond(args[3]); |
395 | 65a7cce1 | Aurelien Jarno | } |
396 | 65a7cce1 | Aurelien Jarno | break;
|
397 | fa01a208 | Richard Henderson | CASE_OP_32_64(movcond): |
398 | fa01a208 | Richard Henderson | if (temps[args[1]].state == TCG_TEMP_CONST |
399 | fa01a208 | Richard Henderson | && temps[args[2]].state != TCG_TEMP_CONST) {
|
400 | fa01a208 | Richard Henderson | tmp = args[1];
|
401 | fa01a208 | Richard Henderson | args[1] = args[2]; |
402 | fa01a208 | Richard Henderson | args[2] = tmp;
|
403 | fa01a208 | Richard Henderson | args[5] = tcg_swap_cond(args[5]); |
404 | fa01a208 | Richard Henderson | } |
405 | 53108fb5 | Kirill Batuzov | default:
|
406 | 53108fb5 | Kirill Batuzov | break;
|
407 | 53108fb5 | Kirill Batuzov | } |
408 | 53108fb5 | Kirill Batuzov | |
409 | 01ee5282 | Aurelien Jarno | /* Simplify expressions for "shift/rot r, 0, a => movi r, 0" */
|
410 | 01ee5282 | Aurelien Jarno | switch (op) {
|
411 | 01ee5282 | Aurelien Jarno | CASE_OP_32_64(shl): |
412 | 01ee5282 | Aurelien Jarno | CASE_OP_32_64(shr): |
413 | 01ee5282 | Aurelien Jarno | CASE_OP_32_64(sar): |
414 | 01ee5282 | Aurelien Jarno | CASE_OP_32_64(rotl): |
415 | 01ee5282 | Aurelien Jarno | CASE_OP_32_64(rotr): |
416 | 01ee5282 | Aurelien Jarno | if (temps[args[1]].state == TCG_TEMP_CONST |
417 | 01ee5282 | Aurelien Jarno | && temps[args[1]].val == 0) { |
418 | 01ee5282 | Aurelien Jarno | gen_opc_buf[op_index] = op_to_movi(op); |
419 | 01ee5282 | Aurelien Jarno | tcg_opt_gen_movi(gen_args, args[0], 0, nb_temps, nb_globals); |
420 | 01ee5282 | Aurelien Jarno | args += 3;
|
421 | 01ee5282 | Aurelien Jarno | gen_args += 2;
|
422 | 01ee5282 | Aurelien Jarno | continue;
|
423 | 01ee5282 | Aurelien Jarno | } |
424 | 01ee5282 | Aurelien Jarno | break;
|
425 | 01ee5282 | Aurelien Jarno | default:
|
426 | 01ee5282 | Aurelien Jarno | break;
|
427 | 01ee5282 | Aurelien Jarno | } |
428 | 01ee5282 | Aurelien Jarno | |
429 | 56e49438 | Aurelien Jarno | /* Simplify expression for "op r, a, 0 => mov r, a" cases */
|
430 | 53108fb5 | Kirill Batuzov | switch (op) {
|
431 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(add): |
432 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(sub): |
433 | 55c0975c | Kirill Batuzov | CASE_OP_32_64(shl): |
434 | 55c0975c | Kirill Batuzov | CASE_OP_32_64(shr): |
435 | 55c0975c | Kirill Batuzov | CASE_OP_32_64(sar): |
436 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(rotl): |
437 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(rotr): |
438 | 38ee188b | Aurelien Jarno | CASE_OP_32_64(or): |
439 | 38ee188b | Aurelien Jarno | CASE_OP_32_64(xor): |
440 | 53108fb5 | Kirill Batuzov | if (temps[args[1]].state == TCG_TEMP_CONST) { |
441 | 53108fb5 | Kirill Batuzov | /* Proceed with possible constant folding. */
|
442 | 53108fb5 | Kirill Batuzov | break;
|
443 | 53108fb5 | Kirill Batuzov | } |
444 | 53108fb5 | Kirill Batuzov | if (temps[args[2]].state == TCG_TEMP_CONST |
445 | 53108fb5 | Kirill Batuzov | && temps[args[2]].val == 0) { |
446 | 53108fb5 | Kirill Batuzov | if ((temps[args[0]].state == TCG_TEMP_COPY |
447 | 53108fb5 | Kirill Batuzov | && temps[args[0]].val == args[1]) |
448 | 53108fb5 | Kirill Batuzov | || args[0] == args[1]) { |
449 | 53108fb5 | Kirill Batuzov | gen_opc_buf[op_index] = INDEX_op_nop; |
450 | 53108fb5 | Kirill Batuzov | } else {
|
451 | 53108fb5 | Kirill Batuzov | gen_opc_buf[op_index] = op_to_mov(op); |
452 | d104bebd | Aurelien Jarno | tcg_opt_gen_mov(gen_args, args[0], args[1], |
453 | 53108fb5 | Kirill Batuzov | nb_temps, nb_globals); |
454 | 53108fb5 | Kirill Batuzov | gen_args += 2;
|
455 | 53108fb5 | Kirill Batuzov | } |
456 | fedc0da2 | Aurelien Jarno | args += 3;
|
457 | 53108fb5 | Kirill Batuzov | continue;
|
458 | 53108fb5 | Kirill Batuzov | } |
459 | 53108fb5 | Kirill Batuzov | break;
|
460 | 56e49438 | Aurelien Jarno | default:
|
461 | 56e49438 | Aurelien Jarno | break;
|
462 | 56e49438 | Aurelien Jarno | } |
463 | 56e49438 | Aurelien Jarno | |
464 | 56e49438 | Aurelien Jarno | /* Simplify expression for "op r, a, 0 => movi r, 0" cases */
|
465 | 56e49438 | Aurelien Jarno | switch (op) {
|
466 | 61251c0c | Aurelien Jarno | CASE_OP_32_64(and): |
467 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(mul): |
468 | 53108fb5 | Kirill Batuzov | if ((temps[args[2]].state == TCG_TEMP_CONST |
469 | 53108fb5 | Kirill Batuzov | && temps[args[2]].val == 0)) { |
470 | 53108fb5 | Kirill Batuzov | gen_opc_buf[op_index] = op_to_movi(op); |
471 | 53108fb5 | Kirill Batuzov | tcg_opt_gen_movi(gen_args, args[0], 0, nb_temps, nb_globals); |
472 | 53108fb5 | Kirill Batuzov | args += 3;
|
473 | 53108fb5 | Kirill Batuzov | gen_args += 2;
|
474 | 53108fb5 | Kirill Batuzov | continue;
|
475 | 53108fb5 | Kirill Batuzov | } |
476 | 53108fb5 | Kirill Batuzov | break;
|
477 | 56e49438 | Aurelien Jarno | default:
|
478 | 56e49438 | Aurelien Jarno | break;
|
479 | 56e49438 | Aurelien Jarno | } |
480 | 56e49438 | Aurelien Jarno | |
481 | 56e49438 | Aurelien Jarno | /* Simplify expression for "op r, a, a => mov r, a" cases */
|
482 | 56e49438 | Aurelien Jarno | switch (op) {
|
483 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(or): |
484 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(and): |
485 | 9a81090b | Kirill Batuzov | if (args[1] == args[2]) { |
486 | 9a81090b | Kirill Batuzov | if (args[1] == args[0]) { |
487 | 9a81090b | Kirill Batuzov | gen_opc_buf[op_index] = INDEX_op_nop; |
488 | 9a81090b | Kirill Batuzov | } else {
|
489 | 9a81090b | Kirill Batuzov | gen_opc_buf[op_index] = op_to_mov(op); |
490 | d104bebd | Aurelien Jarno | tcg_opt_gen_mov(gen_args, args[0], args[1], nb_temps, |
491 | 9a81090b | Kirill Batuzov | nb_globals); |
492 | 9a81090b | Kirill Batuzov | gen_args += 2;
|
493 | 9a81090b | Kirill Batuzov | } |
494 | fedc0da2 | Aurelien Jarno | args += 3;
|
495 | 9a81090b | Kirill Batuzov | continue;
|
496 | 9a81090b | Kirill Batuzov | } |
497 | 9a81090b | Kirill Batuzov | break;
|
498 | fe0de7aa | Blue Swirl | default:
|
499 | fe0de7aa | Blue Swirl | break;
|
500 | 53108fb5 | Kirill Batuzov | } |
501 | 53108fb5 | Kirill Batuzov | |
502 | 22613af4 | Kirill Batuzov | /* Propagate constants through copy operations and do constant
|
503 | 22613af4 | Kirill Batuzov | folding. Constants will be substituted to arguments by register
|
504 | 22613af4 | Kirill Batuzov | allocator where needed and possible. Also detect copies. */
|
505 | 8f2e8c07 | Kirill Batuzov | switch (op) {
|
506 | 22613af4 | Kirill Batuzov | CASE_OP_32_64(mov): |
507 | 22613af4 | Kirill Batuzov | if ((temps[args[1]].state == TCG_TEMP_COPY |
508 | 22613af4 | Kirill Batuzov | && temps[args[1]].val == args[0]) |
509 | 22613af4 | Kirill Batuzov | || args[0] == args[1]) { |
510 | 22613af4 | Kirill Batuzov | args += 2;
|
511 | 22613af4 | Kirill Batuzov | gen_opc_buf[op_index] = INDEX_op_nop; |
512 | 22613af4 | Kirill Batuzov | break;
|
513 | 22613af4 | Kirill Batuzov | } |
514 | 22613af4 | Kirill Batuzov | if (temps[args[1]].state != TCG_TEMP_CONST) { |
515 | d104bebd | Aurelien Jarno | tcg_opt_gen_mov(gen_args, args[0], args[1], |
516 | 22613af4 | Kirill Batuzov | nb_temps, nb_globals); |
517 | 22613af4 | Kirill Batuzov | gen_args += 2;
|
518 | 22613af4 | Kirill Batuzov | args += 2;
|
519 | 22613af4 | Kirill Batuzov | break;
|
520 | 22613af4 | Kirill Batuzov | } |
521 | 22613af4 | Kirill Batuzov | /* Source argument is constant. Rewrite the operation and
|
522 | 22613af4 | Kirill Batuzov | let movi case handle it. */
|
523 | 22613af4 | Kirill Batuzov | op = op_to_movi(op); |
524 | 22613af4 | Kirill Batuzov | gen_opc_buf[op_index] = op; |
525 | 22613af4 | Kirill Batuzov | args[1] = temps[args[1]].val; |
526 | 22613af4 | Kirill Batuzov | /* fallthrough */
|
527 | 22613af4 | Kirill Batuzov | CASE_OP_32_64(movi): |
528 | 22613af4 | Kirill Batuzov | tcg_opt_gen_movi(gen_args, args[0], args[1], nb_temps, nb_globals); |
529 | 22613af4 | Kirill Batuzov | gen_args += 2;
|
530 | 22613af4 | Kirill Batuzov | args += 2;
|
531 | 22613af4 | Kirill Batuzov | break;
|
532 | a640f031 | Kirill Batuzov | CASE_OP_32_64(not): |
533 | cb25c80a | Richard Henderson | CASE_OP_32_64(neg): |
534 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext8s): |
535 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext8u): |
536 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext16s): |
537 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(ext16u): |
538 | a640f031 | Kirill Batuzov | case INDEX_op_ext32s_i64:
|
539 | a640f031 | Kirill Batuzov | case INDEX_op_ext32u_i64:
|
540 | a640f031 | Kirill Batuzov | if (temps[args[1]].state == TCG_TEMP_CONST) { |
541 | a640f031 | Kirill Batuzov | gen_opc_buf[op_index] = op_to_movi(op); |
542 | a640f031 | Kirill Batuzov | tmp = do_constant_folding(op, temps[args[1]].val, 0); |
543 | a640f031 | Kirill Batuzov | tcg_opt_gen_movi(gen_args, args[0], tmp, nb_temps, nb_globals);
|
544 | a640f031 | Kirill Batuzov | } else {
|
545 | a640f031 | Kirill Batuzov | reset_temp(args[0], nb_temps, nb_globals);
|
546 | a640f031 | Kirill Batuzov | gen_args[0] = args[0]; |
547 | a640f031 | Kirill Batuzov | gen_args[1] = args[1]; |
548 | a640f031 | Kirill Batuzov | } |
549 | fedc0da2 | Aurelien Jarno | gen_args += 2;
|
550 | fedc0da2 | Aurelien Jarno | args += 2;
|
551 | fedc0da2 | Aurelien Jarno | break;
|
552 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(add): |
553 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(sub): |
554 | 53108fb5 | Kirill Batuzov | CASE_OP_32_64(mul): |
555 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(or): |
556 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(and): |
557 | 9a81090b | Kirill Batuzov | CASE_OP_32_64(xor): |
558 | 55c0975c | Kirill Batuzov | CASE_OP_32_64(shl): |
559 | 55c0975c | Kirill Batuzov | CASE_OP_32_64(shr): |
560 | 55c0975c | Kirill Batuzov | CASE_OP_32_64(sar): |
561 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(rotl): |
562 | 25c4d9cc | Richard Henderson | CASE_OP_32_64(rotr): |
563 | cb25c80a | Richard Henderson | CASE_OP_32_64(andc): |
564 | cb25c80a | Richard Henderson | CASE_OP_32_64(orc): |
565 | cb25c80a | Richard Henderson | CASE_OP_32_64(eqv): |
566 | cb25c80a | Richard Henderson | CASE_OP_32_64(nand): |
567 | cb25c80a | Richard Henderson | CASE_OP_32_64(nor): |
568 | 53108fb5 | Kirill Batuzov | if (temps[args[1]].state == TCG_TEMP_CONST |
569 | 53108fb5 | Kirill Batuzov | && temps[args[2]].state == TCG_TEMP_CONST) {
|
570 | 53108fb5 | Kirill Batuzov | gen_opc_buf[op_index] = op_to_movi(op); |
571 | 53108fb5 | Kirill Batuzov | tmp = do_constant_folding(op, temps[args[1]].val,
|
572 | 53108fb5 | Kirill Batuzov | temps[args[2]].val);
|
573 | 53108fb5 | Kirill Batuzov | tcg_opt_gen_movi(gen_args, args[0], tmp, nb_temps, nb_globals);
|
574 | 53108fb5 | Kirill Batuzov | gen_args += 2;
|
575 | 53108fb5 | Kirill Batuzov | } else {
|
576 | 53108fb5 | Kirill Batuzov | reset_temp(args[0], nb_temps, nb_globals);
|
577 | 53108fb5 | Kirill Batuzov | gen_args[0] = args[0]; |
578 | 53108fb5 | Kirill Batuzov | gen_args[1] = args[1]; |
579 | 53108fb5 | Kirill Batuzov | gen_args[2] = args[2]; |
580 | 53108fb5 | Kirill Batuzov | gen_args += 3;
|
581 | 53108fb5 | Kirill Batuzov | } |
582 | fedc0da2 | Aurelien Jarno | args += 3;
|
583 | fedc0da2 | Aurelien Jarno | break;
|
584 | f8dd19e5 | Aurelien Jarno | CASE_OP_32_64(setcond): |
585 | f8dd19e5 | Aurelien Jarno | if (temps[args[1]].state == TCG_TEMP_CONST |
586 | f8dd19e5 | Aurelien Jarno | && temps[args[2]].state == TCG_TEMP_CONST) {
|
587 | f8dd19e5 | Aurelien Jarno | gen_opc_buf[op_index] = op_to_movi(op); |
588 | f8dd19e5 | Aurelien Jarno | tmp = do_constant_folding_cond(op, temps[args[1]].val,
|
589 | f8dd19e5 | Aurelien Jarno | temps[args[2]].val, args[3]); |
590 | f8dd19e5 | Aurelien Jarno | tcg_opt_gen_movi(gen_args, args[0], tmp, nb_temps, nb_globals);
|
591 | f8dd19e5 | Aurelien Jarno | gen_args += 2;
|
592 | f8dd19e5 | Aurelien Jarno | } else {
|
593 | f8dd19e5 | Aurelien Jarno | reset_temp(args[0], nb_temps, nb_globals);
|
594 | f8dd19e5 | Aurelien Jarno | gen_args[0] = args[0]; |
595 | f8dd19e5 | Aurelien Jarno | gen_args[1] = args[1]; |
596 | f8dd19e5 | Aurelien Jarno | gen_args[2] = args[2]; |
597 | f8dd19e5 | Aurelien Jarno | gen_args[3] = args[3]; |
598 | f8dd19e5 | Aurelien Jarno | gen_args += 4;
|
599 | f8dd19e5 | Aurelien Jarno | } |
600 | fedc0da2 | Aurelien Jarno | args += 4;
|
601 | fedc0da2 | Aurelien Jarno | break;
|
602 | fbeaa26c | Aurelien Jarno | CASE_OP_32_64(brcond): |
603 | fbeaa26c | Aurelien Jarno | if (temps[args[0]].state == TCG_TEMP_CONST |
604 | fbeaa26c | Aurelien Jarno | && temps[args[1]].state == TCG_TEMP_CONST) {
|
605 | fbeaa26c | Aurelien Jarno | if (do_constant_folding_cond(op, temps[args[0]].val, |
606 | fbeaa26c | Aurelien Jarno | temps[args[1]].val, args[2])) { |
607 | fbeaa26c | Aurelien Jarno | memset(temps, 0, nb_temps * sizeof(struct tcg_temp_info)); |
608 | fbeaa26c | Aurelien Jarno | gen_opc_buf[op_index] = INDEX_op_br; |
609 | fbeaa26c | Aurelien Jarno | gen_args[0] = args[3]; |
610 | fbeaa26c | Aurelien Jarno | gen_args += 1;
|
611 | fbeaa26c | Aurelien Jarno | } else {
|
612 | fbeaa26c | Aurelien Jarno | gen_opc_buf[op_index] = INDEX_op_nop; |
613 | fbeaa26c | Aurelien Jarno | } |
614 | fbeaa26c | Aurelien Jarno | } else {
|
615 | fbeaa26c | Aurelien Jarno | memset(temps, 0, nb_temps * sizeof(struct tcg_temp_info)); |
616 | fbeaa26c | Aurelien Jarno | reset_temp(args[0], nb_temps, nb_globals);
|
617 | fbeaa26c | Aurelien Jarno | gen_args[0] = args[0]; |
618 | fbeaa26c | Aurelien Jarno | gen_args[1] = args[1]; |
619 | fbeaa26c | Aurelien Jarno | gen_args[2] = args[2]; |
620 | fbeaa26c | Aurelien Jarno | gen_args[3] = args[3]; |
621 | fbeaa26c | Aurelien Jarno | gen_args += 4;
|
622 | fbeaa26c | Aurelien Jarno | } |
623 | fedc0da2 | Aurelien Jarno | args += 4;
|
624 | fedc0da2 | Aurelien Jarno | break;
|
625 | fa01a208 | Richard Henderson | CASE_OP_32_64(movcond): |
626 | fa01a208 | Richard Henderson | if (temps[args[1]].state == TCG_TEMP_CONST |
627 | fa01a208 | Richard Henderson | && temps[args[2]].state == TCG_TEMP_CONST) {
|
628 | fa01a208 | Richard Henderson | tmp = do_constant_folding_cond(op, temps[args[1]].val,
|
629 | fa01a208 | Richard Henderson | temps[args[2]].val, args[5]); |
630 | fa01a208 | Richard Henderson | if (args[0] == args[4-tmp] |
631 | fa01a208 | Richard Henderson | || (temps[args[4-tmp]].state == TCG_TEMP_COPY
|
632 | fa01a208 | Richard Henderson | && temps[args[4-tmp]].val == args[0])) { |
633 | fa01a208 | Richard Henderson | gen_opc_buf[op_index] = INDEX_op_nop; |
634 | fa01a208 | Richard Henderson | } else if (temps[args[4-tmp]].state == TCG_TEMP_CONST) { |
635 | fa01a208 | Richard Henderson | gen_opc_buf[op_index] = op_to_movi(op); |
636 | fa01a208 | Richard Henderson | tcg_opt_gen_movi(gen_args, args[0], temps[args[4-tmp]].val, |
637 | fa01a208 | Richard Henderson | nb_temps, nb_globals); |
638 | fa01a208 | Richard Henderson | gen_args += 2;
|
639 | fa01a208 | Richard Henderson | } else {
|
640 | fa01a208 | Richard Henderson | gen_opc_buf[op_index] = op_to_mov(op); |
641 | fa01a208 | Richard Henderson | tcg_opt_gen_mov(gen_args, args[0], args[4-tmp], |
642 | fa01a208 | Richard Henderson | nb_temps, nb_globals); |
643 | fa01a208 | Richard Henderson | gen_args += 2;
|
644 | fa01a208 | Richard Henderson | } |
645 | fa01a208 | Richard Henderson | } else {
|
646 | fa01a208 | Richard Henderson | reset_temp(args[0], nb_temps, nb_globals);
|
647 | fa01a208 | Richard Henderson | gen_args[0] = args[0]; |
648 | fa01a208 | Richard Henderson | gen_args[1] = args[1]; |
649 | fa01a208 | Richard Henderson | gen_args[2] = args[2]; |
650 | fa01a208 | Richard Henderson | gen_args[3] = args[3]; |
651 | fa01a208 | Richard Henderson | gen_args[4] = args[4]; |
652 | fa01a208 | Richard Henderson | gen_args[5] = args[5]; |
653 | fa01a208 | Richard Henderson | gen_args += 6;
|
654 | fa01a208 | Richard Henderson | } |
655 | fa01a208 | Richard Henderson | args += 6;
|
656 | fa01a208 | Richard Henderson | break;
|
657 | 8f2e8c07 | Kirill Batuzov | case INDEX_op_call:
|
658 | 22613af4 | Kirill Batuzov | nb_call_args = (args[0] >> 16) + (args[0] & 0xffff); |
659 | 22613af4 | Kirill Batuzov | if (!(args[nb_call_args + 1] & (TCG_CALL_CONST | TCG_CALL_PURE))) { |
660 | 22613af4 | Kirill Batuzov | for (i = 0; i < nb_globals; i++) { |
661 | 22613af4 | Kirill Batuzov | reset_temp(i, nb_temps, nb_globals); |
662 | 22613af4 | Kirill Batuzov | } |
663 | 22613af4 | Kirill Batuzov | } |
664 | 22613af4 | Kirill Batuzov | for (i = 0; i < (args[0] >> 16); i++) { |
665 | 22613af4 | Kirill Batuzov | reset_temp(args[i + 1], nb_temps, nb_globals);
|
666 | 22613af4 | Kirill Batuzov | } |
667 | 22613af4 | Kirill Batuzov | i = nb_call_args + 3;
|
668 | 8f2e8c07 | Kirill Batuzov | while (i) {
|
669 | 8f2e8c07 | Kirill Batuzov | *gen_args = *args; |
670 | 8f2e8c07 | Kirill Batuzov | args++; |
671 | 8f2e8c07 | Kirill Batuzov | gen_args++; |
672 | 8f2e8c07 | Kirill Batuzov | i--; |
673 | 8f2e8c07 | Kirill Batuzov | } |
674 | 8f2e8c07 | Kirill Batuzov | break;
|
675 | 8f2e8c07 | Kirill Batuzov | default:
|
676 | 22613af4 | Kirill Batuzov | /* Default case: we know nothing about the operation, so no
|
677 | a2550660 | Aurelien Jarno | propagation is done. We trash everything if the operation
|
678 | a2550660 | Aurelien Jarno | is the end of a basic block, otherwise we only trash the
|
679 | a2550660 | Aurelien Jarno | output args. */
|
680 | a2550660 | Aurelien Jarno | if (def->flags & TCG_OPF_BB_END) {
|
681 | a2550660 | Aurelien Jarno | memset(temps, 0, nb_temps * sizeof(struct tcg_temp_info)); |
682 | a2550660 | Aurelien Jarno | } else {
|
683 | a2550660 | Aurelien Jarno | for (i = 0; i < def->nb_oargs; i++) { |
684 | a2550660 | Aurelien Jarno | reset_temp(args[i], nb_temps, nb_globals); |
685 | a2550660 | Aurelien Jarno | } |
686 | 22613af4 | Kirill Batuzov | } |
687 | 8f2e8c07 | Kirill Batuzov | for (i = 0; i < def->nb_args; i++) { |
688 | 8f2e8c07 | Kirill Batuzov | gen_args[i] = args[i]; |
689 | 8f2e8c07 | Kirill Batuzov | } |
690 | 8f2e8c07 | Kirill Batuzov | args += def->nb_args; |
691 | 8f2e8c07 | Kirill Batuzov | gen_args += def->nb_args; |
692 | 8f2e8c07 | Kirill Batuzov | break;
|
693 | 8f2e8c07 | Kirill Batuzov | } |
694 | 8f2e8c07 | Kirill Batuzov | } |
695 | 8f2e8c07 | Kirill Batuzov | |
696 | 8f2e8c07 | Kirill Batuzov | return gen_args;
|
697 | 8f2e8c07 | Kirill Batuzov | } |
698 | 8f2e8c07 | Kirill Batuzov | |
699 | 8f2e8c07 | Kirill Batuzov | TCGArg *tcg_optimize(TCGContext *s, uint16_t *tcg_opc_ptr, |
700 | 8f2e8c07 | Kirill Batuzov | TCGArg *args, TCGOpDef *tcg_op_defs) |
701 | 8f2e8c07 | Kirill Batuzov | { |
702 | 8f2e8c07 | Kirill Batuzov | TCGArg *res; |
703 | 8f2e8c07 | Kirill Batuzov | res = tcg_constant_folding(s, tcg_opc_ptr, args, tcg_op_defs); |
704 | 8f2e8c07 | Kirill Batuzov | return res;
|
705 | 8f2e8c07 | Kirill Batuzov | } |