Mercurial > hg > CbC > CbC_gcc
annotate gcc/tree-ssa-threadedge.c @ 86:12b3180c7d07
modify calls.c.
author | Nobuyasu Oshiro <dimolto@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Thu, 15 Dec 2011 02:37:13 +0900 |
parents | f6334be47118 |
children | 04ced10e8804 |
rev | line source |
---|---|
0 | 1 /* SSA Jump Threading |
67
f6334be47118
update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
parents:
63
diff
changeset
|
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 |
f6334be47118
update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
parents:
63
diff
changeset
|
3 Free Software Foundation, Inc. |
0 | 4 Contributed by Jeff Law <law@redhat.com> |
5 | |
6 This file is part of GCC. | |
7 | |
8 GCC is free software; you can redistribute it and/or modify | |
9 it under the terms of the GNU General Public License as published by | |
10 the Free Software Foundation; either version 3, or (at your option) | |
11 any later version. | |
12 | |
13 GCC is distributed in the hope that it will be useful, | |
14 but WITHOUT ANY WARRANTY; without even the implied warranty of | |
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
16 GNU General Public License for more details. | |
17 | |
18 You should have received a copy of the GNU General Public License | |
19 along with GCC; see the file COPYING3. If not see | |
20 <http://www.gnu.org/licenses/>. */ | |
21 | |
22 #include "config.h" | |
23 #include "system.h" | |
24 #include "coretypes.h" | |
25 #include "tm.h" | |
26 #include "tree.h" | |
27 #include "flags.h" | |
28 #include "tm_p.h" | |
29 #include "basic-block.h" | |
30 #include "cfgloop.h" | |
31 #include "output.h" | |
32 #include "function.h" | |
33 #include "timevar.h" | |
34 #include "tree-dump.h" | |
35 #include "tree-flow.h" | |
36 #include "tree-pass.h" | |
37 #include "tree-ssa-propagate.h" | |
38 #include "langhooks.h" | |
39 #include "params.h" | |
40 | |
41 /* To avoid code explosion due to jump threading, we limit the | |
42 number of statements we are going to copy. This variable | |
43 holds the number of statements currently seen that we'll have | |
44 to copy as part of the jump threading process. */ | |
45 static int stmt_count; | |
46 | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
47 /* Array to record value-handles per SSA_NAME. */ |
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
48 VEC(tree,heap) *ssa_name_values; |
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
49 |
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
/* Set the value for the SSA name NAME to VALUE.  */

void
set_ssa_name_value (tree name, tree value)
{
  /* The vector is indexed by SSA_NAME version number; grow it lazily
     so the slot for NAME exists.  Newly created slots are cleared
     (NULL), meaning "no recorded value".  */
  if (SSA_NAME_VERSION (name) >= VEC_length (tree, ssa_name_values))
    VEC_safe_grow_cleared (tree, heap, ssa_name_values,
			   SSA_NAME_VERSION (name) + 1);
  VEC_replace (tree, ssa_name_values, SSA_NAME_VERSION (name), value);
}
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
60 |
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
/* Initialize the per SSA_NAME value-handles array.  */

void
threadedge_initialize_values (void)
{
  /* The array must not already exist; callers pair this with
     threadedge_finalize_values.  */
  gcc_assert (ssa_name_values == NULL);
  /* Reserve one slot per currently existing SSA name; the vector can
     still grow later via set_ssa_name_value.  */
  ssa_name_values = VEC_alloc(tree, heap, num_ssa_names);
}
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
68 |
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
/* Free the per SSA_NAME value-handle array.  */

void
threadedge_finalize_values (void)
{
  /* NOTE(review): VEC_free is expected to also reset ssa_name_values
     to NULL, which threadedge_initialize_values asserts on the next
     init — confirm against the VEC implementation in vec.h.  */
  VEC_free(tree, heap, ssa_name_values);
}
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
75 |
0 | 76 /* Return TRUE if we may be able to thread an incoming edge into |
77 BB to an outgoing edge from BB. Return FALSE otherwise. */ | |
78 | |
79 bool | |
80 potentially_threadable_block (basic_block bb) | |
81 { | |
82 gimple_stmt_iterator gsi; | |
83 | |
84 /* If BB has a single successor or a single predecessor, then | |
85 there is no threading opportunity. */ | |
86 if (single_succ_p (bb) || single_pred_p (bb)) | |
87 return false; | |
88 | |
89 /* If BB does not end with a conditional, switch or computed goto, | |
90 then there is no threading opportunity. */ | |
91 gsi = gsi_last_bb (bb); | |
92 if (gsi_end_p (gsi) | |
93 || ! gsi_stmt (gsi) | |
94 || (gimple_code (gsi_stmt (gsi)) != GIMPLE_COND | |
95 && gimple_code (gsi_stmt (gsi)) != GIMPLE_GOTO | |
96 && gimple_code (gsi_stmt (gsi)) != GIMPLE_SWITCH)) | |
97 return false; | |
98 | |
99 return true; | |
100 } | |
101 | |
/* Return the LHS of any ASSERT_EXPR where OP appears as the first
   argument to the ASSERT_EXPR and in which the ASSERT_EXPR dominates
   BB.  If no such ASSERT_EXPR is found, return OP.  */

static tree
lhs_of_dominating_assert (tree op, basic_block bb, gimple stmt)
{
  imm_use_iterator imm_iter;
  gimple use_stmt;
  use_operand_p use_p;

  /* Walk the immediate uses of OP looking for a single-RHS assignment
     of the form LHS = ASSERT_EXPR <OP, ...> whose defining block
     dominates BB.  STMT itself is skipped so we never pick the very
     statement currently being simplified.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
    {
      use_stmt = USE_STMT (use_p);
      if (use_stmt != stmt
          && gimple_assign_single_p (use_stmt)
          && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ASSERT_EXPR
          && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == op
	  && dominated_by_p (CDI_DOMINATORS, bb, gimple_bb (use_stmt)))
	{
	  return gimple_assign_lhs (use_stmt);
	}
    }
  return op;
}
127 | |
128 /* We record temporary equivalences created by PHI nodes or | |
129 statements within the target block. Doing so allows us to | |
130 identify more jump threading opportunities, even in blocks | |
131 with side effects. | |
132 | |
133 We keep track of those temporary equivalences in a stack | |
134 structure so that we can unwind them when we're done processing | |
135 a particular edge. This routine handles unwinding the data | |
136 structures. */ | |
137 | |
138 static void | |
139 remove_temporary_equivalences (VEC(tree, heap) **stack) | |
140 { | |
141 while (VEC_length (tree, *stack) > 0) | |
142 { | |
143 tree prev_value, dest; | |
144 | |
145 dest = VEC_pop (tree, *stack); | |
146 | |
147 /* A NULL value indicates we should stop unwinding, otherwise | |
148 pop off the next entry as they're recorded in pairs. */ | |
149 if (dest == NULL) | |
150 break; | |
151 | |
152 prev_value = VEC_pop (tree, *stack); | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
153 set_ssa_name_value (dest, prev_value); |
0 | 154 } |
155 } | |
156 | |
157 /* Record a temporary equivalence, saving enough information so that | |
158 we can restore the state of recorded equivalences when we're | |
159 done processing the current edge. */ | |
160 | |
161 static void | |
162 record_temporary_equivalence (tree x, tree y, VEC(tree, heap) **stack) | |
163 { | |
164 tree prev_x = SSA_NAME_VALUE (x); | |
165 | |
166 if (TREE_CODE (y) == SSA_NAME) | |
167 { | |
168 tree tmp = SSA_NAME_VALUE (y); | |
169 y = tmp ? tmp : y; | |
170 } | |
171 | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
172 set_ssa_name_value (x, y); |
0 | 173 VEC_reserve (tree, heap, *stack, 2); |
174 VEC_quick_push (tree, *stack, prev_x); | |
175 VEC_quick_push (tree, *stack, x); | |
176 } | |
177 | |
/* Record temporary equivalences created by PHIs at the target of the
   edge E.  Record unwind information for the equivalences onto STACK.

   If a PHI which prevents threading is encountered, then return FALSE
   indicating we should not thread this edge, else return TRUE.  */

static bool
record_temporary_equivalences_from_phis (edge e, VEC(tree, heap) **stack)
{
  gimple_stmt_iterator gsi;

  /* Each PHI creates a temporary equivalence, record them.
     These are context sensitive equivalences and will be removed
     later.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
      tree dst = gimple_phi_result (phi);

      /* If the desired argument is not the same as this PHI's result
	 and it is set by a PHI in E->dest, then we can not thread
	 through E->dest.  */
      if (src != dst
	  && TREE_CODE (src) == SSA_NAME
	  && gimple_code (SSA_NAME_DEF_STMT (src)) == GIMPLE_PHI
	  && gimple_bb (SSA_NAME_DEF_STMT (src)) == e->dest)
	return false;

      /* We consider any non-virtual PHI as a statement since it
	 could result in a constant assignment or copy operation.  */
      if (is_gimple_reg (dst))
	stmt_count++;

      record_temporary_equivalence (dst, src, stack);
    }
  return true;
}
216 | |
/* Fold the RHS of an assignment statement and return it as a tree.
   May return NULL_TREE if no simplification is possible.  */

static tree
fold_assignment_stmt (gimple stmt)
{
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  /* Dispatch on the shape of the RHS: a single operand, or a unary,
     binary or ternary expression.  */
  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    /* Sadly, we have to handle conditional assignments specially
	       here, because fold expects all the operands of an expression
	       to be folded before the expression itself is folded, but we
	       can't just substitute the folded condition here.  */
	    tree cond = fold (COND_EXPR_COND (rhs));
	    if (cond == boolean_true_node)
	      rhs = COND_EXPR_THEN (rhs);
	    else if (cond == boolean_false_node)
	      rhs = COND_EXPR_ELSE (rhs);
	  }

	return fold (rhs);
      }

    case GIMPLE_UNARY_RHS:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree op0 = gimple_assign_rhs1 (stmt);
	return fold_unary (subcode, TREE_TYPE (lhs), op0);
      }

    case GIMPLE_BINARY_RHS:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree op0 = gimple_assign_rhs1 (stmt);
	tree op1 = gimple_assign_rhs2 (stmt);
	return fold_binary (subcode, TREE_TYPE (lhs), op0, op1);
      }

    case GIMPLE_TERNARY_RHS:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree op0 = gimple_assign_rhs1 (stmt);
	tree op1 = gimple_assign_rhs2 (stmt);
	tree op2 = gimple_assign_rhs3 (stmt);
	return fold_ternary (subcode, TREE_TYPE (lhs), op0, op1, op2);
      }

    default:
      gcc_unreachable ();
    }
}
275 | |
/* Try to simplify each statement in E->dest, ultimately leading to
   a simplification of the COND_EXPR at the end of E->dest.

   Record unwind information for temporary equivalences onto STACK.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   statements using pass specific information.

   We might consider marking just those statements which ultimately
   feed the COND_EXPR.  It's not clear if the overhead of bookkeeping
   would be recovered by trying to simplify fewer statements.

   If we are able to simplify a statement into the form
   SSA_NAME = (SSA_NAME | gimple invariant), then we can record
   a context sensitive equivalence which may help us simplify
   later statements in E->dest.

   Returns the last statement examined in E->dest, or NULL if the
   block must not be threaded (volatile asm, or the statement-count
   budget was exceeded).  */

static gimple
record_temporary_equivalences_from_stmts_at_dest (edge e,
						  VEC(tree, heap) **stack,
						  tree (*simplify) (gimple,
								    gimple))
{
  gimple stmt = NULL;
  gimple_stmt_iterator gsi;
  int max_stmt_count;

  max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);

  /* Walk through each statement in the block recording equivalences
     we discover.  Note any equivalences we discover are context
     sensitive (ie, are dependent on traversing E) and must be unwound
     when we're finished processing E.  */
  for (gsi = gsi_start_bb (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree cached_lhs = NULL;

      stmt = gsi_stmt (gsi);

      /* Ignore empty statements and labels.  */
      if (gimple_code (stmt) == GIMPLE_NOP
	  || gimple_code (stmt) == GIMPLE_LABEL
	  || is_gimple_debug (stmt))
	continue;

      /* If the statement has volatile operands, then we assume we
	 can not thread through this block.  This is overly
	 conservative in some ways.  */
      if (gimple_code (stmt) == GIMPLE_ASM && gimple_asm_volatile_p (stmt))
	return NULL;

      /* If duplicating this block is going to cause too much code
	 expansion, then do not thread through this block.  */
      stmt_count++;
      if (stmt_count > max_stmt_count)
	return NULL;

      /* If this is not a statement that sets an SSA_NAME to a new
	 value, then do not try to simplify this statement as it will
	 not simplify in any way that is helpful for jump threading.  */
      if ((gimple_code (stmt) != GIMPLE_ASSIGN
	   || TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
	  && (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) == NULL_TREE
	      || TREE_CODE (gimple_call_lhs (stmt)) != SSA_NAME))
	continue;

      /* The result of __builtin_object_size depends on all the arguments
	 of a phi node.  Temporarily using only one edge produces invalid
	 results.  For example

	 if (x < 6)
	   goto l;
	 else
	   goto l;

	 l:
	 r = PHI <&w[2].a[1](2), &a.a[6](3)>
	 __builtin_object_size (r, 0)

	 The result of __builtin_object_size is defined to be the maximum of
	 remaining bytes.  If we use only one edge on the phi, the result will
	 change to be the remaining bytes for the corresponding phi argument.

	 Similarly for __builtin_constant_p:

	 r = PHI <1(2), 2(3)>
	 __builtin_constant_p (r)

	 Both PHI arguments are constant, but x ? 1 : 2 is still not
	 constant.  */

      if (is_gimple_call (stmt))
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  if (fndecl
	      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P))
	    continue;
	}

      /* At this point we have a statement which assigns an RHS to an
	 SSA_VAR on the LHS.  We want to try and simplify this statement
	 to expose more context sensitive equivalences which in turn may
	 allow us to simplify the condition at the end of the loop.

	 Handle simple copy operations as well as implied copies from
	 ASSERT_EXPRs.  */
      if (gimple_assign_single_p (stmt)
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	cached_lhs = gimple_assign_rhs1 (stmt);
      else if (gimple_assign_single_p (stmt)
	       && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
	cached_lhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      else
	{
	  /* A statement that is not a trivial copy or ASSERT_EXPR.
	     We're going to temporarily copy propagate the operands
	     and see if that allows us to simplify this statement.  */
	  tree *copy;
	  ssa_op_iter iter;
	  use_operand_p use_p;
	  unsigned int num, i = 0;

	  num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
	  copy = XCNEWVEC (tree, num);

	  /* Make a copy of the uses & vuses into USES_COPY, then cprop into
	     the operands.  */
	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
	    {
	      tree tmp = NULL;
	      tree use = USE_FROM_PTR (use_p);

	      copy[i++] = use;
	      if (TREE_CODE (use) == SSA_NAME)
		tmp = SSA_NAME_VALUE (use);
	      if (tmp)
		SET_USE (use_p, tmp);
	    }

	  /* Try to fold/lookup the new expression.  Inserting the
	     expression into the hash table is unlikely to help.  */
	  if (is_gimple_call (stmt))
	    cached_lhs = fold_call_stmt (stmt, false);
	  else
	    cached_lhs = fold_assignment_stmt (stmt);

	  /* Folding did not produce an SSA name or invariant; fall back
	     to the pass-specific simplifier.  */
	  if (!cached_lhs
	      || (TREE_CODE (cached_lhs) != SSA_NAME
		  && !is_gimple_min_invariant (cached_lhs)))
	    cached_lhs = (*simplify) (stmt, stmt);

	  /* Restore the statement's original uses/defs.  */
	  i = 0;
	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
	    SET_USE (use_p, copy[i++]);

	  free (copy);
	}

      /* Record the context sensitive equivalence if we were able
	 to simplify this statement.  */
      if (cached_lhs
	  && (TREE_CODE (cached_lhs) == SSA_NAME
	      || is_gimple_min_invariant (cached_lhs)))
	record_temporary_equivalence (gimple_get_lhs (stmt), cached_lhs, stack);
    }
  return stmt;
}
446 | |
/* Simplify the control statement at the end of the block E->dest.

   To avoid allocating memory unnecessarily, a scratch GIMPLE_COND
   is available to use/clobber in DUMMY_COND.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   a condition using pass specific information.

   Return the simplified condition or NULL if simplification could
   not be performed.  */

static tree
simplify_control_stmt_condition (edge e,
				 gimple stmt,
				 gimple dummy_cond,
				 tree (*simplify) (gimple, gimple),
				 bool handle_dominating_asserts)
{
  tree cond, cached_lhs;
  enum gimple_code code = gimple_code (stmt);

  /* For comparisons, we have to update both operands, then try
     to simplify the comparison.  */
  if (code == GIMPLE_COND)
    {
      tree op0, op1;
      enum tree_code cond_code;

      op0 = gimple_cond_lhs (stmt);
      op1 = gimple_cond_rhs (stmt);
      cond_code = gimple_cond_code (stmt);

      /* Get the current value of both operands.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  tree tmp = SSA_NAME_VALUE (op0);
	  if (tmp)
	    op0 = tmp;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  tree tmp = SSA_NAME_VALUE (op1);
	  if (tmp)
	    op1 = tmp;
	}

      if (handle_dominating_asserts)
	{
	  /* Now see if the operand was consumed by an ASSERT_EXPR
	     which dominates E->src.  If so, we want to replace the
	     operand with the LHS of the ASSERT_EXPR.  */
	  if (TREE_CODE (op0) == SSA_NAME)
	    op0 = lhs_of_dominating_assert (op0, e->src, stmt);

	  if (TREE_CODE (op1) == SSA_NAME)
	    op1 = lhs_of_dominating_assert (op1, e->src, stmt);
	}

      /* We may need to canonicalize the comparison.  For
	 example, op0 might be a constant while op1 is an
	 SSA_NAME.  Failure to canonicalize will cause us to
	 miss threading opportunities.  */
      if (tree_swap_operands_p (op0, op1, false))
	{
	  tree tmp;
	  cond_code = swap_tree_comparison (cond_code);
	  tmp = op0;
	  op0 = op1;
	  op1 = tmp;
	}

      /* Stuff the operator and operands into our dummy conditional
	 expression.  */
      gimple_cond_set_code (dummy_cond, cond_code);
      gimple_cond_set_lhs (dummy_cond, op0);
      gimple_cond_set_rhs (dummy_cond, op1);

      /* We absolutely do not care about any type conversions
	 we only care about a zero/nonzero value.  */
      fold_defer_overflow_warnings ();

      cached_lhs = fold_binary (cond_code, boolean_type_node, op0, op1);
      if (cached_lhs)
	while (CONVERT_EXPR_P (cached_lhs))
	  cached_lhs = TREE_OPERAND (cached_lhs, 0);

      fold_undefer_overflow_warnings ((cached_lhs
				       && is_gimple_min_invariant (cached_lhs)),
				      stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);

      /* If we have not simplified the condition down to an invariant,
	 then use the pass specific callback to simplify the condition.  */
      if (!cached_lhs
	  || !is_gimple_min_invariant (cached_lhs))
	cached_lhs = (*simplify) (dummy_cond, stmt);

      return cached_lhs;
    }

  if (code == GIMPLE_SWITCH)
    cond = gimple_switch_index (stmt);
  else if (code == GIMPLE_GOTO)
    cond = gimple_goto_dest (stmt);
  else
    gcc_unreachable ();

  /* We can have conditionals which just test the state of a variable
     rather than use a relational operator.  These are simpler to handle.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      cached_lhs = cond;

      /* Get the variable's current value from the equivalence chains.

	 It is possible to get loops in the SSA_NAME_VALUE chains
	 (consider threading the backedge of a loop where we have
	 a loop invariant SSA_NAME used in the condition).  */
      if (cached_lhs
	  && TREE_CODE (cached_lhs) == SSA_NAME
	  && SSA_NAME_VALUE (cached_lhs))
	cached_lhs = SSA_NAME_VALUE (cached_lhs);

      /* If we're dominated by a suitable ASSERT_EXPR, then
	 update CACHED_LHS appropriately.  */
      if (handle_dominating_asserts && TREE_CODE (cached_lhs) == SSA_NAME)
	cached_lhs = lhs_of_dominating_assert (cached_lhs, e->src, stmt);

      /* If we haven't simplified to an invariant yet, then use the
	 pass specific callback to try and simplify it further.  */
      if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
	cached_lhs = (*simplify) (stmt, stmt);
    }
  else
    cached_lhs = NULL;

  return cached_lhs;
}
585 | |
/* We are exiting E->src, see if E->dest ends with a conditional
   jump which has a known value when reached via E.

   Special care is necessary if E is a back edge in the CFG as we
   may have already recorded equivalences for E->dest into our
   various tables, including the result of the conditional at
   the end of E->dest.  Threading opportunities are severely
   limited in that case to avoid short-circuiting the loop
   incorrectly.

   Note it is quite common for the first block inside a loop to
   end with a conditional which is either always true or always
   false when reached via the loop backedge.  Thus we do not want
   to blindly disable threading across a loop backedge.

   DUMMY_COND is a shared cond_expr used by condition simplification as scratch,
   to avoid allocating memory.

   HANDLE_DOMINATING_ASSERTS is true if we should try to replace operands of
   the simplified condition with left-hand sides of ASSERT_EXPRs they are
   used in.

   STACK is used to undo temporary equivalences created during the walk of
   E->dest.

   SIMPLIFY is a pass-specific function used to simplify statements.  */

void
thread_across_edge (gimple dummy_cond,
		    edge e,
		    bool handle_dominating_asserts,
		    VEC(tree, heap) **stack,
		    tree (*simplify) (gimple, gimple))
{
  gimple stmt;

  /* If E is a backedge, then we want to verify that the COND_EXPR,
     SWITCH_EXPR or GOTO_EXPR at the end of e->dest is not affected
     by any statements in e->dest.  If it is affected, then it is not
     safe to thread this edge.  */
  if (e->flags & EDGE_DFS_BACK)
    {
      ssa_op_iter iter;
      use_operand_p use_p;
      gimple last = gsi_stmt (gsi_last_bb (e->dest));

      FOR_EACH_SSA_USE_OPERAND (use_p, last, iter, SSA_OP_USE | SSA_OP_VUSE)
	{
	  tree use = USE_FROM_PTR (use_p);

	  /* Any non-PHI definition inside e->dest feeding the control
	     statement makes threading across the backedge unsafe.  */
	  if (TREE_CODE (use) == SSA_NAME
	      && gimple_code (SSA_NAME_DEF_STMT (use)) != GIMPLE_PHI
	      && gimple_bb (SSA_NAME_DEF_STMT (use)) == e->dest)
	    goto fail;
	}
    }

  stmt_count = 0;

  /* PHIs create temporary equivalences.  */
  if (!record_temporary_equivalences_from_phis (e, stack))
    goto fail;

  /* Now walk each statement recording any context sensitive
     temporary equivalences we can detect.  */
  stmt = record_temporary_equivalences_from_stmts_at_dest (e, stack, simplify);
  if (!stmt)
    goto fail;

  /* If we stopped at a COND_EXPR or SWITCH_EXPR, see if we know which arm
     will be taken.  */
  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_GOTO
      || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      tree cond;

      /* Extract and simplify the condition.  */
      cond = simplify_control_stmt_condition (e, stmt, dummy_cond, simplify, handle_dominating_asserts);

      if (cond && is_gimple_min_invariant (cond))
	{
	  edge taken_edge = find_taken_edge (e->dest, cond);
	  basic_block dest = (taken_edge ? taken_edge->dest : NULL);

	  /* Threading to E->dest itself would be a self-loop; reject.  */
	  if (dest == e->dest)
	    goto fail;

	  /* NOTE(review): on this success path the equivalences are
	     unwound here and then again at the fail label below; the
	     second call appears to be a no-op once this group of stack
	     entries has been consumed — confirm against callers that
	     push their own unwind markers.  */
	  remove_temporary_equivalences (stack);
	  register_jump_thread (e, taken_edge);
	}
    }

 fail:
  remove_temporary_equivalences (stack);
}