Mercurial > hg > CbC > CbC_gcc
comparison gcc/tree-ssa-pre.c @ 0:a06113de4d67
first commit
author | kent <kent@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Fri, 17 Jul 2009 14:47:48 +0900 |
parents | |
children | 855418dad1a3 |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:a06113de4d67 |
---|---|
1 /* SSA-PRE for trees. | |
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 | |
3 Free Software Foundation, Inc. | |
4 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher | |
5 <stevenb@suse.de> | |
6 | |
7 This file is part of GCC. | |
8 | |
9 GCC is free software; you can redistribute it and/or modify | |
10 it under the terms of the GNU General Public License as published by | |
11 the Free Software Foundation; either version 3, or (at your option) | |
12 any later version. | |
13 | |
14 GCC is distributed in the hope that it will be useful, | |
15 but WITHOUT ANY WARRANTY; without even the implied warranty of | |
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
17 GNU General Public License for more details. | |
18 | |
19 You should have received a copy of the GNU General Public License | |
20 along with GCC; see the file COPYING3. If not see | |
21 <http://www.gnu.org/licenses/>. */ | |
22 | |
23 #include "config.h" | |
24 #include "system.h" | |
25 #include "coretypes.h" | |
26 #include "tm.h" | |
27 #include "ggc.h" | |
28 #include "tree.h" | |
29 #include "basic-block.h" | |
30 #include "diagnostic.h" | |
31 #include "tree-inline.h" | |
32 #include "tree-flow.h" | |
33 #include "gimple.h" | |
34 #include "tree-dump.h" | |
35 #include "timevar.h" | |
36 #include "fibheap.h" | |
37 #include "hashtab.h" | |
38 #include "tree-iterator.h" | |
39 #include "real.h" | |
40 #include "alloc-pool.h" | |
41 #include "obstack.h" | |
42 #include "tree-pass.h" | |
43 #include "flags.h" | |
44 #include "bitmap.h" | |
45 #include "langhooks.h" | |
46 #include "cfgloop.h" | |
47 #include "tree-ssa-sccvn.h" | |
48 #include "params.h" | |
49 #include "dbgcnt.h" | |
50 | |
51 /* TODO: | |
52 | |
53 1. Avail sets can be shared by making an avail_find_leader that | |
54 walks up the dominator tree and looks in those avail sets. | |
55 This might affect code optimality, it's unclear right now. | |
56 2. Strength reduction can be performed by anticipating expressions | |
57 we can repair later on. | |
58 3. We can do back-substitution or smarter value numbering to catch | |
59 commutative expressions split up over multiple statements. | |
60 */ | |
61 | |
62 /* For ease of terminology, "expression node" in the below refers to | |
63 every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs | |
64 represent the actual statement containing the expressions we care about, | |
65 and we cache the value number by putting it in the expression. */ | |
66 | |
67 /* Basic algorithm | |
68 | |
69 First we walk the statements to generate the AVAIL sets, the | |
70 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the | |
71 generation of values/expressions by a given block. We use them | |
72 when computing the ANTIC sets. The AVAIL sets consist of | |
73 SSA_NAME's that represent values, so we know what values are | |
74 available in what blocks. AVAIL is a forward dataflow problem. In | |
75 SSA, values are never killed, so we don't need a kill set, or a | |
76 fixpoint iteration, in order to calculate the AVAIL sets. In | |
77 traditional parlance, AVAIL sets tell us the downsafety of the | |
78 expressions/values. | |
79 | |
80 Next, we generate the ANTIC sets. These sets represent the | |
81 anticipatable expressions. ANTIC is a backwards dataflow | |
82 problem. An expression is anticipatable in a given block if it could | |
83 be generated in that block. This means that if we had to perform | |
84 an insertion in that block, of the value of that expression, we | |
85 could. Calculating the ANTIC sets requires phi translation of | |
86 expressions, because the flow goes backwards through phis. We must | |
87 iterate to a fixpoint of the ANTIC sets, because we have a kill | |
88 set. Even in SSA form, values are not live over the entire | |
89 function, only from their definition point onwards. So we have to | |
90 remove values from the ANTIC set once we go past the definition | |
91 point of the leaders that make them up. | |
92 compute_antic/compute_antic_aux performs this computation. | |
93 | |
94 Third, we perform insertions to make partially redundant | |
95 expressions fully redundant. | |
96 | |
97 An expression is partially redundant (excluding partial | |
98 anticipation) if: | |
99 | |
100 1. It is AVAIL in some, but not all, of the predecessors of a | |
101 given block. | |
102 2. It is ANTIC in all the predecessors. | |
103 | |
104 In order to make it fully redundant, we insert the expression into | |
105 the predecessors where it is not available, but is ANTIC. | |
106 | |
107 For the partial anticipation case, we only perform insertion if it | |
108 is partially anticipated in some block, and fully available in all | |
109 of the predecessors. | |
110 | |
111 insert/insert_aux/do_regular_insertion/do_partial_partial_insertion | |
112 performs these steps. | |
113 | |
114 Fourth, we eliminate fully redundant expressions. | |
115 This is a simple statement walk that replaces redundant | |
116 calculations with the now available values. */ | |
117 | |
118 /* Representations of value numbers: | |
119 | |
120 Value numbers are represented by a representative SSA_NAME. We | |
121 will create fake SSA_NAME's in situations where we need a | |
122 representative but do not have one (because it is a complex | |
123 expression). In order to facilitate storing the value numbers in | |
124 bitmaps, and keep the number of wasted SSA_NAME's down, we also | |
125 associate a value_id with each value number, and create full blown | |
126 ssa_name's only where we actually need them (IE in operands of | |
127 existing expressions). | |
128 | |
129 Theoretically you could replace all the value_id's with | |
130 SSA_NAME_VERSION, but this would allocate a large number of | |
131 SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number. | |
132 It would also require an additional indirection at each point we | |
133 use the value id. */ | |
134 | |
135 /* Representation of expressions on value numbers: | |
136 | |
137 Expressions consisting of value numbers are represented the same | |
138 way as our VN internally represents them, with an additional | |
139 "pre_expr" wrapping around them in order to facilitate storing all | |
140 of the expressions in the same sets. */ | |
141 | |
142 /* Representation of sets: | |
143 | |
144 The dataflow sets do not need to be sorted in any particular order | |
145 for the majority of their lifetime, are simply represented as two | |
146 bitmaps, one that keeps track of values present in the set, and one | |
147 that keeps track of expressions present in the set. | |
148 | |
149 When we need them in topological order, we produce it on demand by | |
150 transforming the bitmap into an array and sorting it into topo | |
151 order. */ | |
152 | |
153 /* Type of expression, used to know which member of the PRE_EXPR union | |
154 is valid. */ | |
155 | |
156 enum pre_expr_kind | |
157 { | |
158 NAME, | |
159 NARY, | |
160 REFERENCE, | |
161 CONSTANT | |
162 }; | |
163 | |
164 typedef union pre_expr_union_d | |
165 { | |
166 tree name; | |
167 tree constant; | |
168 vn_nary_op_t nary; | |
169 vn_reference_t reference; | |
170 } pre_expr_union; | |
171 | |
172 typedef struct pre_expr_d | |
173 { | |
174 enum pre_expr_kind kind; | |
175 unsigned int id; | |
176 pre_expr_union u; | |
177 } *pre_expr; | |
178 | |
179 #define PRE_EXPR_NAME(e) (e)->u.name | |
180 #define PRE_EXPR_NARY(e) (e)->u.nary | |
181 #define PRE_EXPR_REFERENCE(e) (e)->u.reference | |
182 #define PRE_EXPR_CONSTANT(e) (e)->u.constant | |
183 | |
184 static int | |
185 pre_expr_eq (const void *p1, const void *p2) | |
186 { | |
187 const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1; | |
188 const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2; | |
189 | |
190 if (e1->kind != e2->kind) | |
191 return false; | |
192 | |
193 switch (e1->kind) | |
194 { | |
195 case CONSTANT: | |
196 return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1), | |
197 PRE_EXPR_CONSTANT (e2)); | |
198 case NAME: | |
199 return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2); | |
200 case NARY: | |
201 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2)); | |
202 case REFERENCE: | |
203 return vn_reference_eq (PRE_EXPR_REFERENCE (e1), | |
204 PRE_EXPR_REFERENCE (e2)); | |
205 default: | |
206 abort(); | |
207 } | |
208 } | |
209 | |
210 static hashval_t | |
211 pre_expr_hash (const void *p1) | |
212 { | |
213 const struct pre_expr_d *e = (const struct pre_expr_d *) p1; | |
214 switch (e->kind) | |
215 { | |
216 case CONSTANT: | |
217 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e)); | |
218 case NAME: | |
219 return iterative_hash_hashval_t (SSA_NAME_VERSION (PRE_EXPR_NAME (e)), 0); | |
220 case NARY: | |
221 return PRE_EXPR_NARY (e)->hashcode; | |
222 case REFERENCE: | |
223 return PRE_EXPR_REFERENCE (e)->hashcode; | |
224 default: | |
225 abort (); | |
226 } | |
227 } | |
228 | |
229 | |
230 /* Next global expression id number. */ | |
231 static unsigned int next_expression_id; | |
232 | |
233 /* Mapping from expression to id number we can use in bitmap sets. */ | |
234 DEF_VEC_P (pre_expr); | |
235 DEF_VEC_ALLOC_P (pre_expr, heap); | |
236 static VEC(pre_expr, heap) *expressions; | |
237 static htab_t expression_to_id; | |
238 | |
239 /* Allocate an expression id for EXPR. */ | |
240 | |
241 static inline unsigned int | |
242 alloc_expression_id (pre_expr expr) | |
243 { | |
244 void **slot; | |
245 /* Make sure we won't overflow. */ | |
246 gcc_assert (next_expression_id + 1 > next_expression_id); | |
247 expr->id = next_expression_id++; | |
248 VEC_safe_push (pre_expr, heap, expressions, expr); | |
249 slot = htab_find_slot (expression_to_id, expr, INSERT); | |
250 gcc_assert (!*slot); | |
251 *slot = expr; | |
252 return next_expression_id - 1; | |
253 } | |
254 | |
255 /* Return the expression id for tree EXPR. */ | |
256 | |
257 static inline unsigned int | |
258 get_expression_id (const pre_expr expr) | |
259 { | |
260 return expr->id; | |
261 } | |
262 | |
263 static inline unsigned int | |
264 lookup_expression_id (const pre_expr expr) | |
265 { | |
266 void **slot; | |
267 | |
268 slot = htab_find_slot (expression_to_id, expr, NO_INSERT); | |
269 if (!slot) | |
270 return 0; | |
271 return ((pre_expr)*slot)->id; | |
272 } | |
273 | |
274 /* Return the existing expression id for EXPR, or create one if one | |
275 does not exist yet. */ | |
276 | |
277 static inline unsigned int | |
278 get_or_alloc_expression_id (pre_expr expr) | |
279 { | |
280 unsigned int id = lookup_expression_id (expr); | |
281 if (id == 0) | |
282 return alloc_expression_id (expr); | |
283 return expr->id = id; | |
284 } | |
285 | |
286 /* Return the expression that has expression id ID */ | |
287 | |
288 static inline pre_expr | |
289 expression_for_id (unsigned int id) | |
290 { | |
291 return VEC_index (pre_expr, expressions, id); | |
292 } | |
293 | |
294 /* Free the expression id field in all of our expressions, | |
295 and then destroy the expressions array. */ | |
296 | |
297 static void | |
298 clear_expression_ids (void) | |
299 { | |
300 VEC_free (pre_expr, heap, expressions); | |
301 } | |
302 | |
303 static alloc_pool pre_expr_pool; | |
304 | |
305 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */ | |
306 | |
307 static pre_expr | |
308 get_or_alloc_expr_for_name (tree name) | |
309 { | |
310 pre_expr result = (pre_expr) pool_alloc (pre_expr_pool); | |
311 unsigned int result_id; | |
312 | |
313 result->kind = NAME; | |
314 result->id = 0; | |
315 PRE_EXPR_NAME (result) = name; | |
316 result_id = lookup_expression_id (result); | |
317 if (result_id != 0) | |
318 { | |
319 pool_free (pre_expr_pool, result); | |
320 result = expression_for_id (result_id); | |
321 return result; | |
322 } | |
323 get_or_alloc_expression_id (result); | |
324 return result; | |
325 } | |
326 | |
327 static bool in_fre = false; | |
328 | |
329 /* An unordered bitmap set. One bitmap tracks values, the other, | |
330 expressions. */ | |
331 typedef struct bitmap_set | |
332 { | |
333 bitmap expressions; | |
334 bitmap values; | |
335 } *bitmap_set_t; | |
336 | |
337 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \ | |
338 EXECUTE_IF_SET_IN_BITMAP((set)->expressions, 0, (id), (bi)) | |
339 | |
340 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \ | |
341 EXECUTE_IF_SET_IN_BITMAP((set)->values, 0, (id), (bi)) | |
342 | |
343 /* Mapping from value id to expressions with that value_id. */ | |
344 DEF_VEC_P (bitmap_set_t); | |
345 DEF_VEC_ALLOC_P (bitmap_set_t, heap); | |
346 static VEC(bitmap_set_t, heap) *value_expressions; | |
347 | |
348 /* Sets that we need to keep track of. */ | |
349 typedef struct bb_bitmap_sets | |
350 { | |
351 /* The EXP_GEN set, which represents expressions/values generated in | |
352 a basic block. */ | |
353 bitmap_set_t exp_gen; | |
354 | |
355 /* The PHI_GEN set, which represents PHI results generated in a | |
356 basic block. */ | |
357 bitmap_set_t phi_gen; | |
358 | |
359 /* The TMP_GEN set, which represents results/temporaries generated | |
360 in a basic block. IE the LHS of an expression. */ | |
361 bitmap_set_t tmp_gen; | |
362 | |
363 /* The AVAIL_OUT set, which represents which values are available in | |
364 a given basic block. */ | |
365 bitmap_set_t avail_out; | |
366 | |
367 /* The ANTIC_IN set, which represents which values are anticipatable | |
368 in a given basic block. */ | |
369 bitmap_set_t antic_in; | |
370 | |
371 /* The PA_IN set, which represents which values are | |
372 partially anticipatable in a given basic block. */ | |
373 bitmap_set_t pa_in; | |
374 | |
375 /* The NEW_SETS set, which is used during insertion to augment the | |
376 AVAIL_OUT set of blocks with the new insertions performed during | |
377 the current iteration. */ | |
378 bitmap_set_t new_sets; | |
379 | |
380 /* True if we have visited this block during ANTIC calculation. */ | |
381 unsigned int visited:1; | |
382 | |
383 /* True we have deferred processing this block during ANTIC | |
384 calculation until its successor is processed. */ | |
385 unsigned int deferred : 1; | |
386 } *bb_value_sets_t; | |
387 | |
388 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen | |
389 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen | |
390 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen | |
391 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out | |
392 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in | |
393 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in | |
394 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets | |
395 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited | |
396 #define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred | |
397 | |
398 | |
399 /* Maximal set of values, used to initialize the ANTIC problem, which | |
400 is an intersection problem. */ | |
401 static bitmap_set_t maximal_set; | |
402 | |
403 /* Basic block list in postorder. */ | |
404 static int *postorder; | |
405 | |
406 /* This structure is used to keep track of statistics on what | |
407 optimization PRE was able to perform. */ | |
408 static struct | |
409 { | |
410 /* The number of RHS computations eliminated by PRE. */ | |
411 int eliminations; | |
412 | |
413 /* The number of new expressions/temporaries generated by PRE. */ | |
414 int insertions; | |
415 | |
416 /* The number of inserts found due to partial anticipation */ | |
417 int pa_insert; | |
418 | |
419 /* The number of new PHI nodes added by PRE. */ | |
420 int phis; | |
421 | |
422 /* The number of values found constant. */ | |
423 int constified; | |
424 | |
425 } pre_stats; | |
426 | |
427 static bool do_partial_partial; | |
428 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple); | |
429 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr); | |
430 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr); | |
431 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t); | |
432 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int); | |
433 static void bitmap_insert_into_set (bitmap_set_t, pre_expr); | |
434 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr, bool); | |
435 static bitmap_set_t bitmap_set_new (void); | |
436 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *, | |
437 gimple, tree); | |
438 static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *, | |
439 gimple); | |
440 static unsigned int get_expr_value_id (pre_expr); | |
441 | |
442 /* We can add and remove elements and entries to and from sets | |
443 and hash tables, so we use alloc pools for them. */ | |
444 | |
445 static alloc_pool bitmap_set_pool; | |
446 static bitmap_obstack grand_bitmap_obstack; | |
447 | |
448 /* To avoid adding 300 temporary variables when we only need one, we | |
449 only create one temporary variable, on demand, and build ssa names | |
450 off that. We do have to change the variable if the types don't | |
451 match the current variable's type. */ | |
452 static tree pretemp; | |
453 static tree storetemp; | |
454 static tree prephitemp; | |
455 | |
456 /* Set of blocks with statements that have had its EH information | |
457 cleaned up. */ | |
458 static bitmap need_eh_cleanup; | |
459 | |
460 /* Which expressions have been seen during a given phi translation. */ | |
461 static bitmap seen_during_translate; | |
462 | |
463 /* The phi_translate_table caches phi translations for a given | |
464 expression and predecessor. */ | |
465 | |
466 static htab_t phi_translate_table; | |
467 | |
468 /* A three tuple {e, pred, v} used to cache phi translations in the | |
469 phi_translate_table. */ | |
470 | |
471 typedef struct expr_pred_trans_d | |
472 { | |
473 /* The expression. */ | |
474 pre_expr e; | |
475 | |
476 /* The predecessor block along which we translated the expression. */ | |
477 basic_block pred; | |
478 | |
479 /* The value that resulted from the translation. */ | |
480 pre_expr v; | |
481 | |
482 /* The hashcode for the expression, pred pair. This is cached for | |
483 speed reasons. */ | |
484 hashval_t hashcode; | |
485 } *expr_pred_trans_t; | |
486 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t; | |
487 | |
488 /* Return the hash value for a phi translation table entry. */ | |
489 | |
490 static hashval_t | |
491 expr_pred_trans_hash (const void *p) | |
492 { | |
493 const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p; | |
494 return ve->hashcode; | |
495 } | |
496 | |
497 /* Return true if two phi translation table entries are the same. | |
498 P1 and P2 should point to the expr_pred_trans_t's to be compared.*/ | |
499 | |
500 static int | |
501 expr_pred_trans_eq (const void *p1, const void *p2) | |
502 { | |
503 const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1; | |
504 const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2; | |
505 basic_block b1 = ve1->pred; | |
506 basic_block b2 = ve2->pred; | |
507 | |
508 /* If they are not translations for the same basic block, they can't | |
509 be equal. */ | |
510 if (b1 != b2) | |
511 return false; | |
512 return pre_expr_eq (ve1->e, ve2->e); | |
513 } | |
514 | |
515 /* Search in the phi translation table for the translation of | |
516 expression E in basic block PRED. | |
517 Return the translated value, if found, NULL otherwise. */ | |
518 | |
519 static inline pre_expr | |
520 phi_trans_lookup (pre_expr e, basic_block pred) | |
521 { | |
522 void **slot; | |
523 struct expr_pred_trans_d ept; | |
524 | |
525 ept.e = e; | |
526 ept.pred = pred; | |
527 ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index); | |
528 slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode, | |
529 NO_INSERT); | |
530 if (!slot) | |
531 return NULL; | |
532 else | |
533 return ((expr_pred_trans_t) *slot)->v; | |
534 } | |
535 | |
536 | |
537 /* Add the tuple mapping from {expression E, basic block PRED} to | |
538 value V, to the phi translation table. */ | |
539 | |
540 static inline void | |
541 phi_trans_add (pre_expr e, pre_expr v, basic_block pred) | |
542 { | |
543 void **slot; | |
544 expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d); | |
545 new_pair->e = e; | |
546 new_pair->pred = pred; | |
547 new_pair->v = v; | |
548 new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e), | |
549 pred->index); | |
550 | |
551 slot = htab_find_slot_with_hash (phi_translate_table, new_pair, | |
552 new_pair->hashcode, INSERT); | |
553 if (*slot) | |
554 free (*slot); | |
555 *slot = (void *) new_pair; | |
556 } | |
557 | |
558 | |
559 /* Add expression E to the expression set of value id V. */ | |
560 | |
561 void | |
562 add_to_value (unsigned int v, pre_expr e) | |
563 { | |
564 bitmap_set_t set; | |
565 | |
566 gcc_assert (get_expr_value_id (e) == v); | |
567 | |
568 if (v >= VEC_length (bitmap_set_t, value_expressions)) | |
569 { | |
570 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions, | |
571 v + 1); | |
572 } | |
573 | |
574 set = VEC_index (bitmap_set_t, value_expressions, v); | |
575 if (!set) | |
576 { | |
577 set = bitmap_set_new (); | |
578 VEC_replace (bitmap_set_t, value_expressions, v, set); | |
579 } | |
580 | |
581 bitmap_insert_into_set_1 (set, e, true); | |
582 } | |
583 | |
584 /* Create a new bitmap set and return it. */ | |
585 | |
586 static bitmap_set_t | |
587 bitmap_set_new (void) | |
588 { | |
589 bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool); | |
590 ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack); | |
591 ret->values = BITMAP_ALLOC (&grand_bitmap_obstack); | |
592 return ret; | |
593 } | |
594 | |
595 /* Return the value id for a PRE expression EXPR. */ | |
596 | |
597 static unsigned int | |
598 get_expr_value_id (pre_expr expr) | |
599 { | |
600 switch (expr->kind) | |
601 { | |
602 case CONSTANT: | |
603 { | |
604 unsigned int id; | |
605 id = get_constant_value_id (PRE_EXPR_CONSTANT (expr)); | |
606 if (id == 0) | |
607 { | |
608 id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr)); | |
609 add_to_value (id, expr); | |
610 } | |
611 return id; | |
612 } | |
613 case NAME: | |
614 return VN_INFO (PRE_EXPR_NAME (expr))->value_id; | |
615 case NARY: | |
616 return PRE_EXPR_NARY (expr)->value_id; | |
617 case REFERENCE: | |
618 return PRE_EXPR_REFERENCE (expr)->value_id; | |
619 default: | |
620 gcc_unreachable (); | |
621 } | |
622 } | |
623 | |
624 /* Remove an expression EXPR from a bitmapped set. */ | |
625 | |
626 static void | |
627 bitmap_remove_from_set (bitmap_set_t set, pre_expr expr) | |
628 { | |
629 unsigned int val = get_expr_value_id (expr); | |
630 if (!value_id_constant_p (val)) | |
631 { | |
632 bitmap_clear_bit (set->values, val); | |
633 bitmap_clear_bit (set->expressions, get_expression_id (expr)); | |
634 } | |
635 } | |
636 | |
637 static void | |
638 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr, | |
639 bool allow_constants) | |
640 { | |
641 unsigned int val = get_expr_value_id (expr); | |
642 if (allow_constants || !value_id_constant_p (val)) | |
643 { | |
644 /* We specifically expect this and only this function to be able to | |
645 insert constants into a set. */ | |
646 bitmap_set_bit (set->values, val); | |
647 bitmap_set_bit (set->expressions, get_or_alloc_expression_id (expr)); | |
648 } | |
649 } | |
650 | |
651 /* Insert an expression EXPR into a bitmapped set. */ | |
652 | |
653 static void | |
654 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr) | |
655 { | |
656 bitmap_insert_into_set_1 (set, expr, false); | |
657 } | |
658 | |
659 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */ | |
660 | |
661 static void | |
662 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig) | |
663 { | |
664 bitmap_copy (dest->expressions, orig->expressions); | |
665 bitmap_copy (dest->values, orig->values); | |
666 } | |
667 | |
668 | |
669 /* Free memory used up by SET. */ | |
670 static void | |
671 bitmap_set_free (bitmap_set_t set) | |
672 { | |
673 BITMAP_FREE (set->expressions); | |
674 BITMAP_FREE (set->values); | |
675 } | |
676 | |
677 | |
678 /* Generate an topological-ordered array of bitmap set SET. */ | |
679 | |
680 static VEC(pre_expr, heap) * | |
681 sorted_array_from_bitmap_set (bitmap_set_t set) | |
682 { | |
683 unsigned int i, j; | |
684 bitmap_iterator bi, bj; | |
685 VEC(pre_expr, heap) *result = NULL; | |
686 | |
687 FOR_EACH_VALUE_ID_IN_SET (set, i, bi) | |
688 { | |
689 /* The number of expressions having a given value is usually | |
690 relatively small. Thus, rather than making a vector of all | |
691 the expressions and sorting it by value-id, we walk the values | |
692 and check in the reverse mapping that tells us what expressions | |
693 have a given value, to filter those in our set. As a result, | |
694 the expressions are inserted in value-id order, which means | |
695 topological order. | |
696 | |
697 If this is somehow a significant lose for some cases, we can | |
698 choose which set to walk based on the set size. */ | |
699 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i); | |
700 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj) | |
701 { | |
702 if (bitmap_bit_p (set->expressions, j)) | |
703 VEC_safe_push (pre_expr, heap, result, expression_for_id (j)); | |
704 } | |
705 } | |
706 | |
707 return result; | |
708 } | |
709 | |
710 /* Perform bitmapped set operation DEST &= ORIG. */ | |
711 | |
712 static void | |
713 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig) | |
714 { | |
715 bitmap_iterator bi; | |
716 unsigned int i; | |
717 | |
718 if (dest != orig) | |
719 { | |
720 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack); | |
721 | |
722 bitmap_and_into (dest->values, orig->values); | |
723 bitmap_copy (temp, dest->expressions); | |
724 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi) | |
725 { | |
726 pre_expr expr = expression_for_id (i); | |
727 unsigned int value_id = get_expr_value_id (expr); | |
728 if (!bitmap_bit_p (dest->values, value_id)) | |
729 bitmap_clear_bit (dest->expressions, i); | |
730 } | |
731 BITMAP_FREE (temp); | |
732 } | |
733 } | |
734 | |
735 /* Subtract all values and expressions contained in ORIG from DEST. */ | |
736 | |
737 static bitmap_set_t | |
738 bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig) | |
739 { | |
740 bitmap_set_t result = bitmap_set_new (); | |
741 bitmap_iterator bi; | |
742 unsigned int i; | |
743 | |
744 bitmap_and_compl (result->expressions, dest->expressions, | |
745 orig->expressions); | |
746 | |
747 FOR_EACH_EXPR_ID_IN_SET (result, i, bi) | |
748 { | |
749 pre_expr expr = expression_for_id (i); | |
750 unsigned int value_id = get_expr_value_id (expr); | |
751 bitmap_set_bit (result->values, value_id); | |
752 } | |
753 | |
754 return result; | |
755 } | |
756 | |
757 /* Subtract all the values in bitmap set B from bitmap set A. */ | |
758 | |
759 static void | |
760 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b) | |
761 { | |
762 unsigned int i; | |
763 bitmap_iterator bi; | |
764 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack); | |
765 | |
766 bitmap_copy (temp, a->expressions); | |
767 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi) | |
768 { | |
769 pre_expr expr = expression_for_id (i); | |
770 if (bitmap_set_contains_value (b, get_expr_value_id (expr))) | |
771 bitmap_remove_from_set (a, expr); | |
772 } | |
773 BITMAP_FREE (temp); | |
774 } | |
775 | |
776 | |
777 /* Return true if bitmapped set SET contains the value VALUE_ID. */ | |
778 | |
779 static bool | |
780 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id) | |
781 { | |
782 if (value_id_constant_p (value_id)) | |
783 return true; | |
784 | |
785 if (!set || bitmap_empty_p (set->expressions)) | |
786 return false; | |
787 | |
788 return bitmap_bit_p (set->values, value_id); | |
789 } | |
790 | |
791 static inline bool | |
792 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr) | |
793 { | |
794 return bitmap_bit_p (set->expressions, get_expression_id (expr)); | |
795 } | |
796 | |
797 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */ | |
798 | |
799 static void | |
800 bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor, | |
801 const pre_expr expr) | |
802 { | |
803 bitmap_set_t exprset; | |
804 unsigned int i; | |
805 bitmap_iterator bi; | |
806 | |
807 if (value_id_constant_p (lookfor)) | |
808 return; | |
809 | |
810 if (!bitmap_set_contains_value (set, lookfor)) | |
811 return; | |
812 | |
813 /* The number of expressions having a given value is usually | |
814 significantly less than the total number of expressions in SET. | |
815 Thus, rather than check, for each expression in SET, whether it | |
816 has the value LOOKFOR, we walk the reverse mapping that tells us | |
817 what expressions have a given value, and see if any of those | |
818 expressions are in our set. For large testcases, this is about | |
819 5-10x faster than walking the bitmap. If this is somehow a | |
820 significant lose for some cases, we can choose which set to walk | |
821 based on the set size. */ | |
822 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor); | |
823 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi) | |
824 { | |
825 if (bitmap_bit_p (set->expressions, i)) | |
826 { | |
827 bitmap_clear_bit (set->expressions, i); | |
828 bitmap_set_bit (set->expressions, get_expression_id (expr)); | |
829 return; | |
830 } | |
831 } | |
832 } | |
833 | |
834 /* Return true if two bitmap sets are equal. */ | |
835 | |
836 static bool | |
837 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b) | |
838 { | |
839 return bitmap_equal_p (a->values, b->values); | |
840 } | |
841 | |
842 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists, | |
843 and add it otherwise. */ | |
844 | |
845 static void | |
846 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr) | |
847 { | |
848 unsigned int val = get_expr_value_id (expr); | |
849 | |
850 if (bitmap_set_contains_value (set, val)) | |
851 bitmap_set_replace_value (set, val, expr); | |
852 else | |
853 bitmap_insert_into_set (set, expr); | |
854 } | |
855 | |
856 /* Insert EXPR into SET if EXPR's value is not already present in | |
857 SET. */ | |
858 | |
859 static void | |
860 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr) | |
861 { | |
862 unsigned int val = get_expr_value_id (expr); | |
863 | |
864 if (value_id_constant_p (val)) | |
865 return; | |
866 | |
867 if (!bitmap_set_contains_value (set, val)) | |
868 bitmap_insert_into_set (set, expr); | |
869 } | |
870 | |
/* Print out EXPR to outfile.  CONSTANT and NAME expressions print as
   the underlying tree; NARY prints as {OPCODE,op1,...,opN}; REFERENCE
   prints its operand pieces inside {...}.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i], 0);
	    /* Comma-separate all but the last operand.  */
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
	     i++)
	  {
	    /* SSA names and declarations print as themselves; other
	       pieces are prefixed by their tree code name.  */
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      fprintf (outfile, "%s ", tree_code_name [vro->opcode]);
	    if (vro->op0)
	      {
		/* Two-operand pieces print as <op0,op1>.  */
		if (vro->op1)
		  fprintf (outfile, "<");
		print_generic_expr (outfile, vro->op0, 0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1, 0);
		  }
		if (vro->op1)
		  fprintf (outfile, ">");
	      }
	    if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;
    }
}
933 void debug_pre_expr (pre_expr); | |
934 | |
/* Like print_pre_expr but always prints to stderr, followed by a
   newline; intended for interactive use from the debugger.  */
void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}
942 | |
943 /* Print out SET to OUTFILE. */ | |
944 | |
945 static void | |
946 print_bitmap_set (FILE *outfile, bitmap_set_t set, | |
947 const char *setname, int blockindex) | |
948 { | |
949 fprintf (outfile, "%s[%d] := { ", setname, blockindex); | |
950 if (set) | |
951 { | |
952 bool first = true; | |
953 unsigned i; | |
954 bitmap_iterator bi; | |
955 | |
956 FOR_EACH_EXPR_ID_IN_SET (set, i, bi) | |
957 { | |
958 const pre_expr expr = expression_for_id (i); | |
959 | |
960 if (!first) | |
961 fprintf (outfile, ", "); | |
962 first = false; | |
963 print_pre_expr (outfile, expr); | |
964 | |
965 fprintf (outfile, " (%04d)", get_expr_value_id (expr)); | |
966 } | |
967 } | |
968 fprintf (outfile, " }\n"); | |
969 } | |
970 | |
971 void debug_bitmap_set (bitmap_set_t); | |
972 | |
/* Print SET to stderr under the name "debug"; for use from the
   debugger.  */
void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}
978 | |
979 /* Print out the expressions that have VAL to OUTFILE. */ | |
980 | |
981 void | |
982 print_value_expressions (FILE *outfile, unsigned int val) | |
983 { | |
984 bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val); | |
985 if (set) | |
986 { | |
987 char s[10]; | |
988 sprintf (s, "%04d", val); | |
989 print_bitmap_set (outfile, set, s, 0); | |
990 } | |
991 } | |
992 | |
993 | |
/* Print the expressions associated with value id VAL to stderr; for
   use from the debugger.  */
void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
999 | |
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  Returns the canonical pre_expr for CONSTANT, reusing
   an existing one when the expression table already has it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  /* Tentatively allocate an expression so it can serve as the lookup
     key in the expression-id table.  */
  pre_expr newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  result_id = lookup_expression_id (newexpr);
  if (result_id != 0)
    {
      /* Already known: release the tentative allocation and return
	 the canonical expression.  */
      pool_free (pre_expr_pool, newexpr);
      newexpr = expression_for_id (result_id);
      return newexpr;
    }
  /* New constant: give it an id and record it under its value.  */
  value_id = get_or_alloc_constant_value_id (constant);
  get_or_alloc_expression_id (newexpr);
  add_to_value (value_id, newexpr);
  return newexpr;
}
1023 | |
1024 /* Given a value id V, find the actual tree representing the constant | |
1025 value if there is one, and return it. Return NULL if we can't find | |
1026 a constant. */ | |
1027 | |
1028 static tree | |
1029 get_constant_for_value_id (unsigned int v) | |
1030 { | |
1031 if (value_id_constant_p (v)) | |
1032 { | |
1033 unsigned int i; | |
1034 bitmap_iterator bi; | |
1035 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v); | |
1036 | |
1037 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi) | |
1038 { | |
1039 pre_expr expr = expression_for_id (i); | |
1040 if (expr->kind == CONSTANT) | |
1041 return PRE_EXPR_CONSTANT (expr); | |
1042 } | |
1043 } | |
1044 return NULL; | |
1045 } | |
1046 | |
/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  Returns NULL for
   anything else that is not already known to the nary-op table.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
	 simplification that inserts values for them.  As they all
	 do not have VOPs the get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
	{
	  /* Wrap the existing nary in a pre_expr, reusing the
	     canonical expression if the table already has one.  */
	  pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
	  e->kind = NARY;
	  PRE_EXPR_NARY (e) = result;
	  result_id = lookup_expression_id (e);
	  if (result_id != 0)
	    {
	      pool_free (pre_expr_pool, e);
	      e = expression_for_id (result_id);
	      return e;
	    }
	  alloc_expression_id (e);
	  return e;
	}
    }
  return NULL;
}
1082 | |
/* Return the folded version of T if T, when folded, is a gimple
   min_invariant.  Otherwise, return T.  The result is always a
   pre_expr: either a fresh CONSTANT expression or E itself.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	switch (TREE_CODE_CLASS (nary->opcode))
	  {
	  case tcc_expression:
	    /* Of the tcc_expression codes, only the TRUTH_* ones are
	       handled; TRUTH_NOT folds as a unary, the rest as
	       binaries.  */
	    if (nary->opcode == TRUTH_NOT_EXPR)
	      goto do_unary;
	    if (nary->opcode != TRUTH_AND_EXPR
		&& nary->opcode != TRUTH_OR_EXPR
		&& nary->opcode != TRUTH_XOR_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_binary:
	  case tcc_comparison:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree naryop1 = nary->op[1];
	      tree result;
	      /* Replace each non-constant operand by the constant its
		 value id maps to, when one exists.  */
	      if (!is_gimple_min_invariant (naryop0))
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  tree const0 = get_constant_for_value_id (vrep0);
		  if (const0)
		    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
		}
	      if (!is_gimple_min_invariant (naryop1))
		{
		  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
		  unsigned int vrep1 = get_expr_value_id (rep1);
		  tree const1 = get_constant_for_value_id (vrep1);
		  if (const1)
		    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
		}
	      result = fold_binary (nary->opcode, nary->type,
				    naryop0, naryop1);
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      /* We might have simplified the expression to a
		 SSA_NAME for example from x_1 * 1.  But we cannot
		 insert a PHI for x_1 unconditionally as x_1 might
		 not be available readily.  */
	      return e;
	    }
	  case tcc_reference:
	    /* Only the "unary-like" reference codes fold here.  */
	    if (nary->opcode != REALPART_EXPR
		&& nary->opcode != IMAGPART_EXPR
		&& nary->opcode != VIEW_CONVERT_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_unary:
	  do_unary:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree const0, result;
	      if (is_gimple_min_invariant (naryop0))
		const0 = naryop0;
	      else
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  const0 = get_constant_for_value_id (vrep0);
		}
	      result = NULL;
	      if (const0)
		{
		  tree type1 = TREE_TYPE (nary->op[0]);
		  const0 = fold_convert (type1, const0);
		  result = fold_unary (nary->opcode, nary->type, const0);
		}
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      return e;
	    }
	  default:
	    return e;
	  }
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	VEC (vn_reference_op_s, heap) *operands = ref->operands;
	vn_reference_op_t op;

	/* Try to simplify the translated expression if it is
	   a call to a builtin function with at most two arguments.  */
	op = VEC_index (vn_reference_op_s, operands, 0);
	if (op->opcode == CALL_EXPR
	    && TREE_CODE (op->op0) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
	    && VEC_length (vn_reference_op_s, operands) >= 2
	    && VEC_length (vn_reference_op_s, operands) <= 3)
	  {
	    vn_reference_op_t arg0, arg1 = NULL;
	    bool anyconst = false;
	    arg0 = VEC_index (vn_reference_op_s, operands, 1);
	    if (VEC_length (vn_reference_op_s, operands) > 2)
	      arg1 = VEC_index (vn_reference_op_s, operands, 2);
	    /* Only attempt folding when at least one argument is a
	       constant (or the address of an invariant).  */
	    if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
		|| (arg0->opcode == ADDR_EXPR
		    && is_gimple_min_invariant (arg0->op0)))
	      anyconst = true;
	    if (arg1
		&& (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
		    || (arg1->opcode == ADDR_EXPR
			&& is_gimple_min_invariant (arg1->op0))))
	      anyconst = true;
	    if (anyconst)
	      {
		tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					       arg1 ? 2 : 1,
					       arg0->op0,
					       arg1 ? arg1->op0 : NULL);
		/* Strip a wrapping conversion the folder may add.  */
		if (folded
		    && TREE_CODE (folded) == NOP_EXPR)
		  folded = TREE_OPERAND (folded, 0);
		if (folded
		    && is_gimple_min_invariant (folded))
		  return get_or_alloc_expr_for_constant (folded);
	      }
	  }
	return e;
      }
    default:
      return e;
    }
  return e;
}
1227 | |
/* Translate the vuses in the VUSES vector backwards through phi nodes
   in PHIBLOCK, so that they have the value they would have in
   BLOCK.  Returns VUSES itself when nothing changed, otherwise a new
   sorted copy.  */

static VEC(tree, gc) *
translate_vuses_through_block (VEC (tree, gc) *vuses,
			       basic_block phiblock,
			       basic_block block)
{
  tree oldvuse;
  VEC(tree, gc) *result = NULL;
  int i;

  for (i = 0; VEC_iterate (tree, vuses, i, oldvuse); i++)
    {
      gimple phi = SSA_NAME_DEF_STMT (oldvuse);
      if (gimple_code (phi) == GIMPLE_PHI
	  && gimple_bb (phi) == phiblock)
	{
	  /* The vuse is merged in PHIBLOCK; substitute the PHI
	     argument arriving on the BLOCK->PHIBLOCK edge.  */
	  edge e = find_edge (block, gimple_bb (phi));
	  if (e)
	    {
	      tree def = PHI_ARG_DEF (phi, e->dest_idx);
	      if (def != oldvuse)
		{
		  /* Copy-on-write: clone VUSES only once we know
		     some entry actually changes.  */
		  if (!result)
		    result = VEC_copy (tree, gc, vuses);
		  VEC_replace (tree, result, i, def);
		}
	    }
	}
    }

  /* We avoid creating a new copy of the vuses unless something
     actually changed, so result can be NULL.  */
  if (result)
    {
      sort_vuses (result);
      return result;
    }
  return vuses;

}
1271 | |
1272 /* Like find_leader, but checks for the value existing in SET1 *or* | |
1273 SET2. This is used to avoid making a set consisting of the union | |
1274 of PA_IN and ANTIC_IN during insert. */ | |
1275 | |
1276 static inline pre_expr | |
1277 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2) | |
1278 { | |
1279 pre_expr result; | |
1280 | |
1281 result = bitmap_find_leader (set1, val, NULL); | |
1282 if (!result && set2) | |
1283 result = bitmap_find_leader (set2, val, NULL); | |
1284 return result; | |
1285 } | |
1286 | |
/* Get the tree type for our PRE expression e.  Aborts on an unknown
   expression kind.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      {
	vn_reference_op_t vro;

	/* The type of the reference is taken from its first operand
	   piece.  */
	gcc_assert (PRE_EXPR_REFERENCE (e)->operands);
	vro = VEC_index (vn_reference_op_s,
			 PRE_EXPR_REFERENCE (e)->operands,
			 0);
	/* We don't store type along with COMPONENT_REF because it is
	   always the same as FIELD_DECL's type.  */
	if (!vro->type)
	  {
	    gcc_assert (vro->opcode == COMPONENT_REF);
	    return TREE_TYPE (vro->op0);
	  }
	return vro->type;
      }

    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable();
}
1321 | |
/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree exprtype;
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
					value_id);
	FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      return PRE_EXPR_NAME (rep);
	  }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
	       "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  exprtype = get_expr_type (e);

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      /* The cached temporary is reused across calls; only create a
	 new one when the type differs.  */
      pretemp = create_tmp_var (exprtype, "pretmp");
      get_var_ann (pretemp);
    }

  name = make_ssa_name (pretemp, gimple_build_nop ());
  VN_INFO_GET (name)->value_id = value_id;
  /* A constant expression's new name values to the constant itself;
     any other kind values to the fresh name.  */
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}
1402 | |
1403 | |
1404 | |
1405 | |
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  SEEN is a bitmap saying which expression we have
   translated since we started translation of the toplevel expression.
   Return NULL if we can't find a leader for each part of the
   translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
		 basic_block pred, basic_block phiblock, bitmap seen)
{
  pre_expr oldexpr = expr;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constant values need no translation at all.  */
  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  /* Reuse a cached translation of this (EXPR, PRED) pair when one
     exists.  */
  phitrans = phi_trans_lookup (expr, pred);
  if (phitrans)
    return phitrans;

  /* Prevent cycles when we have recursively dependent leaders.  This
     can only happen when phi translating the maximal set.  */
  if (seen)
    {
      unsigned int expr_id = get_expression_id (expr);
      if (bitmap_bit_p (seen, expr_id))
	return NULL;
      bitmap_set_bit (seen, expr_id);
    }

  switch (expr->kind)
    {
      /* Constants contain no values that need translation.  */
    case CONSTANT:
      return expr;

    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	struct vn_nary_op_s newnary;
	/* The NARY structure is only guaranteed to have been
	   allocated to the nary->length operands.  */
	memcpy (&newnary, nary, (sizeof (struct vn_nary_op_s)
				 - sizeof (tree) * (4 - nary->length)))&#x3B;

	/* Translate each SSA_NAME operand recursively and substitute
	   an SSA_NAME representative for its translated value.  */
	for (i = 0; i < newnary.length; i++)
	  {
	    if (TREE_CODE (newnary.op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		unsigned int op_val_id = VN_INFO (newnary.op[i])->value_id;
		pre_expr leader = find_leader_in_sets (op_val_id, set1, set2);
		pre_expr result = phi_translate_1 (leader, set1, set2,
						   pred, phiblock, seen);
		if (result && result != leader)
		  {
		    tree name = get_representative_for (result);
		    if (!name)
		      return NULL;
		    newnary.op[i] = name;
		  }
		else if (!result)
		  return NULL;

		changed |= newnary.op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;

	    /* Look up the translated nary; NARY is overwritten with
	       the existing entry when the lookup succeeds.  */
	    tree result = vn_nary_op_lookup_pieces (newnary.length,
						    newnary.opcode,
						    newnary.type,
						    newnary.op[0],
						    newnary.op[1],
						    newnary.op[2],
						    newnary.op[3],
						    &nary);
	    unsigned int new_val_id;

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = NARY;
	    expr->id = 0;
	    /* NOTE(review): the just-allocated EXPR is not returned
	       to the pool on this early-return path.  */
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);


	    if (nary)
	      {
		/* The translated nary already has a value id.  */
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		/* Brand-new nary: allocate a fresh value id, grow the
		   value->expressions map, and insert it.  */
		new_val_id = get_next_value_id ();
		VEC_safe_grow_cleared (bitmap_set_t, heap,
				       value_expressions,
				       get_max_value_id() + 1);
		nary = vn_nary_op_insert_pieces (newnary.length,
						 newnary.opcode,
						 newnary.type,
						 newnary.op[0],
						 newnary.op[1],
						 newnary.op[2],
						 newnary.op[3],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	phi_trans_add (oldexpr, expr, pred);
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	VEC (vn_reference_op_s, heap) *operands = ref->operands;
	VEC (tree, gc) *vuses = ref->vuses;
	VEC (tree, gc) *newvuses = vuses;
	VEC (vn_reference_op_s, heap) *newoperands = NULL;
	bool changed = false;
	unsigned int i;
	vn_reference_op_t operand;
	vn_reference_t newref;

	/* Translate op0/op1/op2 of every reference piece; a failed
	   translation breaks out of the loop early, which is detected
	   after the loop by I not having reached the vector length.  */
	for (i = 0; VEC_iterate (vn_reference_op_s, operands, i, operand); i++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree oldop0 = operand->op0;
	    tree oldop1 = operand->op1;
	    tree oldop2 = operand->op2;
	    tree op0 = oldop0;
	    tree op1 = oldop1;
	    tree op2 = oldop2;
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;

	    if (op0 && TREE_CODE (op0) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op0)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate_1 (leader, set1, set2,
					    pred, phiblock, seen);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op0 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    changed |= op0 != oldop0;

	    if (op1 && TREE_CODE (op1) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op1)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate_1 (leader, set1, set2,
					    pred, phiblock, seen);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op1 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    changed |= op1 != oldop1;
	    if (op2 && TREE_CODE (op2) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op2)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate_1 (leader, set1, set2,
					    pred, phiblock, seen);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op2 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    changed |= op2 != oldop2;

	    /* Copy-on-write of the operand vector.  */
	    if (!newoperands)
	      newoperands = VEC_copy (vn_reference_op_s, heap, operands);
	    /* We may have changed from an SSA_NAME to a constant */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op0) != SSA_NAME)
	      newop.opcode = TREE_CODE (op0);
	    newop.type = type;
	    newop.op0 = op0;
	    newop.op1 = op1;
	    newop.op2 = op2;
	    VEC_replace (vn_reference_op_s, newoperands, i, &newop);
	  }
	/* An early break above means some operand had no translation;
	   the whole expression fails to translate.  */
	if (i != VEC_length (vn_reference_op_s, operands))
	  {
	    if (newoperands)
	      VEC_free (vn_reference_op_s, heap, newoperands);
	    return NULL;
	  }

	newvuses = translate_vuses_through_block (vuses, phiblock, pred);
	changed |= newvuses != vuses;

	if (changed)
	  {
	    unsigned int new_val_id;
	    pre_expr constant;

	    tree result = vn_reference_lookup_pieces (newvuses,
						      newoperands,
						      &newref, true);
	    if (newref)
	      VEC_free (vn_reference_op_s, heap, newoperands);

	    if (result && is_gimple_min_invariant (result))
	      {
		gcc_assert (!newoperands);
		return get_or_alloc_expr_for_constant (result);
	      }

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (newref)
	      {
		/* Reuse the looked-up reference and its value id.  */
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = newref->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		/* Brand-new reference: fresh value id, insert into the
		   reference table (which takes ownership of
		   NEWOPERANDS).  */
		new_val_id = get_next_value_id ();
		VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
				       get_max_value_id() + 1);
		newref = vn_reference_insert_pieces (newvuses,
						     newoperands,
						     result, new_val_id);
		newoperands = NULL;
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	VEC_free (vn_reference_op_s, heap, newoperands);
	phi_trans_add (oldexpr, expr, pred);
	return expr;
      }
      break;

    case NAME:
      {
	gimple phi = NULL;
	edge e;
	gimple def_stmt;
	tree name = PRE_EXPR_NAME (expr);

	/* Only names defined by a PHI in PHIBLOCK need translation;
	   everything else passes through unchanged.  */
	def_stmt = SSA_NAME_DEF_STMT (name);
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  phi = def_stmt;
	else
	  return expr;

	e = find_edge (pred, gimple_bb (phi));
	if (e)
	  {
	    tree def = PHI_ARG_DEF (phi, e->dest_idx);
	    pre_expr newexpr;

	    if (TREE_CODE (def) == SSA_NAME)
	      def = VN_INFO (def)->valnum;

	    /* Handle constant. */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    /* An undefined incoming value cannot be translated.  */
	    if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
	      return NULL;

	    newexpr = get_or_alloc_expr_for_name (def);
	    return newexpr;
	  }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}
1730 | |
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.
   Return NULL if we can't find a leader for each part of the
   translated expression.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock)
{
  /* Reset the shared cycle-detection bitmap before starting a fresh
     toplevel translation.  */
  bitmap_clear (seen_during_translate);
  return phi_translate_1 (expr, set1, set2, pred, phiblock,
			  seen_during_translate);
}
1744 | |
1745 /* For each expression in SET, translate the values through phi nodes | |
1746 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting | |
1747 expressions in DEST. */ | |
1748 | |
1749 static void | |
1750 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred, | |
1751 basic_block phiblock) | |
1752 { | |
1753 VEC (pre_expr, heap) *exprs; | |
1754 pre_expr expr; | |
1755 int i; | |
1756 | |
1757 if (!phi_nodes (phiblock)) | |
1758 { | |
1759 bitmap_set_copy (dest, set); | |
1760 return; | |
1761 } | |
1762 | |
1763 exprs = sorted_array_from_bitmap_set (set); | |
1764 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++) | |
1765 { | |
1766 pre_expr translated; | |
1767 translated = phi_translate (expr, set, NULL, pred, phiblock); | |
1768 | |
1769 /* Don't add empty translations to the cache */ | |
1770 if (translated) | |
1771 phi_trans_add (expr, translated, pred); | |
1772 | |
1773 if (translated != NULL) | |
1774 bitmap_value_insert_into_set (dest, translated); | |
1775 } | |
1776 VEC_free (pre_expr, heap, exprs); | |
1777 } | |
1778 | |
1779 /* Find the leader for a value (i.e., the name representing that | |
1780 value) in a given set, and return it. If STMT is non-NULL it | |
1781 makes sure the defining statement for the leader dominates it. | |
1782 Return NULL if no leader is found. */ | |
1783 | |
1784 static pre_expr | |
1785 bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt) | |
1786 { | |
1787 if (value_id_constant_p (val)) | |
1788 { | |
1789 unsigned int i; | |
1790 bitmap_iterator bi; | |
1791 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val); | |
1792 | |
1793 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi) | |
1794 { | |
1795 pre_expr expr = expression_for_id (i); | |
1796 if (expr->kind == CONSTANT) | |
1797 return expr; | |
1798 } | |
1799 } | |
1800 if (bitmap_set_contains_value (set, val)) | |
1801 { | |
1802 /* Rather than walk the entire bitmap of expressions, and see | |
1803 whether any of them has the value we are looking for, we look | |
1804 at the reverse mapping, which tells us the set of expressions | |
1805 that have a given value (IE value->expressions with that | |
1806 value) and see if any of those expressions are in our set. | |
1807 The number of expressions per value is usually significantly | |
1808 less than the number of expressions in the set. In fact, for | |
1809 large testcases, doing it this way is roughly 5-10x faster | |
1810 than walking the bitmap. | |
1811 If this is somehow a significant lose for some cases, we can | |
1812 choose which set to walk based on which set is smaller. */ | |
1813 unsigned int i; | |
1814 bitmap_iterator bi; | |
1815 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val); | |
1816 | |
1817 EXECUTE_IF_AND_IN_BITMAP (exprset->expressions, | |
1818 set->expressions, 0, i, bi) | |
1819 { | |
1820 pre_expr val = expression_for_id (i); | |
1821 /* At the point where stmt is not null, there should always | |
1822 be an SSA_NAME first in the list of expressions. */ | |
1823 if (stmt) | |
1824 { | |
1825 gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val)); | |
1826 if (gimple_code (def_stmt) != GIMPLE_PHI | |
1827 && gimple_bb (def_stmt) == gimple_bb (stmt) | |
1828 && gimple_uid (def_stmt) >= gimple_uid (stmt)) | |
1829 continue; | |
1830 } | |
1831 return val; | |
1832 } | |
1833 } | |
1834 return NULL; | |
1835 } | |
1836 | |
1837 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of | |
1838 BLOCK by seeing if it is not killed in the block. Note that we are | |
1839 only determining whether there is a store that kills it. Because | |
1840 of the order in which clean iterates over values, we are guaranteed | |
1841 that altered operands will have caused us to be eliminated from the | |
1842 ANTIC_IN set already. */ | |
1843 | |
1844 static bool | |
1845 value_dies_in_block_x (pre_expr expr, basic_block block) | |
1846 { | |
1847 int i; | |
1848 tree vuse; | |
1849 VEC (tree, gc) *vuses = PRE_EXPR_REFERENCE (expr)->vuses; | |
1850 | |
1851 /* Conservatively, a value dies if it's vuses are defined in this | |
1852 block, unless they come from phi nodes (which are merge operations, | |
1853 rather than stores. */ | |
1854 for (i = 0; VEC_iterate (tree, vuses, i, vuse); i++) | |
1855 { | |
1856 gimple def = SSA_NAME_DEF_STMT (vuse); | |
1857 | |
1858 if (gimple_bb (def) != block) | |
1859 continue; | |
1860 if (gimple_code (def) == GIMPLE_PHI) | |
1861 continue; | |
1862 return true; | |
1863 } | |
1864 return false; | |
1865 } | |
1866 | |
1867 | |
/* Nonzero if value id VAL is present in SET1 or in SET2; SET2 may be
   NULL.  */
#define union_contains_value(SET1, SET2, VAL)			\
  (bitmap_set_contains_value ((SET1), (VAL))			\
   || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))
1871 | |
1872 /* Determine if vn_reference_op_t VRO is legal in SET1 U SET2. | |
1873 */ | |
1874 static bool | |
1875 vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, | |
1876 vn_reference_op_t vro) | |
1877 { | |
1878 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME) | |
1879 { | |
1880 struct pre_expr_d temp; | |
1881 temp.kind = NAME; | |
1882 temp.id = 0; | |
1883 PRE_EXPR_NAME (&temp) = vro->op0; | |
1884 temp.id = lookup_expression_id (&temp); | |
1885 if (temp.id == 0) | |
1886 return false; | |
1887 if (!union_contains_value (set1, set2, | |
1888 get_expr_value_id (&temp))) | |
1889 return false; | |
1890 } | |
1891 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME) | |
1892 { | |
1893 struct pre_expr_d temp; | |
1894 temp.kind = NAME; | |
1895 temp.id = 0; | |
1896 PRE_EXPR_NAME (&temp) = vro->op1; | |
1897 temp.id = lookup_expression_id (&temp); | |
1898 if (temp.id == 0) | |
1899 return false; | |
1900 if (!union_contains_value (set1, set2, | |
1901 get_expr_value_id (&temp))) | |
1902 return false; | |
1903 } | |
1904 | |
1905 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME) | |
1906 { | |
1907 struct pre_expr_d temp; | |
1908 temp.kind = NAME; | |
1909 temp.id = 0; | |
1910 PRE_EXPR_NAME (&temp) = vro->op2; | |
1911 temp.id = lookup_expression_id (&temp); | |
1912 if (temp.id == 0) | |
1913 return false; | |
1914 if (!union_contains_value (set1, set2, | |
1915 get_expr_value_id (&temp))) | |
1916 return false; | |
1917 } | |
1918 | |
1919 return true; | |
1920 } | |
1921 | |
1922 /* Determine if the expression EXPR is valid in SET1 U SET2. | |
1923 ONLY SET2 CAN BE NULL. | |
1924 This means that we have a leader for each part of the expression | |
1925 (if it consists of values), or the expression is an SSA_NAME. | |
1926 For loads/calls, we also see if the vuses are killed in this block. | |
1927 */ | |
1928 | |
1929 static bool | |
1930 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr, | |
1931 basic_block block) | |
1932 { | |
1933 switch (expr->kind) | |
1934 { | |
1935 case NAME: | |
1936 return bitmap_set_contains_expr (AVAIL_OUT (block), expr); | |
1937 case NARY: | |
1938 { | |
1939 unsigned int i; | |
1940 vn_nary_op_t nary = PRE_EXPR_NARY (expr); | |
1941 for (i = 0; i < nary->length; i++) | |
1942 { | |
1943 if (TREE_CODE (nary->op[i]) == SSA_NAME) | |
1944 { | |
1945 struct pre_expr_d temp; | |
1946 temp.kind = NAME; | |
1947 temp.id = 0; | |
1948 PRE_EXPR_NAME (&temp) = nary->op[i]; | |
1949 temp.id = lookup_expression_id (&temp); | |
1950 if (temp.id == 0) | |
1951 return false; | |
1952 if (!union_contains_value (set1, set2, | |
1953 get_expr_value_id (&temp))) | |
1954 return false; | |
1955 } | |
1956 } | |
1957 return true; | |
1958 } | |
1959 break; | |
1960 case REFERENCE: | |
1961 { | |
1962 vn_reference_t ref = PRE_EXPR_REFERENCE (expr); | |
1963 vn_reference_op_t vro; | |
1964 unsigned int i; | |
1965 | |
1966 for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++) | |
1967 { | |
1968 if (!vro_valid_in_sets (set1, set2, vro)) | |
1969 return false; | |
1970 } | |
1971 return !value_dies_in_block_x (expr, block); | |
1972 } | |
1973 default: | |
1974 gcc_unreachable (); | |
1975 } | |
1976 } | |
1977 | |
1978 /* Clean the set of expressions that are no longer valid in SET1 or | |
1979 SET2. This means expressions that are made up of values we have no | |
1980 leaders for in SET1 or SET2. This version is used for partial | |
1981 anticipation, which means it is not valid in either ANTIC_IN or | |
1982 PA_IN. */ | |
1983 | |
1984 static void | |
1985 dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block) | |
1986 { | |
1987 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1); | |
1988 pre_expr expr; | |
1989 int i; | |
1990 | |
1991 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++) | |
1992 { | |
1993 if (!valid_in_sets (set1, set2, expr, block)) | |
1994 bitmap_remove_from_set (set1, expr); | |
1995 } | |
1996 VEC_free (pre_expr, heap, exprs); | |
1997 } | |
1998 | |
1999 /* Clean the set of expressions that are no longer valid in SET. This | |
2000 means expressions that are made up of values we have no leaders for | |
2001 in SET. */ | |
2002 | |
2003 static void | |
2004 clean (bitmap_set_t set, basic_block block) | |
2005 { | |
2006 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set); | |
2007 pre_expr expr; | |
2008 int i; | |
2009 | |
2010 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++) | |
2011 { | |
2012 if (!valid_in_sets (set, NULL, expr, block)) | |
2013 bitmap_remove_from_set (set, expr); | |
2014 } | |
2015 VEC_free (pre_expr, heap, exprs); | |
2016 } | |
2017 | |
/* Bitmap of blocks that have at least one incoming abnormal edge;
   for such blocks ANTIC_IN/PA_IN are left empty.  Filled in once at
   the start of compute_antic.  */
static sbitmap has_abnormal_preds;

/* List of blocks that may have changed during ANTIC computation and
   thus need to be iterated over.  */

static sbitmap changed_blocks;
2024 | |
2025 /* Decide whether to defer a block for a later iteration, or PHI | |
2026 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we | |
2027 should defer the block, and true if we processed it. */ | |
2028 | |
2029 static bool | |
2030 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source, | |
2031 basic_block block, basic_block phiblock) | |
2032 { | |
2033 if (!BB_VISITED (phiblock)) | |
2034 { | |
2035 SET_BIT (changed_blocks, block->index); | |
2036 BB_VISITED (block) = 0; | |
2037 BB_DEFERRED (block) = 1; | |
2038 return false; | |
2039 } | |
2040 else | |
2041 phi_translate_set (dest, source, block, phiblock); | |
2042 return true; | |
2043 } | |
2044 | |
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])

   Returns true when ANTIC_IN (BLOCK) changed or the block was
   deferred, i.e. when another iteration over the region is needed.  */

static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t S, old, ANTIC_OUT;
  bitmap_iterator bi;
  unsigned int bii;
  edge e;
  edge_iterator ei;

  old = ANTIC_OUT = S = NULL;
  BB_VISITED (block) = 1;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      basic_block succ_bb = single_succ (block);

      /* We trade iterations of the dataflow equations for having to
	 phi translate the maximal set, which is incredibly slow
	 (since the maximal set often has 300+ members, even when you
	 have a small number of blocks).
	 Basically, we defer the computation of ANTIC for this block
	 until we have processed its successor, which will inevitably
	 have a *much* smaller set of values to phi translate once
	 clean has been run on it.
	 The cost of doing this is that we technically perform more
	 iterations, however, they are lower cost iterations.

	 Timings for PRE on tramp3d-v4:
	 without maximal set fix: 11 seconds
	 with maximal set fix/without deferring: 26 seconds
	 with maximal set fix/with deferring: 11 seconds
     */

      if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
					 block, succ_bb))
	{
	  changed = true;
	  goto maybe_dump_sets;
	}
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime, first;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	VEC_quick_push (basic_block, worklist, e->dest);
      first = VEC_index (basic_block, worklist, 0);

      /* Seed ANTIC_OUT from the first successor; a not-yet-visited
	 successor contributes the maximal set instead of its (still
	 empty) ANTIC_IN.  */
      if (phi_nodes (first))
	{
	  bitmap_set_t from = ANTIC_IN (first);

	  if (!BB_VISITED (first))
	    from = maximal_set;
	  phi_translate_set (ANTIC_OUT, from, block, first);
	}
      else
	{
	  if (!BB_VISITED (first))
	    bitmap_set_copy (ANTIC_OUT, maximal_set);
	  else
	    bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
	}

      /* Intersect ANTIC_OUT with each remaining successor's
	 (possibly phi-translated) set.  */
      for (i = 1; VEC_iterate (basic_block, worklist, i, bprime); i++)
	{
	  if (phi_nodes (bprime))
	    {
	      bitmap_set_t tmp = bitmap_set_new ();
	      bitmap_set_t from = ANTIC_IN (bprime);

	      if (!BB_VISITED (bprime))
		from = maximal_set;
	      phi_translate_set (tmp, from, block, bprime);
	      bitmap_set_and (ANTIC_OUT, tmp);
	      bitmap_set_free (tmp);
	    }
	  else
	    {
	      if (!BB_VISITED (bprime))
		bitmap_set_and (ANTIC_OUT, maximal_set);
	      else
		bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
	    }
	}
      VEC_free (basic_block, heap, worklist);
    }

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
					  TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN.  */
  FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
    bitmap_value_insert_into_set (ANTIC_IN (block),
				  expression_for_id (bii));

  clean (ANTIC_IN (block), block);

  /* !old->expressions can happen when we deferred a block.  */
  if (!old->expressions || !bitmap_set_equal (old, ANTIC_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      /* Our predecessors' ANTIC sets depend on ours, so revisit them.  */
      FOR_EACH_EDGE (e, ei, block->preds)
	SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!BB_DEFERRED (block) || BB_VISITED (block))
	{
	  if (ANTIC_OUT)
	    print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

	  print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
			    block->index);

	  if (S)
	    print_bitmap_set (dump_file, S, "S", block->index);
	}
      else
	{
	  fprintf (dump_file,
		   "Block %d was deferred for a future iteration.\n",
		   block->index);
	}
    }
  if (old)
    bitmap_set_free (old);
  if (S)
    bitmap_set_free (S);
  if (ANTIC_OUT)
    bitmap_set_free (ANTIC_OUT);
  return changed;
}
2218 | |
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
				  - ANTIC_IN[BLOCK])

   Returns true when PA_IN (BLOCK) changed, so the caller knows
   another iteration is needed.  */
static bool
compute_partial_antic_aux (basic_block block,
			   bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t old_PA_IN;
  bitmap_set_t PA_OUT;
  edge e;
  edge_iterator ei;
  unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);

  old_PA_IN = PA_OUT = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  /* If there are too many partially anticipatable values in the
     block, phi_translate_set can take an exponential time: stop
     before the translation starts.  */
  if (max_pa
      && single_succ_p (block)
      && bitmap_count_bits (PA_IN (single_succ (block))->values) > max_pa)
    goto maybe_dump_sets;

  old_PA_IN = PA_IN (block);
  PA_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  Note that we can't phi translate across DFS
     back edges in partial antic, because it uses a union operation on
     the successors.  For recurrences like IV's, we will end up
     generating a new value in the set on each go around (i + 3 (VH.1)
     VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
  else if (single_succ_p (block))
    {
      basic_block succ = single_succ (block);
      if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
	phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
    }
  /* If we have multiple successors, we take the union of all of
     them.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      size_t i;
      basic_block bprime;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & EDGE_DFS_BACK)
	    continue;
	  VEC_quick_push (basic_block, worklist, e->dest);
	}
      if (VEC_length (basic_block, worklist) > 0)
	{
	  for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
	    {
	      unsigned int i;
	      bitmap_iterator bi;

	      /* Everything fully anticipatable in the successor is
		 partially anticipatable here.  */
	      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
		bitmap_value_insert_into_set (PA_OUT,
					      expression_for_id (i));
	      if (phi_nodes (bprime))
		{
		  bitmap_set_t pa_in = bitmap_set_new ();
		  phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
		  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
		    bitmap_value_insert_into_set (PA_OUT,
						  expression_for_id (i));
		  bitmap_set_free (pa_in);
		}
	      else
		FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
		  bitmap_value_insert_into_set (PA_OUT,
						expression_for_id (i));
	    }
	}
      VEC_free (basic_block, heap, worklist);
    }

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (PA_IN (block)->values, PHI_GEN (block)->values);
  bitmap_ior_into (PA_IN (block)->expressions, PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  dependent_clean (PA_IN (block), ANTIC_IN (block), block);

  /* If PA_IN changed, this block and its predecessors need another
     pass.  */
  if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
    {
      changed = true;
      SET_BIT (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	SET_BIT (changed_blocks, e->src->index);
    }
  else
    RESET_BIT (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (PA_OUT)
	print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);

      print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
    }
  if (old_PA_IN)
    bitmap_set_free (old_PA_IN);
  if (PA_OUT)
    bitmap_set_free (PA_OUT);
  return changed;
}
2356 | |
2357 /* Compute ANTIC and partial ANTIC sets. */ | |
2358 | |
2359 static void | |
2360 compute_antic (void) | |
2361 { | |
2362 bool changed = true; | |
2363 int num_iterations = 0; | |
2364 basic_block block; | |
2365 int i; | |
2366 | |
2367 /* If any predecessor edges are abnormal, we punt, so antic_in is empty. | |
2368 We pre-build the map of blocks with incoming abnormal edges here. */ | |
2369 has_abnormal_preds = sbitmap_alloc (last_basic_block); | |
2370 sbitmap_zero (has_abnormal_preds); | |
2371 | |
2372 FOR_EACH_BB (block) | |
2373 { | |
2374 edge_iterator ei; | |
2375 edge e; | |
2376 | |
2377 FOR_EACH_EDGE (e, ei, block->preds) | |
2378 { | |
2379 e->flags &= ~EDGE_DFS_BACK; | |
2380 if (e->flags & EDGE_ABNORMAL) | |
2381 { | |
2382 SET_BIT (has_abnormal_preds, block->index); | |
2383 break; | |
2384 } | |
2385 } | |
2386 | |
2387 BB_VISITED (block) = 0; | |
2388 BB_DEFERRED (block) = 0; | |
2389 /* While we are here, give empty ANTIC_IN sets to each block. */ | |
2390 ANTIC_IN (block) = bitmap_set_new (); | |
2391 PA_IN (block) = bitmap_set_new (); | |
2392 } | |
2393 | |
2394 /* At the exit block we anticipate nothing. */ | |
2395 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new (); | |
2396 BB_VISITED (EXIT_BLOCK_PTR) = 1; | |
2397 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new (); | |
2398 | |
2399 changed_blocks = sbitmap_alloc (last_basic_block + 1); | |
2400 sbitmap_ones (changed_blocks); | |
2401 while (changed) | |
2402 { | |
2403 if (dump_file && (dump_flags & TDF_DETAILS)) | |
2404 fprintf (dump_file, "Starting iteration %d\n", num_iterations); | |
2405 num_iterations++; | |
2406 changed = false; | |
2407 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++) | |
2408 { | |
2409 if (TEST_BIT (changed_blocks, postorder[i])) | |
2410 { | |
2411 basic_block block = BASIC_BLOCK (postorder[i]); | |
2412 changed |= compute_antic_aux (block, | |
2413 TEST_BIT (has_abnormal_preds, | |
2414 block->index)); | |
2415 } | |
2416 } | |
2417 #ifdef ENABLE_CHECKING | |
2418 /* Theoretically possible, but *highly* unlikely. */ | |
2419 gcc_assert (num_iterations < 500); | |
2420 #endif | |
2421 } | |
2422 | |
2423 statistics_histogram_event (cfun, "compute_antic iterations", | |
2424 num_iterations); | |
2425 | |
2426 if (do_partial_partial) | |
2427 { | |
2428 sbitmap_ones (changed_blocks); | |
2429 mark_dfs_back_edges (); | |
2430 num_iterations = 0; | |
2431 changed = true; | |
2432 while (changed) | |
2433 { | |
2434 if (dump_file && (dump_flags & TDF_DETAILS)) | |
2435 fprintf (dump_file, "Starting iteration %d\n", num_iterations); | |
2436 num_iterations++; | |
2437 changed = false; | |
2438 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++) | |
2439 { | |
2440 if (TEST_BIT (changed_blocks, postorder[i])) | |
2441 { | |
2442 basic_block block = BASIC_BLOCK (postorder[i]); | |
2443 changed | |
2444 |= compute_partial_antic_aux (block, | |
2445 TEST_BIT (has_abnormal_preds, | |
2446 block->index)); | |
2447 } | |
2448 } | |
2449 #ifdef ENABLE_CHECKING | |
2450 /* Theoretically possible, but *highly* unlikely. */ | |
2451 gcc_assert (num_iterations < 500); | |
2452 #endif | |
2453 } | |
2454 statistics_histogram_event (cfun, "compute_partial_antic iterations", | |
2455 num_iterations); | |
2456 } | |
2457 sbitmap_free (has_abnormal_preds); | |
2458 sbitmap_free (changed_blocks); | |
2459 } | |
2460 | |
2461 /* Return true if we can value number the call in STMT. This is true | |
2462 if we have a pure or constant call. */ | |
2463 | |
2464 static bool | |
2465 can_value_number_call (gimple stmt) | |
2466 { | |
2467 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)) | |
2468 return true; | |
2469 return false; | |
2470 } | |
2471 | |
2472 /* Return true if OP is an exception handler related operation, such as | |
2473 FILTER_EXPR or EXC_PTR_EXPR. */ | |
2474 | |
2475 static bool | |
2476 is_exception_related (gimple stmt) | |
2477 { | |
2478 return (is_gimple_assign (stmt) | |
2479 && (gimple_assign_rhs_code (stmt) == FILTER_EXPR | |
2480 || gimple_assign_rhs_code (stmt) == EXC_PTR_EXPR)); | |
2481 } | |
2482 | |
2483 /* Return true if OP is a tree which we can perform PRE on | |
2484 on. This may not match the operations we can value number, but in | |
2485 a perfect world would. */ | |
2486 | |
2487 static bool | |
2488 can_PRE_operation (tree op) | |
2489 { | |
2490 return UNARY_CLASS_P (op) | |
2491 || BINARY_CLASS_P (op) | |
2492 || COMPARISON_CLASS_P (op) | |
2493 || TREE_CODE (op) == INDIRECT_REF | |
2494 || TREE_CODE (op) == COMPONENT_REF | |
2495 || TREE_CODE (op) == VIEW_CONVERT_EXPR | |
2496 || TREE_CODE (op) == CALL_EXPR | |
2497 || TREE_CODE (op) == ARRAY_REF; | |
2498 } | |
2499 | |
2500 | |
/* Inserted expressions are placed onto this worklist, which is used
   for performing quick dead code elimination of insertions we made
   that didn't turn out to be necessary.  */
static VEC(gimple,heap) *inserted_exprs;

/* Pool allocated fake store expressions are placed onto this
   worklist, which, after performing dead code elimination, is walked
   to see which expressions need to be put into GC'able memory.  */
static VEC(gimple, heap) *need_creation;
2510 | |
/* The actual worker for create_component_ref_by_pieces.

   *OPERAND is the index into REF->operands of the vn_reference_op to
   process; it is advanced past every operand consumed, including the
   ones eaten by recursive calls, so that iteration over a reference's
   operand list proceeds left to right.  New statements needed to
   materialize leaders are appended to STMTS.  Returns the generated
   tree, or NULL_TREE when DOMSTMT is non-NULL and a required leader
   could not be found/generated (see find_or_generate_expression).  */

static tree
create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
				  unsigned int *operand, gimple_seq *stmts,
				  gimple domstmt)
{
  vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
					*operand);
  tree genop;
  ++*operand;
  switch (currop->opcode)
    {
    case CALL_EXPR:
      {
	/* op0 is the callee, op1 the static chain (if any); all
	   remaining reference ops are the call's arguments.  */
	tree folded, sc = currop->op1;
	unsigned int nargs = 0;
	tree *args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
						ref->operands) - 1);
	while (*operand < VEC_length (vn_reference_op_s, ref->operands))
	  {
	    args[nargs] = create_component_ref_by_pieces_1 (block, ref,
							    operand, stmts,
							    domstmt);
	    nargs++;
	  }
	folded = build_call_array (currop->type,
				   TREE_CODE (currop->op0) == FUNCTION_DECL
				   ? build_fold_addr_expr (currop->op0)
				   : currop->op0,
				   nargs, args);
	free (args);
	if (sc)
	  {
	    pre_expr scexpr = get_or_alloc_expr_for (sc);
	    sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
	    if (!sc)
	      return NULL_TREE;
	    CALL_EXPR_STATIC_CHAIN (folded) = sc;
	  }
	return folded;
      }
      break;
    case ADDR_EXPR:
      if (currop->op0)
	{
	  /* A constant address is stored directly in op0.  */
	  gcc_assert (is_gimple_min_invariant (currop->op0));
	  return currop->op0;
	}
      /* Fallthrough.  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary wrappers: rebuild the single operand, then wrap it.  */
	tree folded;
	tree genop0 = create_component_ref_by_pieces_1 (block, ref,
							operand,
							stmts, domstmt);
	if (!genop0)
	  return NULL_TREE;
	folded = fold_build1 (currop->opcode, currop->type,
			      genop0);
	return folded;
      }
      break;
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case INDIRECT_REF:
      {
	tree folded;
	tree genop1 = create_component_ref_by_pieces_1 (block, ref,
							operand,
							stmts, domstmt);
	if (!genop1)
	  return NULL_TREE;
	/* The recreated base may have a different pointer type than
	   the dereference expects; convert it.  */
	genop1 = fold_convert (build_pointer_type (currop->type),
			       genop1);

	if (currop->opcode == MISALIGNED_INDIRECT_REF)
	  folded = fold_build2 (currop->opcode, currop->type,
				genop1, currop->op1);
	else
	  folded = fold_build1 (currop->opcode, currop->type,
				genop1);
	return folded;
      }
      break;
    case BIT_FIELD_REF:
      {
	/* op0/op1 of the reference op hold the bit size and position.  */
	tree folded;
	tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
							stmts, domstmt);
	pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
	pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
	tree genop1;
	tree genop2;

	if (!genop0)
	  return NULL_TREE;
	genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
	if (!genop1)
	  return NULL_TREE;
	genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
	if (!genop2)
	  return NULL_TREE;
	folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
			      genop2);
	return folded;
      }

      /* For array ref vn_reference_op's, operand 1 of the array ref
	 is op0 of the reference op and operand 3 of the array ref is
	 op1.  */
    case ARRAY_RANGE_REF:
    case ARRAY_REF:
      {
	tree genop0;
	tree genop1 = currop->op0;
	pre_expr op1expr;
	tree genop2 = currop->op1;
	pre_expr op2expr;
	tree genop3;
	genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
						   stmts, domstmt);
	if (!genop0)
	  return NULL_TREE;
	op1expr = get_or_alloc_expr_for (genop1);
	genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
	if (!genop1)
	  return NULL_TREE;
	/* The low bound (op1) is optional; NULL is passed through to
	   build4 unchanged.  */
	if (genop2)
	  {
	    op2expr = get_or_alloc_expr_for (genop2);
	    genop2 = find_or_generate_expression (block, op2expr, stmts,
						  domstmt);
	    if (!genop2)
	      return NULL_TREE;
	  }

	genop3 = currop->op2;
	return build4 (currop->opcode, currop->type, genop0, genop1,
		       genop2, genop3);
      }
    case COMPONENT_REF:
      {
	tree op0;
	tree op1;
	tree genop2 = currop->op1;
	pre_expr op2expr;
	op0 = create_component_ref_by_pieces_1 (block, ref, operand,
						stmts, domstmt);
	if (!op0)
	  return NULL_TREE;
	/* op1 should be a FIELD_DECL, which are represented by
	   themselves.  */
	op1 = currop->op0;
	if (genop2)
	  {
	    op2expr = get_or_alloc_expr_for (genop2);
	    genop2 = find_or_generate_expression (block, op2expr, stmts,
						  domstmt);
	    if (!genop2)
	      return NULL_TREE;
	  }

	return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
			    genop2);
      }
      break;
    case SSA_NAME:
      {
	pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
	genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
	return genop;
      }
    case STRING_CST:
    case INTEGER_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case REAL_CST:
    case CONSTRUCTOR:
    case VAR_DECL:
    case PARM_DECL:
    case CONST_DECL:
    case RESULT_DECL:
    case FUNCTION_DECL:
      /* Leaf operands are stored directly in op0.  */
      return currop->op0;

    default:
      gcc_unreachable ();
    }
}
2703 | |
2704 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the | |
2705 COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with | |
2706 trying to rename aggregates into ssa form directly, which is a no no. | |
2707 | |
2708 Thus, this routine doesn't create temporaries, it just builds a | |
2709 single access expression for the array, calling | |
2710 find_or_generate_expression to build the innermost pieces. | |
2711 | |
2712 This function is a subroutine of create_expression_by_pieces, and | |
2713 should not be called on it's own unless you really know what you | |
2714 are doing. */ | |
2715 | |
2716 static tree | |
2717 create_component_ref_by_pieces (basic_block block, vn_reference_t ref, | |
2718 gimple_seq *stmts, gimple domstmt) | |
2719 { | |
2720 unsigned int op = 0; | |
2721 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt); | |
2722 } | |
2723 | |
2724 /* Find a leader for an expression, or generate one using | |
2725 create_expression_by_pieces if it's ANTIC but | |
2726 complex. | |
2727 BLOCK is the basic_block we are looking for leaders in. | |
2728 EXPR is the expression to find a leader or generate for. | |
2729 STMTS is the statement list to put the inserted expressions on. | |
2730 Returns the SSA_NAME of the LHS of the generated expression or the | |
2731 leader. | |
2732 DOMSTMT if non-NULL is a statement that should be dominated by | |
2733 all uses in the generated expression. If DOMSTMT is non-NULL this | |
2734 routine can fail and return NULL_TREE. Otherwise it will assert | |
2735 on failure. */ | |
2736 | |
2737 static tree | |
2738 find_or_generate_expression (basic_block block, pre_expr expr, | |
2739 gimple_seq *stmts, gimple domstmt) | |
2740 { | |
2741 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), | |
2742 get_expr_value_id (expr), domstmt); | |
2743 tree genop = NULL; | |
2744 if (leader) | |
2745 { | |
2746 if (leader->kind == NAME) | |
2747 genop = PRE_EXPR_NAME (leader); | |
2748 else if (leader->kind == CONSTANT) | |
2749 genop = PRE_EXPR_CONSTANT (leader); | |
2750 } | |
2751 | |
2752 /* If it's still NULL, it must be a complex expression, so generate | |
2753 it recursively. Not so for FRE though. */ | |
2754 if (genop == NULL | |
2755 && !in_fre) | |
2756 { | |
2757 bitmap_set_t exprset; | |
2758 unsigned int lookfor = get_expr_value_id (expr); | |
2759 bool handled = false; | |
2760 bitmap_iterator bi; | |
2761 unsigned int i; | |
2762 | |
2763 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor); | |
2764 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi) | |
2765 { | |
2766 pre_expr temp = expression_for_id (i); | |
2767 if (temp->kind != NAME) | |
2768 { | |
2769 handled = true; | |
2770 genop = create_expression_by_pieces (block, temp, stmts, | |
2771 domstmt, | |
2772 get_expr_type (expr)); | |
2773 break; | |
2774 } | |
2775 } | |
2776 if (!handled && domstmt) | |
2777 return NULL_TREE; | |
2778 | |
2779 gcc_assert (handled); | |
2780 } | |
2781 return genop; | |
2782 } | |
2783 | |
/* Pass-local gimple statement flag (GF_PLF_1) used by the insertion
   machinery below — presumably to mark inserted statements whose
   value turned out to be needed during the dead code elimination over
   inserted_exprs; TODO confirm against the elimination code.  */
#define NECESSARY GF_PLF_1
/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessary GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).

   If DOMSTMT is non-NULL then we make sure that all uses in the
   expressions dominate that statement.  In this case the function
   can return NULL_TREE to signal failure.

   On success, returns the SSA name holding the value of EXPR in BLOCK;
   the defining statements have been appended to STMTS.  */

static tree
create_expression_by_pieces (basic_block block, pre_expr expr,
			     gimple_seq *stmts, gimple domstmt, tree type)
{
  tree temp, name;
  tree folded, newexpr;
  gimple_seq forced_stmts;
  unsigned int value_id;
  gimple_stmt_iterator gsi;
  /* TYPE may be NULL; in that case use the natural type of EXPR.  */
  tree exprtype = type ? type : get_expr_type (expr);
  pre_expr nameexpr;
  gimple newstmt;

  /* First build a GENERIC tree FOLDED for the value of EXPR, generating
     any needed operands recursively.  */
  switch (expr->kind)
    {
      /* We may hit the NAME/CONSTANT case if we have to convert types
	 that value numbering saw through.  */
    case NAME:
      folded = PRE_EXPR_NAME (expr);
      break;
    case CONSTANT:
      folded = PRE_EXPR_CONSTANT (expr);
      break;
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
      }
      break;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	/* Only unary and binary operations are handled here; anything
	   else signals failure by returning NULL_TREE.  */
	switch (nary->length)
	  {
	  case 2:
	    {
	      pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
	      pre_expr op2 = get_or_alloc_expr_for (nary->op[1]);
	      tree genop1 = find_or_generate_expression (block, op1,
							 stmts, domstmt);
	      tree genop2 = find_or_generate_expression (block, op2,
							 stmts, domstmt);
	      /* Either operand may fail to be generated when DOMSTMT
		 constrains the available leaders.  */
	      if (!genop1 || !genop2)
		return NULL_TREE;
	      genop1 = fold_convert (TREE_TYPE (nary->op[0]),
				     genop1);
	      /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR.  It
		 may be a constant with the wrong type.  */
	      if (nary->opcode == POINTER_PLUS_EXPR)
		genop2 = fold_convert (sizetype, genop2);
	      else
		genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);

	      folded = fold_build2 (nary->opcode, nary->type,
				    genop1, genop2);
	    }
	    break;
	  case 1:
	    {
	      pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
	      tree genop1 = find_or_generate_expression (block, op1,
							 stmts, domstmt);
	      if (!genop1)
		return NULL_TREE;
	      genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);

	      folded = fold_build1 (nary->opcode, nary->type,
				    genop1);
	    }
	    break;
	  default:
	    return NULL_TREE;
	  }
      }
      break;
    default:
      return NULL_TREE;
    }
  folded = fold_convert (exprtype, folded);
  /* Force the generated expression to be a sequence of GIMPLE
     statements.
     We have to call unshare_expr because force_gimple_operand may
     modify the tree we pass to it.  */
  newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts,
				  false, NULL);

  /* If we have any intermediate expressions to the value sets, add them
     to the value sets and chain them in the instruction stream.  */
  if (forced_stmts)
    {
      gsi = gsi_start (forced_stmts);
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree forcedname = gimple_get_lhs (stmt);
	  pre_expr nameexpr;

	  /* Remember every statement we insert so it can be removed
	     again if it turns out to be unnecessary.  */
	  VEC_safe_push (gimple, heap, inserted_exprs, stmt);
	  if (TREE_CODE (forcedname) == SSA_NAME)
	    {
	      /* Give the intermediate result its own fresh value
	         number and make it available in BLOCK.  */
	      VN_INFO_GET (forcedname)->valnum = forcedname;
	      VN_INFO (forcedname)->value_id = get_next_value_id ();
	      nameexpr = get_or_alloc_expr_for_name (forcedname);
	      add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
	      if (!in_fre)
		bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
	      bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
	    }
	  mark_symbols_for_renaming (stmt);
	}
      gimple_seq_add_seq (stmts, forced_stmts);
    }

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  The cached PRETEMP variable is reused across
     calls as long as the type matches.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_var (exprtype, "pretmp");
      get_var_ann (pretemp);
    }

  temp = pretemp;
  add_referenced_var (temp);

  if (TREE_CODE (exprtype) == COMPLEX_TYPE
      || TREE_CODE (exprtype) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (temp) = 1;

  newstmt = gimple_build_assign (temp, newexpr);
  name = make_ssa_name (temp, newstmt);
  gimple_assign_set_lhs (newstmt, name);
  gimple_set_plf (newstmt, NECESSARY, false);

  gimple_seq_add_stmt (stmts, newstmt);
  VEC_safe_push (gimple, heap, inserted_exprs, newstmt);

  /* All the symbols in NEWEXPR should be put into SSA form.  */
  mark_symbols_for_renaming (newstmt);

  /* Add a value number to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
  VN_INFO_GET (name)->valnum = name;
  value_id = get_expr_value_id (expr);
  VN_INFO (name)->value_id = value_id;
  nameexpr = get_or_alloc_expr_for_name (name);
  add_to_value (value_id, nameexpr);
  if (!in_fre)
    bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
  bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, newstmt, 0, 0);
      fprintf (dump_file, " in predecessor %d\n", block->index);
    }

  return name;
}
2967 | |
2968 | |
/* Insert the to-be-made-available values of expression EXPRNUM for each
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
   merge the result with a phi node, given the same value number as
   NODE.  Return true if we have inserted new stuff.

   AVAIL is indexed by predecessor basic-block index and holds, for each
   predecessor, either an available leader or the translated expression
   that must be materialized there.  */

static bool
insert_into_preds_of_block (basic_block block, unsigned int exprnum,
			    pre_expr *avail)
{
  pre_expr expr = expression_for_id (exprnum);
  pre_expr newphi;
  unsigned int val = get_expr_value_id (expr);
  edge pred;
  bool insertions = false;
  bool nophi = false;
  basic_block bprime;
  pre_expr eprime;
  edge_iterator ei;
  tree type = get_expr_type (expr);
  tree temp;
  gimple phi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Found partial redundancy for expression ");
      print_pre_expr (dump_file, expr);
      fprintf (dump_file, " (%04d)\n", val);
    }

  /* Make sure we aren't creating an induction variable.  */
  if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2
      && expr->kind != REFERENCE)
    {
      bool firstinsideloop = false;
      bool secondinsideloop = false;
      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
					       EDGE_PRED (block, 0)->src);
      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
						EDGE_PRED (block, 1)->src);
      /* Induction variables only have one edge inside the loop.  */
      if (firstinsideloop ^ secondinsideloop)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
	  /* We still perform the insertions below, but suppress the
	     phi node merging them.  */
	  nophi = true;
	}
    }

  /* Make sure we are not inserting trapping expressions.  */
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      bprime = pred->src;
      eprime = avail[bprime->index];
      if (eprime->kind == NARY
	  && vn_nary_may_trap (PRE_EXPR_NARY (eprime)))
	return false;
    }

  /* Make the necessary insertions.  For each predecessor, materialize
     the translated expression unless a leader (NAME) or constant is
     already available; AVAIL is updated to the resulting SSA name.  */
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      gimple_seq stmts = NULL;
      tree builtexpr;
      bprime = pred->src;
      eprime = avail[bprime->index];

      if (eprime->kind != NAME && eprime->kind != CONSTANT)
	{
	  builtexpr = create_expression_by_pieces (bprime,
						   eprime,
						   &stmts, NULL,
						   type);
	  gcc_assert (!(pred->flags & EDGE_ABNORMAL));
	  gsi_insert_seq_on_edge (pred, stmts);
	  avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
	  insertions = true;
	}
      else if (eprime->kind == CONSTANT)
	{
	  /* Constants may not have the right type, fold_convert
	     should give us back a constant with the right type.
	     */
	  tree constant = PRE_EXPR_CONSTANT (eprime);
	  if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
	    {
	      tree builtexpr = fold_convert (type, constant);
	      if (!is_gimple_min_invariant (builtexpr))
		{
		  /* The conversion did not fold to an invariant;
		     gimplify it and insert the statements on the
		     edge.  */
		  tree forcedexpr = force_gimple_operand (builtexpr,
							  &stmts, true,
							  NULL);
		  if (!is_gimple_min_invariant (forcedexpr))
		    {
		      if (forcedexpr != builtexpr)
			{
			  /* The new name carries the value of the
			     original constant.  */
			  VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
			  VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
			}
		      if (stmts)
			{
			  gimple_stmt_iterator gsi;
			  gsi = gsi_start (stmts);
			  for (; !gsi_end_p (gsi); gsi_next (&gsi))
			    {
			      gimple stmt = gsi_stmt (gsi);
			      VEC_safe_push (gimple, heap, inserted_exprs, stmt);
			      gimple_set_plf (stmt, NECESSARY, false);
			    }
			  gsi_insert_seq_on_edge (pred, stmts);
			}
		      avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
		    }
		}
	    }
	}
      else if (eprime->kind == NAME)
	{
	  /* We may have to do a conversion because our value
	     numbering can look through types in certain cases, but
	     our IL requires all operands of a phi node have the same
	     type.  */
	  tree name = PRE_EXPR_NAME (eprime);
	  if (!useless_type_conversion_p (type, TREE_TYPE (name)))
	    {
	      tree builtexpr;
	      tree forcedexpr;
	      builtexpr = fold_convert (type, name);
	      forcedexpr = force_gimple_operand (builtexpr,
						 &stmts, true,
						 NULL);

	      if (forcedexpr != name)
		{
		  /* The converted name shares the original's value
		     number.  */
		  VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
		  VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
		}

	      if (stmts)
		{
		  gimple_stmt_iterator gsi;
		  gsi = gsi_start (stmts);
		  for (; !gsi_end_p (gsi); gsi_next (&gsi))
		    {
		      gimple stmt = gsi_stmt (gsi);
		      VEC_safe_push (gimple, heap, inserted_exprs, stmt);
		      gimple_set_plf (stmt, NECESSARY, false);
		    }
		  gsi_insert_seq_on_edge (pred, stmts);
		}
	      avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
	    }
	}
    }
  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
  if (nophi && insertions)
    return true;
  else if (nophi && !insertions)
    return false;

  /* Now build a phi for the new variable.  The cached PREPHITEMP
     variable is reused across calls as long as the type matches.  */
  if (!prephitemp || TREE_TYPE (prephitemp) != type)
    {
      prephitemp = create_tmp_var (type, "prephitmp");
      get_var_ann (prephitemp);
    }

  temp = prephitemp;
  add_referenced_var (temp);

  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (temp) = 1;
  phi = create_phi_node (temp, block);

  gimple_set_plf (phi, NECESSARY, false);
  /* The phi result gets the value number of the expression we made
     redundant.  */
  VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
  VN_INFO (gimple_phi_result (phi))->value_id = val;
  VEC_safe_push (gimple, heap, inserted_exprs, phi);
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      pre_expr ae = avail[pred->src->index];
      gcc_assert (get_expr_type (ae) == type
		  || useless_type_conversion_p (type, get_expr_type (ae)));
      if (ae->kind == CONSTANT)
	add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred);
      else
	add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred);
    }

  newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
  add_to_value (val, newphi);

  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it's not existing in AVAIL_OUT, we want it
     inserted there.

     Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.
     */

  bitmap_insert_into_set (PHI_GEN (block), newphi);
  bitmap_value_replace_in_set (AVAIL_OUT (block),
			       newphi);
  bitmap_insert_into_set (NEW_SETS (block),
			  newphi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created phi ");
      print_gimple_stmt (dump_file, phi, 0, 0);
      fprintf (dump_file, " in block %d\n", block->index);
    }
  pre_stats.phis++;
  return true;
}
3193 | |
3194 | |
3195 | |
3196 /* Perform insertion of partially redundant values. | |
3197 For BLOCK, do the following: | |
3198 1. Propagate the NEW_SETS of the dominator into the current block. | |
3199 If the block has multiple predecessors, | |
3200 2a. Iterate over the ANTIC expressions for the block to see if | |
3201 any of them are partially redundant. | |
3202 2b. If so, insert them into the necessary predecessors to make | |
3203 the expression fully redundant. | |
3204 2c. Insert a new PHI merging the values of the predecessors. | |
3205 2d. Insert the new PHI, and the new expressions, into the | |
3206 NEW_SETS set. | |
3207 3. Recursively call ourselves on the dominator children of BLOCK. | |
3208 | |
3209 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by | |
3210 do_regular_insertion and do_partial_insertion. | |
3211 | |
3212 */ | |
3213 | |
/* Perform steps 2a-2d of the insertion algorithm (see the comment
   above) for BLOCK, whose immediate dominator is DOM: find ANTIC_IN
   expressions that are available in some but not all predecessors,
   make them fully redundant by inserting into the predecessors that
   miss them, and merge with a phi node.  Return true if anything new
   was inserted.  */

static bool
do_regular_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
  pre_expr expr;
  int i;

  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      /* NAMEs are always available where anticipated; nothing to
	 insert for them.  */
      if (expr->kind != NAME)
	{
	  pre_expr *avail;
	  unsigned int val;
	  /* Whether some predecessor already computes the value.  */
	  bool by_some = false;
	  bool cant_insert = false;
	  /* Whether every predecessor yields the same leader.  */
	  bool all_same = true;
	  pre_expr first_s = NULL;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;
	  pre_expr edoubleprime = NULL;

	  val = get_expr_value_id (expr);
	  /* A phi for this value already exists here.  */
	  if (bitmap_set_contains_value (PHI_GEN (block), val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Found fully redundant value\n");
	      continue;
	    }

	  avail = XCNEWVEC (pre_expr, last_basic_block);
	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;

	      /* We should never run insertion for the exit block
		 and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      eprime = phi_translate (expr, ANTIC_IN (block), NULL,
				      bprime, block);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  cant_insert = true;
		  break;
		}

	      eprime = fully_constant_expression (eprime);
	      vprime = get_expr_value_id (eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
						 vprime, NULL);
	      if (edoubleprime == NULL)
		{
		  /* Not available here; remember the translated
		     expression so it can be inserted later.  */
		  avail[bprime->index] = eprime;
		  all_same = false;
		}
	      else
		{
		  avail[bprime->index] = edoubleprime;
		  by_some = true;
		  if (first_s == NULL)
		    first_s = edoubleprime;
		  else if (!pre_expr_eq (first_s, edoubleprime))
		    all_same = false;
		}
	    }
	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && !all_same && by_some && dbg_cnt (treepre_insert))
	    {
	      if (insert_into_preds_of_block (block, get_expression_id (expr),
					      avail))
		new_stuff = true;
	    }
	  /* If all edges produce the same value and that value is
	     an invariant, then the PHI has the same value on all
	     edges.  Note this.  */
	  else if (!cant_insert && all_same && eprime
		   && (edoubleprime->kind == CONSTANT
		       || edoubleprime->kind == NAME)
		   && !value_id_constant_p (val))
	    {
	      unsigned int j;
	      bitmap_iterator bi;
	      bitmap_set_t exprset = VEC_index (bitmap_set_t,
						value_expressions, val);

	      unsigned int new_val = get_expr_value_id (edoubleprime);
	      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
		{
		  pre_expr expr = expression_for_id (j);

		  if (expr->kind == NAME)
		    {
		      vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
		      /* Just reset the value id and valnum so it is
			 the same as the constant we have discovered.  */
		      if (edoubleprime->kind == CONSTANT)
			{
			  info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
			  pre_stats.constified++;
			}
		      else
			info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
		      info->value_id = new_val;
		    }
		}
	    }
	  free (avail);
	}
    }

  VEC_free (pre_expr, heap, exprs);
  return new_stuff;
}
3345 | |
3346 | |
/* Perform insertion for partially anticipatable expressions.  There
   is only one case we will perform insertion for these.  This case is
   if the expression is partially anticipatable, and fully available.
   In this case, we know that putting it earlier will enable us to
   remove the later computation.

   BLOCK is the block being processed, DOM its immediate dominator.
   Return true if anything new was inserted.  */


static bool
do_partial_partial_insertion (basic_block block, basic_block dom)
{
  bool new_stuff = false;
  VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
  pre_expr expr;
  int i;

  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      /* NAMEs need no insertion.  */
      if (expr->kind != NAME)
	{
	  pre_expr *avail;
	  unsigned int val;
	  /* Unlike regular insertion, we only insert when the value is
	     available in ALL predecessors.  */
	  bool by_all = true;
	  bool cant_insert = false;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;

	  val = get_expr_value_id (expr);
	  /* Skip values already merged by a phi here or fully
	     redundant via the dominator.  */
	  if (bitmap_set_contains_value (PHI_GEN (block), val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
	    continue;

	  avail = XCNEWVEC (pre_expr, last_basic_block);
	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;
	      pre_expr edoubleprime;

	      /* We should never run insertion for the exit block
		 and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      eprime = phi_translate (expr, ANTIC_IN (block),
				      PA_IN (block),
				      bprime, block);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  cant_insert = true;
		  break;
		}

	      eprime = fully_constant_expression (eprime);
	      vprime = get_expr_value_id (eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
						 vprime, NULL);
	      if (edoubleprime == NULL)
		{
		  by_all = false;
		  break;
		}
	      else
		avail[bprime->index] = edoubleprime;

	    }

	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && by_all && dbg_cnt (treepre_insert))
	    {
	      pre_stats.pa_insert++;
	      if (insert_into_preds_of_block (block, get_expression_id (expr),
					      avail))
		new_stuff = true;
	    }
	  free (avail);
	}
    }

  VEC_free (pre_expr, heap, exprs);
  return new_stuff;
}
3442 | |
/* Recursive worker for insert: propagate NEW_SETS from the immediate
   dominator of BLOCK into BLOCK, perform regular (and, when enabled,
   partial-partial) insertion at merge points, then recurse into the
   dominator children of BLOCK.  Return true if anything new was
   inserted anywhere in the subtree.  */

static bool
insert_aux (basic_block block)
{
  basic_block son;
  bool new_stuff = false;

  if (block)
    {
      basic_block dom;
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	{
	  unsigned i;
	  bitmap_iterator bi;
	  bitmap_set_t newset = NEW_SETS (dom);
	  if (newset)
	    {
	      /* Note that we need to value_replace both NEW_SETS, and
		 AVAIL_OUT. For both the case of NEW_SETS, the value may be
		 represented by some non-simple expression here that we want
		 to replace it with.  */
	      FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
		{
		  pre_expr expr = expression_for_id (i);
		  bitmap_value_replace_in_set (NEW_SETS (block), expr);
		  bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
		}
	    }
	  /* Insertion is only interesting at merge points (multiple
	     predecessors).  */
	  if (!single_pred_p (block))
	    {
	      new_stuff |= do_regular_insertion (block, dom);
	      if (do_partial_partial)
		new_stuff |= do_partial_partial_insertion (block, dom);
	    }
	}
    }
  /* Walk the dominator children in turn.  */
  for (son = first_dom_son (CDI_DOMINATORS, block);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      new_stuff |= insert_aux (son);
    }

  return new_stuff;
}
3488 | |
3489 /* Perform insertion of partially redundant values. */ | |
3490 | |
3491 static void | |
3492 insert (void) | |
3493 { | |
3494 bool new_stuff = true; | |
3495 basic_block bb; | |
3496 int num_iterations = 0; | |
3497 | |
3498 FOR_ALL_BB (bb) | |
3499 NEW_SETS (bb) = bitmap_set_new (); | |
3500 | |
3501 while (new_stuff) | |
3502 { | |
3503 num_iterations++; | |
3504 new_stuff = insert_aux (ENTRY_BLOCK_PTR); | |
3505 } | |
3506 statistics_histogram_event (cfun, "insert iterations", num_iterations); | |
3507 } | |
3508 | |
3509 | |
3510 /* Add OP to EXP_GEN (block), and possibly to the maximal set if it is | |
3511 not defined by a phi node. | |
3512 PHI nodes can't go in the maximal sets because they are not in | |
3513 TMP_GEN, so it is possible to get into non-monotonic situations | |
3514 during ANTIC calculation, because it will *add* bits. */ | |
3515 | |
3516 static void | |
3517 add_to_exp_gen (basic_block block, tree op) | |
3518 { | |
3519 if (!in_fre) | |
3520 { | |
3521 pre_expr result; | |
3522 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op)) | |
3523 return; | |
3524 result = get_or_alloc_expr_for_name (op); | |
3525 bitmap_value_insert_into_set (EXP_GEN (block), result); | |
3526 if (TREE_CODE (op) != SSA_NAME | |
3527 || gimple_code (SSA_NAME_DEF_STMT (op)) != GIMPLE_PHI) | |
3528 bitmap_value_insert_into_set (maximal_set, result); | |
3529 } | |
3530 } | |
3531 | |
3532 /* Create value ids for PHI in BLOCK. */ | |
3533 | |
3534 static void | |
3535 make_values_for_phi (gimple phi, basic_block block) | |
3536 { | |
3537 tree result = gimple_phi_result (phi); | |
3538 | |
3539 /* We have no need for virtual phis, as they don't represent | |
3540 actual computations. */ | |
3541 if (is_gimple_reg (result)) | |
3542 { | |
3543 pre_expr e = get_or_alloc_expr_for_name (result); | |
3544 add_to_value (get_expr_value_id (e), e); | |
3545 bitmap_insert_into_set (PHI_GEN (block), e); | |
3546 bitmap_value_insert_into_set (AVAIL_OUT (block), e); | |
3547 } | |
3548 } | |
3549 | |
3550 /* Compute the AVAIL set for all basic blocks. | |
3551 | |
3552 This function performs value numbering of the statements in each basic | |
3553 block. The AVAIL sets are built from information we glean while doing | |
3554 this value numbering, since the AVAIL sets contain only one entry per | |
3555 value. | |
3556 | |
3557 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)]. | |
3558 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */ | |
3559 | |
3560 static void | |
3561 compute_avail (void) | |
3562 { | |
3563 | |
3564 basic_block block, son; | |
3565 basic_block *worklist; | |
3566 size_t sp = 0; | |
3567 tree param; | |
3568 | |
3569 /* For arguments with default definitions, we pretend they are | |
3570 defined in the entry block. */ | |
3571 for (param = DECL_ARGUMENTS (current_function_decl); | |
3572 param; | |
3573 param = TREE_CHAIN (param)) | |
3574 { | |
3575 if (gimple_default_def (cfun, param) != NULL) | |
3576 { | |
3577 tree def = gimple_default_def (cfun, param); | |
3578 pre_expr e = get_or_alloc_expr_for_name (def); | |
3579 | |
3580 add_to_value (get_expr_value_id (e), e); | |
3581 if (!in_fre) | |
3582 { | |
3583 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e); | |
3584 bitmap_value_insert_into_set (maximal_set, e); | |
3585 } | |
3586 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e); | |
3587 } | |
3588 } | |
3589 | |
3590 /* Likewise for the static chain decl. */ | |
3591 if (cfun->static_chain_decl) | |
3592 { | |
3593 param = cfun->static_chain_decl; | |
3594 if (gimple_default_def (cfun, param) != NULL) | |
3595 { | |
3596 tree def = gimple_default_def (cfun, param); | |
3597 pre_expr e = get_or_alloc_expr_for_name (def); | |
3598 | |
3599 add_to_value (get_expr_value_id (e), e); | |
3600 if (!in_fre) | |
3601 { | |
3602 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e); | |
3603 bitmap_value_insert_into_set (maximal_set, e); | |
3604 } | |
3605 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e); | |
3606 } | |
3607 } | |
3608 | |
3609 /* Allocate the worklist. */ | |
3610 worklist = XNEWVEC (basic_block, n_basic_blocks); | |
3611 | |
3612 /* Seed the algorithm by putting the dominator children of the entry | |
3613 block on the worklist. */ | |
3614 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR); | |
3615 son; | |
3616 son = next_dom_son (CDI_DOMINATORS, son)) | |
3617 worklist[sp++] = son; | |
3618 | |
3619 /* Loop until the worklist is empty. */ | |
3620 while (sp) | |
3621 { | |
3622 gimple_stmt_iterator gsi; | |
3623 gimple stmt; | |
3624 basic_block dom; | |
3625 unsigned int stmt_uid = 1; | |
3626 | |
3627 /* Pick a block from the worklist. */ | |
3628 block = worklist[--sp]; | |
3629 | |
3630 /* Initially, the set of available values in BLOCK is that of | |
3631 its immediate dominator. */ | |
3632 dom = get_immediate_dominator (CDI_DOMINATORS, block); | |
3633 if (dom) | |
3634 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom)); | |
3635 | |
3636 /* Generate values for PHI nodes. */ | |
3637 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi)) | |
3638 make_values_for_phi (gsi_stmt (gsi), block); | |
3639 | |
3640 /* Now compute value numbers and populate value sets with all | |
3641 the expressions computed in BLOCK. */ | |
3642 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi)) | |
3643 { | |
3644 ssa_op_iter iter; | |
3645 tree op; | |
3646 | |
3647 stmt = gsi_stmt (gsi); | |
3648 gimple_set_uid (stmt, stmt_uid++); | |
3649 | |
3650 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF) | |
3651 { | |
3652 pre_expr e = get_or_alloc_expr_for_name (op); | |
3653 | |
3654 add_to_value (get_expr_value_id (e), e); | |
3655 if (!in_fre) | |
3656 bitmap_insert_into_set (TMP_GEN (block), e); | |
3657 bitmap_value_insert_into_set (AVAIL_OUT (block), e); | |
3658 } | |
3659 | |
3660 if (gimple_has_volatile_ops (stmt) | |
3661 || stmt_could_throw_p (stmt)) | |
3662 continue; | |
3663 | |
3664 switch (gimple_code (stmt)) | |
3665 { | |
3666 case GIMPLE_RETURN: | |
3667 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE) | |
3668 add_to_exp_gen (block, op); | |
3669 continue; | |
3670 | |
3671 case GIMPLE_CALL: | |
3672 { | |
3673 vn_reference_t ref; | |
3674 unsigned int i; | |
3675 vn_reference_op_t vro; | |
3676 pre_expr result = NULL; | |
3677 VEC(vn_reference_op_s, heap) *ops = NULL; | |
3678 | |
3679 if (!can_value_number_call (stmt)) | |
3680 continue; | |
3681 | |
3682 copy_reference_ops_from_call (stmt, &ops); | |
3683 vn_reference_lookup_pieces (shared_vuses_from_stmt (stmt), | |
3684 ops, &ref, false); | |
3685 VEC_free (vn_reference_op_s, heap, ops); | |
3686 if (!ref) | |
3687 continue; | |
3688 | |
3689 for (i = 0; VEC_iterate (vn_reference_op_s, | |
3690 ref->operands, i, | |
3691 vro); i++) | |
3692 { | |
3693 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME) | |
3694 add_to_exp_gen (block, vro->op0); | |
3695 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME) | |
3696 add_to_exp_gen (block, vro->op1); | |
3697 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME) | |
3698 add_to_exp_gen (block, vro->op2); | |
3699 } | |
3700 result = (pre_expr) pool_alloc (pre_expr_pool); | |
3701 result->kind = REFERENCE; | |
3702 result->id = 0; | |
3703 PRE_EXPR_REFERENCE (result) = ref; | |
3704 | |
3705 get_or_alloc_expression_id (result); | |
3706 add_to_value (get_expr_value_id (result), result); | |
3707 if (!in_fre) | |
3708 { | |
3709 bitmap_value_insert_into_set (EXP_GEN (block), | |
3710 result); | |
3711 bitmap_value_insert_into_set (maximal_set, result); | |
3712 } | |
3713 continue; | |
3714 } | |
3715 | |
3716 case GIMPLE_ASSIGN: | |
3717 { | |
3718 pre_expr result = NULL; | |
3719 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))) | |
3720 { | |
3721 case tcc_unary: | |
3722 if (is_exception_related (stmt)) | |
3723 continue; | |
3724 case tcc_binary: | |
3725 case tcc_comparison: | |
3726 { | |
3727 vn_nary_op_t nary; | |
3728 unsigned int i; | |
3729 | |
3730 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1, | |
3731 gimple_assign_rhs_code (stmt), | |
3732 gimple_expr_type (stmt), | |
3733 gimple_assign_rhs1 (stmt), | |
3734 gimple_assign_rhs2 (stmt), | |
3735 NULL_TREE, NULL_TREE, &nary); | |
3736 | |
3737 if (!nary) | |
3738 continue; | |
3739 | |
3740 for (i = 0; i < nary->length; i++) | |
3741 if (TREE_CODE (nary->op[i]) == SSA_NAME) | |
3742 add_to_exp_gen (block, nary->op[i]); | |
3743 | |
3744 result = (pre_expr) pool_alloc (pre_expr_pool); | |
3745 result->kind = NARY; | |
3746 result->id = 0; | |
3747 PRE_EXPR_NARY (result) = nary; | |
3748 break; | |
3749 } | |
3750 | |
3751 case tcc_declaration: | |
3752 case tcc_reference: | |
3753 { | |
3754 vn_reference_t ref; | |
3755 unsigned int i; | |
3756 vn_reference_op_t vro; | |
3757 | |
3758 vn_reference_lookup (gimple_assign_rhs1 (stmt), | |
3759 shared_vuses_from_stmt (stmt), | |
3760 false, &ref); | |
3761 if (!ref) | |
3762 continue; | |
3763 | |
3764 for (i = 0; VEC_iterate (vn_reference_op_s, | |
3765 ref->operands, i, | |
3766 vro); i++) | |
3767 { | |
3768 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME) | |
3769 add_to_exp_gen (block, vro->op0); | |
3770 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME) | |
3771 add_to_exp_gen (block, vro->op1); | |
3772 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME) | |
3773 add_to_exp_gen (block, vro->op2); | |
3774 } | |
3775 result = (pre_expr) pool_alloc (pre_expr_pool); | |
3776 result->kind = REFERENCE; | |
3777 result->id = 0; | |
3778 PRE_EXPR_REFERENCE (result) = ref; | |
3779 break; | |
3780 } | |
3781 | |
3782 default: | |
3783 /* For any other statement that we don't | |
3784 recognize, simply add all referenced | |
3785 SSA_NAMEs to EXP_GEN. */ | |
3786 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE) | |
3787 add_to_exp_gen (block, op); | |
3788 continue; | |
3789 } | |
3790 | |
3791 get_or_alloc_expression_id (result); | |
3792 add_to_value (get_expr_value_id (result), result); | |
3793 if (!in_fre) | |
3794 { | |
3795 bitmap_value_insert_into_set (EXP_GEN (block), result); | |
3796 bitmap_value_insert_into_set (maximal_set, result); | |
3797 } | |
3798 | |
3799 continue; | |
3800 } | |
3801 default: | |
3802 break; | |
3803 } | |
3804 } | |
3805 | |
3806 /* Put the dominator children of BLOCK on the worklist of blocks | |
3807 to compute available sets for. */ | |
3808 for (son = first_dom_son (CDI_DOMINATORS, block); | |
3809 son; | |
3810 son = next_dom_son (CDI_DOMINATORS, son)) | |
3811 worklist[sp++] = son; | |
3812 } | |
3813 | |
3814 free (worklist); | |
3815 } | |
3816 | |
3817 /* Insert the expression for SSA_VN that SCCVN thought would be simpler | |
3818 than the available expressions for it. The insertion point is | |
3819 right before the first use in STMT. Returns the SSA_NAME that should | |
3820 be used for replacement. */ | |
3821 | |
3822 static tree | |
3823 do_SCCVN_insertion (gimple stmt, tree ssa_vn) | |
3824 { | |
3825 basic_block bb = gimple_bb (stmt); | |
3826 gimple_stmt_iterator gsi; | |
3827 gimple_seq stmts = NULL; | |
3828 tree expr; | |
3829 pre_expr e; | |
3830 | |
3831 /* First create a value expression from the expression we want | |
3832 to insert and associate it with the value handle for SSA_VN. */ | |
3833 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn)); | |
3834 if (e == NULL) | |
3835 return NULL_TREE; | |
3836 | |
3837 /* Then use create_expression_by_pieces to generate a valid | |
3838 expression to insert at this point of the IL stream. */ | |
3839 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL); | |
3840 if (expr == NULL_TREE) | |
3841 return NULL_TREE; | |
3842 gsi = gsi_for_stmt (stmt); | |
3843 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT); | |
3844 | |
3845 return expr; | |
3846 } | |
3847 | |
/* Eliminate fully redundant computations.  Walks every statement in
   every basic block; for each value-numbered LHS it looks for an
   available leader (an earlier computation of the same value) and
   replaces the statement's RHS with it.  Also folds GIMPLE_CONDs
   whose operands have known value numbers.  Returns a TODO_* mask
   (TODO_cleanup_cfg when a condition was folded, 0 otherwise).  */

static unsigned int
eliminate (void)
{
  basic_block b;
  unsigned int todo = 0;

  FOR_EACH_BB (b)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (b); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);

          /* Lookup the RHS of the expression, see if we have an
             available computation for it.  If so, replace the RHS with
             the available computation.  Skip SSA-name copies, plain
             constant assignments, statements with volatile operands
             and dead LHSs -- there is nothing to gain for those.  */
          if (gimple_has_lhs (stmt)
              && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
              && !gimple_assign_ssa_name_copy_p (stmt)
              && (!gimple_assign_single_p (stmt)
                  || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
              && !gimple_has_volatile_ops (stmt)
              && !has_zero_uses (gimple_get_lhs (stmt)))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree rhs = NULL_TREE;
              /* SPRIME is the replacement expression, if any.  */
              tree sprime = NULL;
              pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
              pre_expr sprimeexpr;

              if (gimple_assign_single_p (stmt))
                rhs = gimple_assign_rhs1 (stmt);

              /* Look for a leader for LHS's value that is available at
                 the end of this block.  */
              sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
                                               get_expr_value_id (lhsexpr),
                                               NULL);

              /* A leader can only be a constant or an SSA name; any
                 other expression kind here indicates internal
                 corruption.  */
              if (sprimeexpr)
                {
                  if (sprimeexpr->kind == CONSTANT)
                    sprime = PRE_EXPR_CONSTANT (sprimeexpr);
                  else if (sprimeexpr->kind == NAME)
                    sprime = PRE_EXPR_NAME (sprimeexpr);
                  else
                    gcc_unreachable ();
                }

              /* If there is no existing leader but SCCVN knows this
                 value is constant, use that constant.  */
              if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
                {
                  /* fold_convert makes the constant match LHS's type.  */
                  sprime = fold_convert (TREE_TYPE (lhs),
                                         VN_INFO (lhs)->valnum);

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Replaced ");
                      print_gimple_expr (dump_file, stmt, 0, 0);
                      fprintf (dump_file, " with ");
                      print_generic_expr (dump_file, sprime, 0);
                      fprintf (dump_file, " in ");
                      print_gimple_stmt (dump_file, stmt, 0, 0);
                    }
                  pre_stats.eliminations++;
                  propagate_tree_value_into_stmt (&i, sprime);
                  /* Re-fetch STMT; propagation may have replaced it.  */
                  stmt = gsi_stmt (i);
                  update_stmt (stmt);
                  continue;
                }

              /* If there is no existing usable leader but SCCVN thinks
                 it has an expression it wants to use as replacement,
                 insert that.  */
              if (!sprime || sprime == lhs)
                {
                  tree val = VN_INFO (lhs)->valnum;
                  if (val != VN_TOP
                      && TREE_CODE (val) == SSA_NAME
                      && VN_INFO (val)->needs_insertion
                      && can_PRE_operation (vn_get_expr_for (val)))
                    sprime = do_SCCVN_insertion (stmt, val);
                }

              /* Only replace when the replacement differs from the LHS
                 and copy propagation of an SSA-name RHS is legal.  */
              if (sprime
                  && sprime != lhs
                  && (rhs == NULL_TREE
                      || TREE_CODE (rhs) != SSA_NAME
                      || may_propagate_copy (rhs, sprime)))
                {
                  gcc_assert (sprime != rhs);

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Replaced ");
                      print_gimple_expr (dump_file, stmt, 0, 0);
                      fprintf (dump_file, " with ");
                      print_generic_expr (dump_file, sprime, 0);
                      fprintf (dump_file, " in ");
                      print_gimple_stmt (dump_file, stmt, 0, 0);
                    }

                  /* Mark the definition of the replacement as needed so
                     the DCE pass over insertions keeps it.  */
                  if (TREE_CODE (sprime) == SSA_NAME)
                    gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
                                    NECESSARY, true);
                  /* We need to make sure the new and old types actually match,
                     which may require adding a simple cast, which fold_convert
                     will do for us.  */
                  if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
                      && !useless_type_conversion_p (gimple_expr_type (stmt),
                                                     TREE_TYPE (sprime)))
                    sprime = fold_convert (gimple_expr_type (stmt), sprime);

                  pre_stats.eliminations++;
                  propagate_tree_value_into_stmt (&i, sprime);
                  stmt = gsi_stmt (i);
                  update_stmt (stmt);

                  /* If we removed EH side effects from the statement, clean
                     its EH information.  */
                  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
                    {
                      bitmap_set_bit (need_eh_cleanup,
                                      gimple_bb (stmt)->index);
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "  Removed EH side effects.\n");
                    }
                }
            }
          /* Visit COND_EXPRs and fold the comparison with the
             available value-numbers.  */
          else if (gimple_code (stmt) == GIMPLE_COND)
            {
              tree op0 = gimple_cond_lhs (stmt);
              tree op1 = gimple_cond_rhs (stmt);
              tree result;

              /* Substitute the value numbers for the operands before
                 attempting to fold the comparison.  */
              if (TREE_CODE (op0) == SSA_NAME)
                op0 = VN_INFO (op0)->valnum;
              if (TREE_CODE (op1) == SSA_NAME)
                op1 = VN_INFO (op1)->valnum;
              result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
                                    op0, op1);
              if (result && TREE_CODE (result) == INTEGER_CST)
                {
                  if (integer_zerop (result))
                    gimple_cond_make_false (stmt);
                  else
                    gimple_cond_make_true (stmt);
                  update_stmt (stmt);
                  /* Folding a condition may leave unreachable blocks;
                     request a CFG cleanup.  */
                  todo = TODO_cleanup_cfg;
                }
            }
        }
    }

  return todo;
}
4007 | |
4008 /* Borrow a bit of tree-ssa-dce.c for the moment. | |
4009 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though | |
4010 this may be a bit faster, and we may want critical edges kept split. */ | |
4011 | |
4012 /* If OP's defining statement has not already been determined to be necessary, | |
4013 mark that statement necessary. Return the stmt, if it is newly | |
4014 necessary. */ | |
4015 | |
4016 static inline gimple | |
4017 mark_operand_necessary (tree op) | |
4018 { | |
4019 gimple stmt; | |
4020 | |
4021 gcc_assert (op); | |
4022 | |
4023 if (TREE_CODE (op) != SSA_NAME) | |
4024 return NULL; | |
4025 | |
4026 stmt = SSA_NAME_DEF_STMT (op); | |
4027 gcc_assert (stmt); | |
4028 | |
4029 if (gimple_plf (stmt, NECESSARY) | |
4030 || gimple_nop_p (stmt)) | |
4031 return NULL; | |
4032 | |
4033 gimple_set_plf (stmt, NECESSARY, true); | |
4034 return stmt; | |
4035 } | |
4036 | |
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.   This simple DCE-like
   pass removes any insertions we made that weren't actually used.
   Works in two phases: first a worklist walk marks every statement
   transitively feeding a NECESSARY insertion, then a sweep deletes the
   inserted statements that were never marked.  */

static void
remove_dead_inserted_code (void)
{
  VEC(gimple,heap) *worklist = NULL;
  int i;
  gimple t;

  /* Seed the worklist with the insertions that eliminate () already
     marked NECESSARY.  */
  worklist = VEC_alloc (gimple, heap, VEC_length (gimple, inserted_exprs));
  for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
    {
      if (gimple_plf (t, NECESSARY))
        VEC_quick_push (gimple, worklist, t);
    }
  while (VEC_length (gimple, worklist) > 0)
    {
      t = VEC_pop (gimple, worklist);

      /* PHI nodes are somewhat special in that each PHI alternative has
         data and control dependencies.  All the statements feeding the
         PHI node's arguments are always necessary. */
      if (gimple_code (t) == GIMPLE_PHI)
        {
          unsigned k;

          /* Reserve room up front so VEC_quick_push below is safe.  */
          VEC_reserve (gimple, heap, worklist, gimple_phi_num_args (t));
          for (k = 0; k < gimple_phi_num_args (t); k++)
            {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
                {
                  gimple n = mark_operand_necessary (arg);
                  if (n)
                    VEC_quick_push (gimple, worklist, n);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary. */
          ssa_op_iter iter;
          tree use;

          /* The operands of VDEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (VDEF operands allow us to follow def-def
             links).  */

          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
            {
              gimple n = mark_operand_necessary (use);
              if (n)
                VEC_safe_push (gimple, heap, worklist, n);
            }
        }
    }

  /* Sweep: delete every insertion that was never marked NECESSARY.  */
  for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
    {
      if (!gimple_plf (t, NECESSARY))
        {
          gimple_stmt_iterator gsi;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unnecessary insertion:");
              print_gimple_stmt (dump_file, t, 0, 0);
            }

          /* PHI nodes need their own removal primitive.  */
          gsi = gsi_for_stmt (t);
          if (gimple_code (t) == GIMPLE_PHI)
            remove_phi_node (&gsi, true);
          else
            gsi_remove (&gsi, true);
          release_defs (t);
        }
    }
  VEC_free (gimple, heap, worklist);
}
4122 | |
/* Initialize data structures used by PRE.  DO_FRE is true when running
   as FRE only (no insertion), which skips allocation of MAXIMAL_SET.  */

static void
init_pre (bool do_fre)
{
  basic_block bb;

  /* Expression id 0 is reserved; slot 0 of EXPRESSIONS holds NULL so
     real expressions start at id 1.  */
  next_expression_id = 1;
  expressions = NULL;
  VEC_safe_push (pre_expr, heap, expressions, NULL);
  value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
  VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
                         get_max_value_id() + 1);

  in_fre = do_fre;

  inserted_exprs = NULL;
  need_creation = NULL;
  pretemp = NULL_TREE;
  storetemp = NULL_TREE;
  prephitemp = NULL_TREE;

  /* Fake exit edges are added so dataflow over infinite loops
     terminates; they are removed again in execute_pre.  */
  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));


  postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
  post_order_compute (postorder, false, false);

  /* Hang the per-block bitmap sets off each basic block's aux field;
     freed again in fini_pre.  */
  FOR_ALL_BB (bb)
    bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = htab_create (5110, expr_pred_trans_hash,
                                     expr_pred_trans_eq, free);
  expression_to_id = htab_create (num_ssa_names * 3,
                                  pre_expr_hash,
                                  pre_expr_eq, NULL);
  seen_during_translate = BITMAP_ALLOC (&grand_bitmap_obstack);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
                                       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
                                     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
  /* MAXIMAL_SET is only used by the insertion machinery, which FRE
     does not run.  */
  maximal_set = in_fre ? NULL : bitmap_set_new ();

  need_eh_cleanup = BITMAP_ALLOC (NULL);
}
4180 | |
4181 | |
/* Deallocate data structures used by PRE.  Mirrors init_pre; also
   performs any pending EH edge cleanup recorded during eliminate ().
   DO_FRE must match the value passed to init_pre.  */

static void
fini_pre (bool do_fre)
{
  basic_block bb;

  free (postorder);
  VEC_free (bitmap_set_t, heap, value_expressions);
  VEC_free (gimple, heap, inserted_exprs);
  VEC_free (gimple, heap, need_creation);
  /* Releasing the obstack frees all bitmap sets allocated from it.  */
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  htab_delete (phi_translate_table);
  htab_delete (expression_to_id);

  /* Release the per-block set structures hung off bb->aux by
     init_pre.  */
  FOR_ALL_BB (bb)
    {
      free (bb->aux);
      bb->aux = NULL;
    }

  free_dominance_info (CDI_POST_DOMINATORS);

  /* Purge dead EH edges from blocks where eliminate () removed EH
     side effects, then clean up the CFG.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* Loop structures were only initialized for the full PRE run.  */
  if (!do_fre)
    loop_optimizer_finalize ();
}
4218 | |
4219 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller | |
4220 only wants to do full redundancy elimination. */ | |
4221 | |
4222 static unsigned int | |
4223 execute_pre (bool do_fre ATTRIBUTE_UNUSED) | |
4224 { | |
4225 unsigned int todo = 0; | |
4226 | |
4227 do_partial_partial = optimize > 2; | |
4228 | |
4229 /* This has to happen before SCCVN runs because | |
4230 loop_optimizer_init may create new phis, etc. */ | |
4231 if (!do_fre) | |
4232 loop_optimizer_init (LOOPS_NORMAL); | |
4233 | |
4234 if (!run_scc_vn (do_fre)) | |
4235 { | |
4236 if (!do_fre) | |
4237 { | |
4238 remove_dead_inserted_code (); | |
4239 loop_optimizer_finalize (); | |
4240 } | |
4241 | |
4242 return 0; | |
4243 } | |
4244 init_pre (do_fre); | |
4245 | |
4246 | |
4247 /* Collect and value number expressions computed in each basic block. */ | |
4248 compute_avail (); | |
4249 | |
4250 if (dump_file && (dump_flags & TDF_DETAILS)) | |
4251 { | |
4252 basic_block bb; | |
4253 | |
4254 FOR_ALL_BB (bb) | |
4255 { | |
4256 print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index); | |
4257 print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", | |
4258 bb->index); | |
4259 print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", | |
4260 bb->index); | |
4261 } | |
4262 } | |
4263 | |
4264 /* Insert can get quite slow on an incredibly large number of basic | |
4265 blocks due to some quadratic behavior. Until this behavior is | |
4266 fixed, don't run it when he have an incredibly large number of | |
4267 bb's. If we aren't going to run insert, there is no point in | |
4268 computing ANTIC, either, even though it's plenty fast. */ | |
4269 if (!do_fre && n_basic_blocks < 4000) | |
4270 { | |
4271 compute_antic (); | |
4272 insert (); | |
4273 } | |
4274 | |
4275 /* Remove all the redundant expressions. */ | |
4276 todo |= eliminate (); | |
4277 | |
4278 statistics_counter_event (cfun, "Insertions", pre_stats.insertions); | |
4279 statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert); | |
4280 statistics_counter_event (cfun, "New PHIs", pre_stats.phis); | |
4281 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations); | |
4282 statistics_counter_event (cfun, "Constified", pre_stats.constified); | |
4283 | |
4284 /* Make sure to remove fake edges before committing our inserts. | |
4285 This makes sure we don't end up with extra critical edges that | |
4286 we would need to split. */ | |
4287 remove_fake_exit_edges (); | |
4288 gsi_commit_edge_inserts (); | |
4289 | |
4290 clear_expression_ids (); | |
4291 free_scc_vn (); | |
4292 if (!do_fre) | |
4293 remove_dead_inserted_code (); | |
4294 | |
4295 fini_pre (do_fre); | |
4296 | |
4297 return todo; | |
4298 } | |
4299 | |
4300 /* Gate and execute functions for PRE. */ | |
4301 | |
4302 static unsigned int | |
4303 do_pre (void) | |
4304 { | |
4305 return TODO_rebuild_alias | execute_pre (false); | |
4306 } | |
4307 | |
4308 static bool | |
4309 gate_pre (void) | |
4310 { | |
4311 /* PRE tends to generate bigger code. */ | |
4312 return flag_tree_pre != 0 && optimize_function_for_speed_p (cfun); | |
4313 } | |
4314 | |
/* Pass descriptor for SSA-PRE.  Registered with the pass manager;
   requires split critical edges, a CFG, SSA form and alias info.  */
struct gimple_opt_pass pass_pre =
{
 {
  GIMPLE_PASS,
  "pre",				/* name */
  gate_pre,				/* gate */
  do_pre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_PRE,				/* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa | PROP_alias,		/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};
4335 | |
4336 | |
4337 /* Gate and execute functions for FRE. */ | |
4338 | |
4339 static unsigned int | |
4340 execute_fre (void) | |
4341 { | |
4342 return execute_pre (true); | |
4343 } | |
4344 | |
4345 static bool | |
4346 gate_fre (void) | |
4347 { | |
4348 return flag_tree_fre != 0; | |
4349 } | |
4350 | |
/* Pass descriptor for FRE (full redundancy elimination).  Unlike
   pass_pre it does not require split critical edges and runs whenever
   -ftree-fre is enabled.  */
struct gimple_opt_pass pass_fre =
{
 {
  GIMPLE_PASS,
  "fre",				/* name */
  gate_fre,				/* gate */
  execute_fre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FRE,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
 }
};