comparison gcc/tree-outof-ssa.c @ 0:a06113de4d67
first commit
author    kent <kent@cr.ie.u-ryukyu.ac.jp>
date      Fri, 17 Jul 2009 14:47:48 +0900
parents
children  77e2b8dfacca
comparing -1:000000000000 with 0:a06113de4d67
1 /* Convert a program in SSA form into Normal form. | |
2 Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc. | |
3 Contributed by Andrew Macleod <amacleod@redhat.com> | |
4 | |
5 This file is part of GCC. | |
6 | |
7 GCC is free software; you can redistribute it and/or modify | |
8 it under the terms of the GNU General Public License as published by | |
9 the Free Software Foundation; either version 3, or (at your option) | |
10 any later version. | |
11 | |
12 GCC is distributed in the hope that it will be useful, | |
13 but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 GNU General Public License for more details. | |
16 | |
17 You should have received a copy of the GNU General Public License | |
18 along with GCC; see the file COPYING3. If not see | |
19 <http://www.gnu.org/licenses/>. */ | |
20 | |
21 #include "config.h" | |
22 #include "system.h" | |
23 #include "coretypes.h" | |
24 #include "tm.h" | |
25 #include "tree.h" | |
26 #include "ggc.h" | |
27 #include "basic-block.h" | |
28 #include "diagnostic.h" | |
29 #include "bitmap.h" | |
30 #include "tree-flow.h" | |
31 #include "timevar.h" | |
32 #include "tree-dump.h" | |
33 #include "tree-ssa-live.h" | |
34 #include "tree-pass.h" | |
35 #include "toplev.h" | |
36 | |
37 | |
38 /* Used to hold all the components required to do SSA PHI elimination. | |
39 The node list and the pred/succ list are simple linear lists; edges | |
40 are represented as pairs of nodes. | |
41 | |
42 The predecessor and successor list: Nodes are entered in pairs, where | |
43 [0] ->PRED, [1]->SUCC. All the even indexes in the array represent | |
44 predecessors, all the odd elements are successors. | |
45 | |
46 Rationale: | |
47 When implemented as bitmaps, the SSA->Normal times of very large programs | |
48 were dominated by clearing the interference graph. | |
49 | |
50 Typically this list of edges is extremely small since it only includes | |
51 PHI results and uses from a single edge which have not coalesced with | |
52 each other. This means that no virtual PHI nodes are included, and | |
53 empirical evidence suggests that the number of edges rarely exceeds | |
54 3, and in a bootstrap of GCC, the maximum size encountered was 7. | |
55 This also limits the number of possible nodes that are involved to | |
56 rarely more than 6, and in the bootstrap of gcc, the maximum number | |
57 of nodes encountered was 12. */ | |
58 | |
59 typedef struct _elim_graph { | |
60 /* Size of the elimination vectors. */ | |
61 int size; | |
62 | |
63 /* List of nodes in the elimination graph. */ | |
64 VEC(tree,heap) *nodes; | |
65 | |
66 /* The predecessor and successor edge list. */ | |
67 VEC(int,heap) *edge_list; | |
68 | |
69 /* Visited vector. */ | |
70 sbitmap visited; | |
71 | |
72 /* Stack for visited nodes. */ | |
73 VEC(int,heap) *stack; | |
74 | |
75 /* The variable partition map. */ | |
76 var_map map; | |
77 | |
78 /* Edge being eliminated by this graph. */ | |
79 edge e; | |
80 | |
81 /* List of constant copies to emit. These are pushed on in pairs. */ | |
82 VEC(tree,heap) *const_copies; | |
83 } *elim_graph; | |
84 | |
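/* The following is a minimal, self-contained sketch of the edge_list
   encoding described above, using plain ints instead of the GCC VEC type:
   edges are pushed in pairs, even indexes hold predecessor partitions, odd
   indexes hold successors, and removal simply overwrites a pair with -1.
   The toy_* names are illustrative only and do not exist in GCC; the block
   is wrapped in #if 0 so it does not affect compilation.  */
#if 0
#include <stdio.h>

static int toy_edges[32];
static int toy_count;

/* Mirror of elim_graph_add_edge: push PRED at an even index, SUCC at the
   following odd index.  */
static void
toy_add_edge (int pred, int succ)
{
  toy_edges[toy_count++] = pred;
  toy_edges[toy_count++] = succ;
}

/* Mirror of elim_graph_remove_succ_edge: find an edge whose predecessor is
   NODE, blank out the pair, and return the successor (or -1).  */
static int
toy_remove_succ_edge (int node)
{
  int x;
  for (x = 0; x < toy_count; x += 2)
    if (toy_edges[x] == node)
      {
        int y = toy_edges[x + 1];
        toy_edges[x] = -1;
        toy_edges[x + 1] = -1;
        return y;
      }
  return -1;
}

int
main (void)
{
  toy_add_edge (2, 5);
  toy_add_edge (5, 2);
  printf ("%d\n", toy_remove_succ_edge (2));  /* prints 5 */
  printf ("%d\n", toy_remove_succ_edge (2));  /* prints -1: pair removed */
  return 0;
}
#endif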
85 | |
86 /* Create a temporary variable based on the type of variable T. Use T's name | |
87 as the prefix. */ | |
88 | |
89 static tree | |
90 create_temp (tree t) | |
91 { | |
92 tree tmp; | |
93 const char *name = NULL; | |
94 tree type; | |
95 | |
96 if (TREE_CODE (t) == SSA_NAME) | |
97 t = SSA_NAME_VAR (t); | |
98 | |
99 gcc_assert (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL); | |
100 | |
101 type = TREE_TYPE (t); | |
102 tmp = DECL_NAME (t); | |
103 if (tmp) | |
104 name = IDENTIFIER_POINTER (tmp); | |
105 | |
106 if (name == NULL) | |
107 name = "temp"; | |
108 tmp = create_tmp_var (type, name); | |
109 | |
110 if (DECL_DEBUG_EXPR_IS_FROM (t) && DECL_DEBUG_EXPR (t)) | |
111 { | |
112 SET_DECL_DEBUG_EXPR (tmp, DECL_DEBUG_EXPR (t)); | |
113 DECL_DEBUG_EXPR_IS_FROM (tmp) = 1; | |
114 } | |
115 else if (!DECL_IGNORED_P (t)) | |
116 { | |
117 SET_DECL_DEBUG_EXPR (tmp, t); | |
118 DECL_DEBUG_EXPR_IS_FROM (tmp) = 1; | |
119 } | |
120 DECL_ARTIFICIAL (tmp) = DECL_ARTIFICIAL (t); | |
121 DECL_IGNORED_P (tmp) = DECL_IGNORED_P (t); | |
122 DECL_GIMPLE_REG_P (tmp) = DECL_GIMPLE_REG_P (t); | |
123 add_referenced_var (tmp); | |
124 | |
125 /* add_referenced_var will create the annotation and set up some | |
126 of the flags in the annotation. However, some flags we need to | |
127 inherit from our original variable. */ | |
128 set_symbol_mem_tag (tmp, symbol_mem_tag (t)); | |
129 if (is_call_clobbered (t)) | |
130 mark_call_clobbered (tmp, var_ann (t)->escape_mask); | |
131 if (bitmap_bit_p (gimple_call_used_vars (cfun), DECL_UID (t))) | |
132 bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (tmp)); | |
133 | |
134 return tmp; | |
135 } | |
136 | |
137 | |
138 /* This helper function will insert a copy from a constant or variable SRC to | |
139 variable DEST on edge E. */ | |
140 | |
141 static void | |
142 insert_copy_on_edge (edge e, tree dest, tree src) | |
143 { | |
144 gimple copy; | |
145 | |
146 copy = gimple_build_assign (dest, src); | |
147 set_is_used (dest); | |
148 | |
149 if (TREE_CODE (src) == ADDR_EXPR) | |
150 src = TREE_OPERAND (src, 0); | |
151 if (TREE_CODE (src) == VAR_DECL || TREE_CODE (src) == PARM_DECL) | |
152 set_is_used (src); | |
153 | |
154 if (dump_file && (dump_flags & TDF_DETAILS)) | |
155 { | |
156 fprintf (dump_file, | |
157 "Inserting a copy on edge BB%d->BB%d :", | |
158 e->src->index, | |
159 e->dest->index); | |
160 print_gimple_stmt (dump_file, copy, 0, dump_flags); | |
161 fprintf (dump_file, "\n"); | |
162 } | |
163 | |
164 gsi_insert_on_edge (e, copy); | |
165 } | |
166 | |
167 | |
168 /* Create an elimination graph with SIZE nodes and associated data | |
169 structures. */ | |
170 | |
171 static elim_graph | |
172 new_elim_graph (int size) | |
173 { | |
174 elim_graph g = (elim_graph) xmalloc (sizeof (struct _elim_graph)); | |
175 | |
176 g->nodes = VEC_alloc (tree, heap, 30); | |
177 g->const_copies = VEC_alloc (tree, heap, 20); | |
178 g->edge_list = VEC_alloc (int, heap, 20); | |
179 g->stack = VEC_alloc (int, heap, 30); | |
180 | |
181 g->visited = sbitmap_alloc (size); | |
182 | |
183 return g; | |
184 } | |
185 | |
186 | |
187 /* Empty elimination graph G. */ | |
188 | |
189 static inline void | |
190 clear_elim_graph (elim_graph g) | |
191 { | |
192 VEC_truncate (tree, g->nodes, 0); | |
193 VEC_truncate (int, g->edge_list, 0); | |
194 } | |
195 | |
196 | |
197 /* Delete elimination graph G. */ | |
198 | |
199 static inline void | |
200 delete_elim_graph (elim_graph g) | |
201 { | |
202 sbitmap_free (g->visited); | |
203 VEC_free (int, heap, g->stack); | |
204 VEC_free (int, heap, g->edge_list); | |
205 VEC_free (tree, heap, g->const_copies); | |
206 VEC_free (tree, heap, g->nodes); | |
207 free (g); | |
208 } | |
209 | |
210 | |
211 /* Return the number of nodes in graph G. */ | |
212 | |
213 static inline int | |
214 elim_graph_size (elim_graph g) | |
215 { | |
216 return VEC_length (tree, g->nodes); | |
217 } | |
218 | |
219 | |
220 /* Add NODE to graph G, if it doesn't exist already. */ | |
221 | |
222 static inline void | |
223 elim_graph_add_node (elim_graph g, tree node) | |
224 { | |
225 int x; | |
226 tree t; | |
227 | |
228 for (x = 0; VEC_iterate (tree, g->nodes, x, t); x++) | |
229 if (t == node) | |
230 return; | |
231 VEC_safe_push (tree, heap, g->nodes, node); | |
232 } | |
233 | |
234 | |
235 /* Add the edge PRED->SUCC to graph G. */ | |
236 | |
237 static inline void | |
238 elim_graph_add_edge (elim_graph g, int pred, int succ) | |
239 { | |
240 VEC_safe_push (int, heap, g->edge_list, pred); | |
241 VEC_safe_push (int, heap, g->edge_list, succ); | |
242 } | |
243 | |
244 | |
245 /* Remove an edge from graph G for which NODE is the predecessor, and | |
246 return the successor node. -1 is returned if there is no such edge. */ | |
247 | |
248 static inline int | |
249 elim_graph_remove_succ_edge (elim_graph g, int node) | |
250 { | |
251 int y; | |
252 unsigned x; | |
253 for (x = 0; x < VEC_length (int, g->edge_list); x += 2) | |
254 if (VEC_index (int, g->edge_list, x) == node) | |
255 { | |
256 VEC_replace (int, g->edge_list, x, -1); | |
257 y = VEC_index (int, g->edge_list, x + 1); | |
258 VEC_replace (int, g->edge_list, x + 1, -1); | |
259 return y; | |
260 } | |
261 return -1; | |
262 } | |
263 | |
264 | |
265 /* Find all the nodes in GRAPH which are successors to NODE in the | |
266 edge list. VAR will hold the partition number found. CODE is the | |
267 code fragment executed for every node found. */ | |
268 | |
269 #define FOR_EACH_ELIM_GRAPH_SUCC(GRAPH, NODE, VAR, CODE) \ | |
270 do { \ | |
271 unsigned x_; \ | |
272 int y_; \ | |
273 for (x_ = 0; x_ < VEC_length (int, (GRAPH)->edge_list); x_ += 2) \ | |
274 { \ | |
275 y_ = VEC_index (int, (GRAPH)->edge_list, x_); \ | |
276 if (y_ != (NODE)) \ | |
277 continue; \ | |
278 (VAR) = VEC_index (int, (GRAPH)->edge_list, x_ + 1); \ | |
279 CODE; \ | |
280 } \ | |
281 } while (0) | |
282 | |
283 | |
284 /* Find all the nodes which are predecessors of NODE in the edge list for | |
285 GRAPH. VAR will hold the partition number found. CODE is the | |
286 code fragment executed for every node found. */ | |
287 | |
288 #define FOR_EACH_ELIM_GRAPH_PRED(GRAPH, NODE, VAR, CODE) \ | |
289 do { \ | |
290 unsigned x_; \ | |
291 int y_; \ | |
292 for (x_ = 0; x_ < VEC_length (int, (GRAPH)->edge_list); x_ += 2) \ | |
293 { \ | |
294 y_ = VEC_index (int, (GRAPH)->edge_list, x_ + 1); \ | |
295 if (y_ != (NODE)) \ | |
296 continue; \ | |
297 (VAR) = VEC_index (int, (GRAPH)->edge_list, x_); \ | |
298 CODE; \ | |
299 } \ | |
300 } while (0) | |
301 | |
302 | |
303 /* Add T to elimination graph G. */ | |
304 | |
305 static inline void | |
306 eliminate_name (elim_graph g, tree T) | |
307 { | |
308 elim_graph_add_node (g, T); | |
309 } | |
310 | |
311 | |
312 /* Build elimination graph G for basic block BB on incoming PHI edge | |
313 G->e. */ | |
314 | |
315 static void | |
316 eliminate_build (elim_graph g, basic_block B) | |
317 { | |
318 tree T0, Ti; | |
319 int p0, pi; | |
320 gimple_stmt_iterator gsi; | |
321 | |
322 clear_elim_graph (g); | |
323 | |
324 for (gsi = gsi_start_phis (B); !gsi_end_p (gsi); gsi_next (&gsi)) | |
325 { | |
326 gimple phi = gsi_stmt (gsi); | |
327 | |
328 T0 = var_to_partition_to_var (g->map, gimple_phi_result (phi)); | |
329 | |
330 /* Ignore results which are not in partitions. */ | |
331 if (T0 == NULL_TREE) | |
332 continue; | |
333 | |
334 Ti = PHI_ARG_DEF (phi, g->e->dest_idx); | |
335 | |
336 /* If this argument is a constant, or an SSA_NAME which is being | |
337 left in SSA form, just queue a copy to be emitted on this | |
338 edge. */ | |
339 if (!phi_ssa_name_p (Ti) | |
340 || (TREE_CODE (Ti) == SSA_NAME | |
341 && var_to_partition (g->map, Ti) == NO_PARTITION)) | |
342 { | |
343 /* Save constant copies until all other copies have been emitted | |
344 on this edge. */ | |
345 VEC_safe_push (tree, heap, g->const_copies, T0); | |
346 VEC_safe_push (tree, heap, g->const_copies, Ti); | |
347 } | |
348 else | |
349 { | |
350 Ti = var_to_partition_to_var (g->map, Ti); | |
351 if (T0 != Ti) | |
352 { | |
353 eliminate_name (g, T0); | |
354 eliminate_name (g, Ti); | |
355 p0 = var_to_partition (g->map, T0); | |
356 pi = var_to_partition (g->map, Ti); | |
357 elim_graph_add_edge (g, p0, pi); | |
358 } | |
359 } | |
360 } | |
361 } | |
362 | |
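/* An illustrative example of what eliminate_build collects for one incoming
   edge E (the partition assignments and SSA names here are arbitrary).
   Given

     a_1 = PHI <b_2 (E), ...>
     c_3 = PHI <7 (E), ...>

   and assuming a_1 and b_2 live in different partitions, the first PHI
   records the two partition representatives as graph nodes and adds an edge
   between their partition numbers, so the copy ordering can be resolved
   later.  The second PHI's argument on E is a constant, so the pair
   (c_3, 7) is pushed onto g->const_copies and emitted only after all other
   copies on the edge.  */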
363 | |
364 /* Push successors of T onto the elimination stack for G. */ | |
365 | |
366 static void | |
367 elim_forward (elim_graph g, int T) | |
368 { | |
369 int S; | |
370 SET_BIT (g->visited, T); | |
371 FOR_EACH_ELIM_GRAPH_SUCC (g, T, S, | |
372 { | |
373 if (!TEST_BIT (g->visited, S)) | |
374 elim_forward (g, S); | |
375 }); | |
376 VEC_safe_push (int, heap, g->stack, T); | |
377 } | |
378 | |
379 | |
380 /* Return 1 if there are unvisited predecessors of T in graph G. */ | |
381 | |
382 static int | |
383 elim_unvisited_predecessor (elim_graph g, int T) | |
384 { | |
385 int P; | |
386 FOR_EACH_ELIM_GRAPH_PRED (g, T, P, | |
387 { | |
388 if (!TEST_BIT (g->visited, P)) | |
389 return 1; | |
390 }); | |
391 return 0; | |
392 } | |
393 | |
394 /* Process predecessors first, and insert a copy. */ | |
395 | |
396 static void | |
397 elim_backward (elim_graph g, int T) | |
398 { | |
399 int P; | |
400 SET_BIT (g->visited, T); | |
401 FOR_EACH_ELIM_GRAPH_PRED (g, T, P, | |
402 { | |
403 if (!TEST_BIT (g->visited, P)) | |
404 { | |
405 elim_backward (g, P); | |
406 insert_copy_on_edge (g->e, | |
407 partition_to_var (g->map, P), | |
408 partition_to_var (g->map, T)); | |
409 } | |
410 }); | |
411 } | |
412 | |
413 /* Insert required copies for T in graph G. Check for a strongly connected | |
414 region, and create a temporary to break the cycle if one is found. */ | |
415 | |
416 static void | |
417 elim_create (elim_graph g, int T) | |
418 { | |
419 tree U; | |
420 int P, S; | |
421 | |
422 if (elim_unvisited_predecessor (g, T)) | |
423 { | |
424 U = create_temp (partition_to_var (g->map, T)); | |
425 insert_copy_on_edge (g->e, U, partition_to_var (g->map, T)); | |
426 FOR_EACH_ELIM_GRAPH_PRED (g, T, P, | |
427 { | |
428 if (!TEST_BIT (g->visited, P)) | |
429 { | |
430 elim_backward (g, P); | |
431 insert_copy_on_edge (g->e, partition_to_var (g->map, P), U); | |
432 } | |
433 }); | |
434 } | |
435 else | |
436 { | |
437 S = elim_graph_remove_succ_edge (g, T); | |
438 if (S != -1) | |
439 { | |
440 SET_BIT (g->visited, T); | |
441 insert_copy_on_edge (g->e, | |
442 partition_to_var (g->map, T), | |
443 partition_to_var (g->map, S)); | |
444 } | |
445 } | |
446 | |
447 } | |
448 | |
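/* A minimal, self-contained sketch of why elim_create needs a temporary:
   the parallel copies implied by the PHIs on one edge can form a cycle such
   as { a = b; b = a; }, and emitting them sequentially clobbers one of the
   values.  Plain ints stand in for partitions; the block is wrapped in
   #if 0 so it does not affect compilation.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int a = 1, b = 2, tmp;

  /* Naive sequential emission would lose a's value:
       a = b;   now a == 2
       b = a;   now b == 2, but it should be 1.  */

  /* Cycle broken with a temporary, in the order elim_create produces:  */
  tmp = a;      /* U = create_temp (T); copy T into U first.  */
  a = b;        /* walk the cycle, copying partition to partition.  */
  b = tmp;      /* the last predecessor reads the saved value.  */

  printf ("a = %d, b = %d\n", a, b);    /* prints a = 2, b = 1 */
  return 0;
}
#endif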
449 | |
450 /* Eliminate all the phi nodes on edge E in graph G. */ | |
451 | |
452 static void | |
453 eliminate_phi (edge e, elim_graph g) | |
454 { | |
455 int x; | |
456 basic_block B = e->dest; | |
457 | |
458 gcc_assert (VEC_length (tree, g->const_copies) == 0); | |
459 | |
460 /* Abnormal edges already have everything coalesced. */ | |
461 if (e->flags & EDGE_ABNORMAL) | |
462 return; | |
463 | |
464 g->e = e; | |
465 | |
466 eliminate_build (g, B); | |
467 | |
468 if (elim_graph_size (g) != 0) | |
469 { | |
470 tree var; | |
471 | |
472 sbitmap_zero (g->visited); | |
473 VEC_truncate (int, g->stack, 0); | |
474 | |
475 for (x = 0; VEC_iterate (tree, g->nodes, x, var); x++) | |
476 { | |
477 int p = var_to_partition (g->map, var); | |
478 if (!TEST_BIT (g->visited, p)) | |
479 elim_forward (g, p); | |
480 } | |
481 | |
482 sbitmap_zero (g->visited); | |
483 while (VEC_length (int, g->stack) > 0) | |
484 { | |
485 x = VEC_pop (int, g->stack); | |
486 if (!TEST_BIT (g->visited, x)) | |
487 elim_create (g, x); | |
488 } | |
489 } | |
490 | |
491 /* If there are any pending constant copies, issue them now. */ | |
492 while (VEC_length (tree, g->const_copies) > 0) | |
493 { | |
494 tree src, dest; | |
495 src = VEC_pop (tree, g->const_copies); | |
496 dest = VEC_pop (tree, g->const_copies); | |
497 insert_copy_on_edge (e, dest, src); | |
498 } | |
499 } | |
500 | |
501 | |
502 /* Take the ssa-name var_map MAP, and assign real variables to each | |
503 partition. */ | |
504 | |
505 static void | |
506 assign_vars (var_map map) | |
507 { | |
508 int x, num; | |
509 tree var, root; | |
510 var_ann_t ann; | |
511 | |
512 num = num_var_partitions (map); | |
513 for (x = 0; x < num; x++) | |
514 { | |
515 var = partition_to_var (map, x); | |
516 if (TREE_CODE (var) != SSA_NAME) | |
517 { | |
518 ann = var_ann (var); | |
519 /* It must already be coalesced. */ | |
520 gcc_assert (ann->out_of_ssa_tag == 1); | |
521 if (dump_file && (dump_flags & TDF_DETAILS)) | |
522 { | |
523 fprintf (dump_file, "partition %d already has variable ", x); | |
524 print_generic_expr (dump_file, var, TDF_SLIM); | |
525 fprintf (dump_file, " assigned to it.\n"); | |
526 } | |
527 } | |
528 else | |
529 { | |
530 root = SSA_NAME_VAR (var); | |
531 ann = var_ann (root); | |
532 /* If ROOT is already associated with a partition, create a new temporary. */ | |
533 if (ann->out_of_ssa_tag) | |
534 { | |
535 root = create_temp (root); | |
536 ann = var_ann (root); | |
537 } | |
538 /* ROOT has not been coalesced yet, so use it. */ | |
539 if (dump_file && (dump_flags & TDF_DETAILS)) | |
540 { | |
541 fprintf (dump_file, "Partition %d is assigned to var ", x); | |
542 print_generic_stmt (dump_file, root, TDF_SLIM); | |
543 } | |
544 change_partition_var (map, root, x); | |
545 } | |
546 } | |
547 } | |
548 | |
549 | |
550 /* Replace use operand P with whatever variable it has been rewritten to based | |
551 on the partitions in MAP. EXPR is an optional expression vector over SSA | |
552 versions which is used to replace P with an expression instead of a variable. | |
553 If the stmt is changed, return true. */ | |
554 | |
555 static inline bool | |
556 replace_use_variable (var_map map, use_operand_p p, gimple *expr) | |
557 { | |
558 tree new_var; | |
559 tree var = USE_FROM_PTR (p); | |
560 | |
561 /* Check if we are replacing this variable with an expression. */ | |
562 if (expr) | |
563 { | |
564 int version = SSA_NAME_VERSION (var); | |
565 if (expr[version]) | |
566 { | |
567 SET_USE (p, gimple_assign_rhs_to_tree (expr[version])); | |
568 return true; | |
569 } | |
570 } | |
571 | |
572 new_var = var_to_partition_to_var (map, var); | |
573 if (new_var) | |
574 { | |
575 SET_USE (p, new_var); | |
576 set_is_used (new_var); | |
577 return true; | |
578 } | |
579 return false; | |
580 } | |
581 | |
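/* An illustrative example of the EXPR vector used here and in rewrite_trees
   (the SSA names are arbitrary).  When Temporary Expression Replacement
   marks the single-use name x_3 as replaceable,

     x_3 = a_1 + b_2;
     c_4 = x_3 * 2;

   the use of x_3 is rewritten to the defining expression and the defining
   statement is later removed:

     c_4 = (a_1 + b_2) * 2;  */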
582 | |
583 /* Replace def operand DEF_P with whatever variable it has been rewritten to | |
584 based on the partitions in MAP. EXPR is an optional expression vector over | |
585 SSA versions which is used to replace DEF_P with an expression instead of a | |
586 variable. If the stmt is changed, return true. */ | |
587 | |
588 static inline bool | |
589 replace_def_variable (var_map map, def_operand_p def_p, tree *expr) | |
590 { | |
591 tree new_var; | |
592 tree var = DEF_FROM_PTR (def_p); | |
593 | |
594 /* Do nothing if we are replacing this variable with an expression. */ | |
595 if (expr && expr[SSA_NAME_VERSION (var)]) | |
596 return true; | |
597 | |
598 new_var = var_to_partition_to_var (map, var); | |
599 if (new_var) | |
600 { | |
601 SET_DEF (def_p, new_var); | |
602 set_is_used (new_var); | |
603 return true; | |
604 } | |
605 return false; | |
606 } | |
607 | |
608 | |
609 /* Remove each argument from PHI. If an arg was the last use of an SSA_NAME, | |
610 check to see if this allows another PHI node to be removed. */ | |
611 | |
612 static void | |
613 remove_gimple_phi_args (gimple phi) | |
614 { | |
615 use_operand_p arg_p; | |
616 ssa_op_iter iter; | |
617 | |
618 if (dump_file && (dump_flags & TDF_DETAILS)) | |
619 { | |
620 fprintf (dump_file, "Removing Dead PHI definition: "); | |
621 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM); | |
622 } | |
623 | |
624 FOR_EACH_PHI_ARG (arg_p, phi, iter, SSA_OP_USE) | |
625 { | |
626 tree arg = USE_FROM_PTR (arg_p); | |
627 if (TREE_CODE (arg) == SSA_NAME) | |
628 { | |
629 /* Remove the reference to the existing argument. */ | |
630 SET_USE (arg_p, NULL_TREE); | |
631 if (has_zero_uses (arg)) | |
632 { | |
633 gimple stmt; | |
634 gimple_stmt_iterator gsi; | |
635 | |
636 stmt = SSA_NAME_DEF_STMT (arg); | |
637 | |
638 /* Also remove the def if it is a PHI node. */ | |
639 if (gimple_code (stmt) == GIMPLE_PHI) | |
640 { | |
641 remove_gimple_phi_args (stmt); | |
642 gsi = gsi_for_stmt (stmt); | |
643 remove_phi_node (&gsi, true); | |
644 } | |
645 | |
646 } | |
647 } | |
648 } | |
649 } | |
650 | |
651 /* Remove any PHI node which is a virtual PHI, or a PHI with no uses. */ | |
652 | |
653 static void | |
654 eliminate_useless_phis (void) | |
655 { | |
656 basic_block bb; | |
657 gimple_stmt_iterator gsi; | |
658 tree result; | |
659 | |
660 FOR_EACH_BB (bb) | |
661 { | |
662 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); ) | |
663 { | |
664 gimple phi = gsi_stmt (gsi); | |
665 result = gimple_phi_result (phi); | |
666 if (!is_gimple_reg (SSA_NAME_VAR (result))) | |
667 { | |
668 #ifdef ENABLE_CHECKING | |
669 size_t i; | |
670 /* There should be no arguments which are not virtual, or the | |
671 results will be incorrect. */ | |
672 for (i = 0; i < gimple_phi_num_args (phi); i++) | |
673 { | |
674 tree arg = PHI_ARG_DEF (phi, i); | |
675 if (TREE_CODE (arg) == SSA_NAME | |
676 && is_gimple_reg (SSA_NAME_VAR (arg))) | |
677 { | |
678 fprintf (stderr, "Argument of PHI is not virtual ("); | |
679 print_generic_expr (stderr, arg, TDF_SLIM); | |
680 fprintf (stderr, "), but the result is :"); | |
681 print_gimple_stmt (stderr, phi, 0, TDF_SLIM); | |
682 internal_error ("SSA corruption"); | |
683 } | |
684 } | |
685 #endif | |
686 remove_phi_node (&gsi, true); | |
687 } | |
688 else | |
689 { | |
690 /* Also remove real PHIs with no uses. */ | |
691 if (has_zero_uses (result)) | |
692 { | |
693 remove_gimple_phi_args (phi); | |
694 remove_phi_node (&gsi, true); | |
695 } | |
696 else | |
697 gsi_next (&gsi); | |
698 } | |
699 } | |
700 } | |
701 } | |
702 | |
703 | |
704 /* This function will rewrite the current program using the variable mapping | |
705 found in MAP. If the replacement vector VALUES is provided, any | |
706 occurrences of partitions with non-null entries in the vector will be | |
707 replaced with the expression in the vector instead of its mapped | |
708 variable. */ | |
709 | |
710 static void | |
711 rewrite_trees (var_map map, gimple *values) | |
712 { | |
713 elim_graph g; | |
714 basic_block bb; | |
715 gimple_stmt_iterator gsi; | |
716 edge e; | |
717 gimple_seq phi; | |
718 bool changed; | |
719 | |
720 #ifdef ENABLE_CHECKING | |
721 /* Search for PHIs where the destination has no partition, but one | |
722 or more arguments has a partition. This should not happen and can | |
723 create incorrect code. */ | |
724 FOR_EACH_BB (bb) | |
725 { | |
726 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
727 { | |
728 gimple phi = gsi_stmt (gsi); | |
729 tree T0 = var_to_partition_to_var (map, gimple_phi_result (phi)); | |
730 if (T0 == NULL_TREE) | |
731 { | |
732 size_t i; | |
733 for (i = 0; i < gimple_phi_num_args (phi); i++) | |
734 { | |
735 tree arg = PHI_ARG_DEF (phi, i); | |
736 | |
737 if (TREE_CODE (arg) == SSA_NAME | |
738 && var_to_partition (map, arg) != NO_PARTITION) | |
739 { | |
740 fprintf (stderr, "Argument of PHI is in a partition :("); | |
741 print_generic_expr (stderr, arg, TDF_SLIM); | |
742 fprintf (stderr, "), but the result is not :"); | |
743 print_gimple_stmt (stderr, phi, 0, TDF_SLIM); | |
744 internal_error ("SSA corruption"); | |
745 } | |
746 } | |
747 } | |
748 } | |
749 } | |
750 #endif | |
751 | |
752 /* Replace PHI nodes with any required copies. */ | |
753 g = new_elim_graph (map->num_partitions); | |
754 g->map = map; | |
755 FOR_EACH_BB (bb) | |
756 { | |
757 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); ) | |
758 { | |
759 gimple stmt = gsi_stmt (gsi); | |
760 use_operand_p use_p, copy_use_p; | |
761 def_operand_p def_p; | |
762 bool remove = false, is_copy = false; | |
763 int num_uses = 0; | |
764 ssa_op_iter iter; | |
765 | |
766 changed = false; | |
767 | |
768 if (gimple_assign_copy_p (stmt)) | |
769 is_copy = true; | |
770 | |
771 copy_use_p = NULL_USE_OPERAND_P; | |
772 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE) | |
773 { | |
774 if (replace_use_variable (map, use_p, values)) | |
775 changed = true; | |
776 copy_use_p = use_p; | |
777 num_uses++; | |
778 } | |
779 | |
780 if (num_uses != 1) | |
781 is_copy = false; | |
782 | |
783 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF); | |
784 | |
785 if (def_p != NULL) | |
786 { | |
787 /* Mark this stmt for removal if it is in the list of replaceable | |
788 expressions. */ | |
789 if (values && values[SSA_NAME_VERSION (DEF_FROM_PTR (def_p))]) | |
790 remove = true; | |
791 else | |
792 { | |
793 if (replace_def_variable (map, def_p, NULL)) | |
794 changed = true; | |
795 /* If both SSA_NAMEs coalesce to the same variable, | |
796 mark the now redundant copy for removal. */ | |
797 if (is_copy) | |
798 { | |
799 gcc_assert (copy_use_p != NULL_USE_OPERAND_P); | |
800 if (DEF_FROM_PTR (def_p) == USE_FROM_PTR (copy_use_p)) | |
801 remove = true; | |
802 } | |
803 } | |
804 } | |
805 else | |
806 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF) | |
807 if (replace_def_variable (map, def_p, NULL)) | |
808 changed = true; | |
809 | |
810 /* Remove any stmts marked for removal. */ | |
811 if (remove) | |
812 gsi_remove (&gsi, true); | |
813 else | |
814 { | |
815 if (changed) | |
816 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)) | |
817 gimple_purge_dead_eh_edges (bb); | |
818 gsi_next (&gsi); | |
819 } | |
820 } | |
821 | |
822 phi = phi_nodes (bb); | |
823 if (phi) | |
824 { | |
825 edge_iterator ei; | |
826 FOR_EACH_EDGE (e, ei, bb->preds) | |
827 eliminate_phi (e, g); | |
828 } | |
829 } | |
830 | |
831 delete_elim_graph (g); | |
832 } | |
833 | |
834 /* These are the local work structures used to determine the best place to | |
835 insert the copies that were placed on edges by the SSA->normal pass. */ | |
836 static VEC(edge,heap) *edge_leader; | |
837 static VEC(gimple_seq,heap) *stmt_list; | |
838 static bitmap leader_has_match = NULL; | |
839 static edge leader_match = NULL; | |
840 | |
841 | |
842 /* Pass this function to make_forwarder_block so that all the edges whose | |
843 PENDING_STMT lists match 'curr_stmt_list' get redirected. E is the | |
844 edge to test for a match. */ | |
845 | |
846 static inline bool | |
847 same_stmt_list_p (edge e) | |
848 { | |
849 return (e->aux == (PTR) leader_match) ? true : false; | |
850 } | |
851 | |
852 | |
853 /* Return TRUE if S1 and S2 are equivalent copies. */ | |
854 | |
855 static inline bool | |
856 identical_copies_p (const_gimple s1, const_gimple s2) | |
857 { | |
858 #ifdef ENABLE_CHECKING | |
859 gcc_assert (is_gimple_assign (s1)); | |
860 gcc_assert (is_gimple_assign (s2)); | |
861 gcc_assert (DECL_P (gimple_assign_lhs (s1))); | |
862 gcc_assert (DECL_P (gimple_assign_lhs (s2))); | |
863 #endif | |
864 | |
865 if (gimple_assign_lhs (s1) != gimple_assign_lhs (s2)) | |
866 return false; | |
867 | |
868 if (gimple_assign_rhs1 (s1) != gimple_assign_rhs1 (s2)) | |
869 return false; | |
870 | |
871 return true; | |
872 } | |
873 | |
874 | |
875 /* Compare the PENDING_STMT list for edges E1 and E2. Return true if the lists | |
876 contain the same sequence of copies. */ | |
877 | |
878 static inline bool | |
879 identical_stmt_lists_p (const_edge e1, const_edge e2) | |
880 { | |
881 gimple_seq t1 = PENDING_STMT (e1); | |
882 gimple_seq t2 = PENDING_STMT (e2); | |
883 gimple_stmt_iterator gsi1, gsi2; | |
884 | |
885 for (gsi1 = gsi_start (t1), gsi2 = gsi_start (t2); | |
886 !gsi_end_p (gsi1) && !gsi_end_p (gsi2); | |
887 gsi_next (&gsi1), gsi_next (&gsi2)) | |
888 { | |
889 if (!identical_copies_p (gsi_stmt (gsi1), gsi_stmt (gsi2))) | |
890 break; | |
891 } | |
892 | |
893 if (!gsi_end_p (gsi1) || !gsi_end_p (gsi2)) | |
894 return false; | |
895 | |
896 return true; | |
897 } | |
898 | |
899 | |
900 /* Allocate data structures used in analyze_edges_for_bb. */ | |
901 | |
902 static void | |
903 init_analyze_edges_for_bb (void) | |
904 { | |
905 edge_leader = VEC_alloc (edge, heap, 25); | |
906 stmt_list = VEC_alloc (gimple_seq, heap, 25); | |
907 leader_has_match = BITMAP_ALLOC (NULL); | |
908 } | |
909 | |
910 | |
911 /* Free data structures used in analyze_edges_for_bb. */ | |
912 | |
913 static void | |
914 fini_analyze_edges_for_bb (void) | |
915 { | |
916 VEC_free (edge, heap, edge_leader); | |
917 VEC_free (gimple_seq, heap, stmt_list); | |
918 BITMAP_FREE (leader_has_match); | |
919 } | |
920 | |
921 /* A helper function to be called via walk_tree. Return DATA if it is | |
922 contained in subtree TP. */ | |
923 | |
924 static tree | |
925 contains_tree_r (tree * tp, int *walk_subtrees, void *data) | |
926 { | |
927 if (*tp == data) | |
928 { | |
929 *walk_subtrees = 0; | |
930 return (tree) data; | |
931 } | |
932 else | |
933 return NULL_TREE; | |
934 } | |
935 | |
936 /* A threshold for the number of insns contained in the latch block. | |
937 It is used to prevent blowing up the loop with too many copies from | |
938 the latch. */ | |
939 #define MAX_STMTS_IN_LATCH 2 | |
940 | |
941 /* Return TRUE if the stmts on SINGLE-EDGE can be moved to the | |
942 body of the loop. This should be permitted only if SINGLE-EDGE is a | |
943 single-basic-block latch edge and thus cleaning the latch will help | |
944 to create a single-basic-block loop. Otherwise return FALSE. */ | |
945 | |
946 static bool | |
947 process_single_block_loop_latch (edge single_edge) | |
948 { | |
949 gimple_seq stmts; | |
950 basic_block b_exit, b_pheader, b_loop = single_edge->src; | |
951 edge_iterator ei; | |
952 edge e; | |
953 gimple_stmt_iterator gsi, gsi_exit; | |
954 gimple_stmt_iterator tsi; | |
955 tree expr; | |
956 gimple stmt; | |
957 unsigned int count = 0; | |
958 | |
959 if (single_edge == NULL || (single_edge->dest != single_edge->src) | |
960 || (EDGE_COUNT (b_loop->succs) != 2) | |
961 || (EDGE_COUNT (b_loop->preds) != 2)) | |
962 return false; | |
963 | |
964 /* Get the stmts on the latch edge. */ | |
965 stmts = PENDING_STMT (single_edge); | |
966 | |
967 /* Find the successor edge which is not the latch edge. */ | |
968 FOR_EACH_EDGE (e, ei, b_loop->succs) | |
969 if (e->dest != b_loop) | |
970 break; | |
971 | |
972 b_exit = e->dest; | |
973 | |
974 /* Check that the exit block has only the loop as a predecessor, | |
975 and that there are no pending stmts on that edge as well. */ | |
976 if (EDGE_COUNT (b_exit->preds) != 1 || PENDING_STMT (e)) | |
977 return false; | |
978 | |
979 /* Find the predecessor edge which is not the latch edge. */ | |
980 FOR_EACH_EDGE (e, ei, b_loop->preds) | |
981 if (e->src != b_loop) | |
982 break; | |
983 | |
984 b_pheader = e->src; | |
985 | |
986 if (b_exit == b_pheader || b_exit == b_loop || b_pheader == b_loop) | |
987 return false; | |
988 | |
989 gsi_exit = gsi_after_labels (b_exit); | |
990 | |
991 /* Get the last stmt in the loop body. */ | |
992 gsi = gsi_last_bb (single_edge->src); | |
993 stmt = gsi_stmt (gsi); | |
994 | |
995 if (gimple_code (stmt) != GIMPLE_COND) | |
996 return false; | |
997 | |
998 | |
999 expr = build2 (gimple_cond_code (stmt), boolean_type_node, | |
1000 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt)); | |
1001 /* Iterate over the insns on the latch and count them. */ | |
1002 for (tsi = gsi_start (stmts); !gsi_end_p (tsi); gsi_next (&tsi)) | |
1003 { | |
1004 gimple stmt1 = gsi_stmt (tsi); | |
1005 tree var; | |
1006 | |
1007 count++; | |
1008 /* Check that the condition does not contain any new definition | |
1009 created in the latch, as the stmts from the latch are intended | |
1010 to precede it. */ | |
1011 if (gimple_code (stmt1) != GIMPLE_ASSIGN) | |
1012 return false; | |
1013 var = gimple_assign_lhs (stmt1); | |
1014 if (TREE_THIS_VOLATILE (var) | |
1015 || TYPE_VOLATILE (TREE_TYPE (var)) | |
1016 || walk_tree (&expr, contains_tree_r, var, NULL)) | |
1017 return false; | |
1018 } | |
1019 /* Check that the latch does not contain more than MAX_STMTS_IN_LATCH | |
1020 insns. The purpose of this restriction is to prevent blowing up the | |
1021 loop with too many copies from the latch. */ | |
1022 if (count > MAX_STMTS_IN_LATCH) | |
1023 return false; | |
1024 | |
1025 /* Apply the transformation - clean up the latch block: | |
1026 | |
1027 var = something; | |
1028 L1: | |
1029 x1 = expr; | |
1030 if (cond) goto L2 else goto L3; | |
1031 L2: | |
1032 var = x1; | |
1033 goto L1 | |
1034 L3: | |
1035 ... | |
1036 | |
1037 ==> | |
1038 | |
1039 var = something; | |
1040 L1: | |
1041 x1 = expr; | |
1042 tmp_var = var; | |
1043 var = x1; | |
1044 if (cond) goto L1 else goto L2; | |
1045 L2: | |
1046 var = tmp_var; | |
1047 ... | |
1048 */ | |
1049 for (tsi = gsi_start (stmts); !gsi_end_p (tsi); gsi_next (&tsi)) | |
1050 { | |
1051 gimple stmt1 = gsi_stmt (tsi); | |
1052 tree var, tmp_var; | |
1053 gimple copy; | |
1054 | |
1055 /* Create a new variable to load back the value of var in case | |
1056 we exit the loop. */ | |
1057 var = gimple_assign_lhs (stmt1); | |
1058 tmp_var = create_temp (var); | |
1059 copy = gimple_build_assign (tmp_var, var); | |
1060 set_is_used (tmp_var); | |
1061 gsi_insert_before (&gsi, copy, GSI_SAME_STMT); | |
1062 copy = gimple_build_assign (var, tmp_var); | |
1063 gsi_insert_before (&gsi_exit, copy, GSI_SAME_STMT); | |
1064 } | |
1065 | |
1066 PENDING_STMT (single_edge) = 0; | |
1067 /* Insert the new stmts to the loop body. */ | |
1068 gsi_insert_seq_before (&gsi, stmts, GSI_NEW_STMT); | |
1069 | |
1070 if (dump_file) | |
1071 fprintf (dump_file, | |
1072 "\nCleaned-up latch block of loop with single BB: %d\n\n", | |
1073 single_edge->dest->index); | |
1074 | |
1075 return true; | |
1076 } | |
1077 | |
1078 /* Look at all the incoming edges to block BB, and decide where the best place | |
1079 to insert the stmts on each edge is, and perform those insertions. */ | |
1080 | |
1081 static void | |
1082 analyze_edges_for_bb (basic_block bb) | |
1083 { | |
1084 edge e; | |
1085 edge_iterator ei; | |
1086 int count; | |
1087 unsigned int x; | |
1088 bool have_opportunity; | |
1089 gimple_stmt_iterator gsi; | |
1090 gimple stmt; | |
1091 edge single_edge = NULL; | |
1092 bool is_label; | |
1093 edge leader; | |
1094 | |
1095 count = 0; | |
1096 | |
1097 /* Blocks which contain at least one abnormal edge cannot use | |
1098 make_forwarder_block. Look for these blocks, and commit any PENDING_STMTs | |
1099 found on edges in these blocks. */ | |
1100 have_opportunity = true; | |
1101 FOR_EACH_EDGE (e, ei, bb->preds) | |
1102 if (e->flags & EDGE_ABNORMAL) | |
1103 { | |
1104 have_opportunity = false; | |
1105 break; | |
1106 } | |
1107 | |
1108 if (!have_opportunity) | |
1109 { | |
1110 FOR_EACH_EDGE (e, ei, bb->preds) | |
1111 if (PENDING_STMT (e)) | |
1112 gsi_commit_one_edge_insert (e, NULL); | |
1113 return; | |
1114 } | |
1115 | |
1116 /* Find out how many edges there are with interesting pending stmts on them. | |
1117 Commit the stmts on edges we are not interested in. */ | |
1118 FOR_EACH_EDGE (e, ei, bb->preds) | |
1119 { | |
1120 if (PENDING_STMT (e)) | |
1121 { | |
1122 gcc_assert (!(e->flags & EDGE_ABNORMAL)); | |
1123 if (e->flags & EDGE_FALLTHRU) | |
1124 { | |
1125 gsi = gsi_start_bb (e->src); | |
1126 if (!gsi_end_p (gsi)) | |
1127 { | |
1128 stmt = gsi_stmt (gsi); | |
1129 gsi_next (&gsi); | |
1130 gcc_assert (stmt != NULL); | |
1131 is_label = (gimple_code (stmt) == GIMPLE_LABEL); | |
1132 /* Punt if it has non-label stmts, or isn't local. */ | |
1133 if (!is_label | |
1134 || DECL_NONLOCAL (gimple_label_label (stmt)) | |
1135 || !gsi_end_p (gsi)) | |
1136 { | |
1137 gsi_commit_one_edge_insert (e, NULL); | |
1138 continue; | |
1139 } | |
1140 } | |
1141 } | |
1142 single_edge = e; | |
1143 count++; | |
1144 } | |
1145 } | |
1146 | |
1147 /* If there aren't at least 2 edges, no sharing will happen. */ | |
1148 if (count < 2) | |
1149 { | |
1150 if (single_edge) | |
1151 { | |
1152 /* Add stmts to the edge unless processed specially as a | |
1153 single-block loop latch edge. */ | |
1154 if (!process_single_block_loop_latch (single_edge)) | |
1155 gsi_commit_one_edge_insert (single_edge, NULL); | |
1156 } | |
1157 return; | |
1158 } | |
1159 | |
1160 /* Ensure that we have empty worklists. */ | |
1161 #ifdef ENABLE_CHECKING | |
1162 gcc_assert (VEC_length (edge, edge_leader) == 0); | |
1163 gcc_assert (VEC_length (gimple_seq, stmt_list) == 0); | |
1164 gcc_assert (bitmap_empty_p (leader_has_match)); | |
1165 #endif | |
1166 | |
1167 /* Find the "leader" block for each set of unique stmt lists. Preference is | |
1168 given to FALLTHRU blocks since they would need a GOTO to arrive at another | |
1169 block. The leader edge destination is the block which all the other edges | |
1170 with the same stmt list will be redirected to. */ | |
1171 have_opportunity = false; | |
1172 FOR_EACH_EDGE (e, ei, bb->preds) | |
1173 { | |
1174 if (PENDING_STMT (e)) | |
1175 { | |
1176 bool found = false; | |
1177 | |
1178 /* Look for the same stmt list in edge leaders list. */ | |
1179 for (x = 0; VEC_iterate (edge, edge_leader, x, leader); x++) | |
1180 { | |
1181 if (identical_stmt_lists_p (leader, e)) | |
1182 { | |
1183 /* Give this edge the same stmt list pointer. */ | |
1184 PENDING_STMT (e) = NULL; | |
1185 e->aux = leader; | |
1186 bitmap_set_bit (leader_has_match, x); | |
1187 have_opportunity = found = true; | |
1188 break; | |
1189 } | |
1190 } | |
1191 | |
1192 /* If no similar stmt list, add this edge to the leader list. */ | |
1193 if (!found) | |
1194 { | |
1195 VEC_safe_push (edge, heap, edge_leader, e); | |
1196 VEC_safe_push (gimple_seq, heap, stmt_list, PENDING_STMT (e)); | |
1197 } | |
1198 } | |
1199 } | |
1200 | |
1201 /* If there are no similar lists, just issue the stmts. */ | |
1202 if (!have_opportunity) | |
1203 { | |
1204 for (x = 0; VEC_iterate (edge, edge_leader, x, leader); x++) | |
1205 gsi_commit_one_edge_insert (leader, NULL); | |
1206 VEC_truncate (edge, edge_leader, 0); | |
1207 VEC_truncate (gimple_seq, stmt_list, 0); | |
1208 bitmap_clear (leader_has_match); | |
1209 return; | |
1210 } | |
1211 | |
1212 if (dump_file) | |
1213 fprintf (dump_file, "\nOpportunities in BB %d for stmt/block reduction:\n", | |
1214 bb->index); | |
1215 | |
1216 /* For each common list, create a forwarding block and issue the stmts | |
1217 in that block. */ | |
1218 for (x = 0; VEC_iterate (edge, edge_leader, x, leader); x++) | |
1219 if (bitmap_bit_p (leader_has_match, x)) | |
1220 { | |
1221 edge new_edge; | |
1222 gimple_stmt_iterator gsi; | |
1223 gimple_seq curr_stmt_list; | |
1224 | |
1225 leader_match = leader; | |
1226 | |
1227 /* The tree_* cfg manipulation routines use the PENDING_EDGE field | |
1228 for various PHI manipulations, so it gets cleared when calls are | |
1229 made to make_forwarder_block(). So make sure the edge is clear, | |
1230 and use the saved stmt list. */ | |
1231 PENDING_STMT (leader) = NULL; | |
1232 leader->aux = leader; | |
1233 curr_stmt_list = VEC_index (gimple_seq, stmt_list, x); | |
1234 | |
1235 new_edge = make_forwarder_block (leader->dest, same_stmt_list_p, | |
1236 NULL); | |
1237 bb = new_edge->dest; | |
1238 if (dump_file) | |
1239 { | |
1240 fprintf (dump_file, "Splitting BB %d for Common stmt list. ", | |
1241 leader->dest->index); | |
1242 fprintf (dump_file, "Original block is now BB%d.\n", bb->index); | |
1243 print_gimple_seq (dump_file, curr_stmt_list, 0, TDF_VOPS); | |
1244 } | |
1245 | |
1246 FOR_EACH_EDGE (e, ei, new_edge->src->preds) | |
1247 { | |
1248 e->aux = NULL; | |
1249 if (dump_file) | |
1250 fprintf (dump_file, " Edge (%d->%d) lands here.\n", | |
1251 e->src->index, e->dest->index); | |
1252 } | |
1253 | |
1254 gsi = gsi_last_bb (leader->dest); | |
1255 gsi_insert_seq_after (&gsi, curr_stmt_list, GSI_NEW_STMT); | |
1256 | |
1257 leader_match = NULL; | |
1258 /* We should never get a new block now. */ | |
1259 } | |
1260 else | |
1261 { | |
1262 PENDING_STMT (leader) = VEC_index (gimple_seq, stmt_list, x); | |
1263 gsi_commit_one_edge_insert (leader, NULL); | |
1264 } | |
1265 | |
1266 | |
1267 /* Clear the working data structures. */ | |
1268 VEC_truncate (edge, edge_leader, 0); | |
1269 VEC_truncate (gimple_seq, stmt_list, 0); | |
1270 bitmap_clear (leader_has_match); | |
1271 } | |
1272 | |
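/* An illustrative example of the sharing performed above (block numbers and
   the copy are arbitrary).  If the edges 3->7 and 5->7 both carry the
   identical pending copy "a = b", one of them becomes the leader,
   make_forwarder_block splits BB7 so that both matching edges enter a new
   forwarder block, and the copy is issued once in that forwarder block,
   which then falls through to the original code of BB7, instead of once per
   edge.  Edges whose pending list matches no other edge are simply
   committed on the edge as usual.  */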
1273 | |
1274 /* This function will analyze the insertions which were performed on edges, | |
1275 and decide whether they should be left on that edge, or whether it is more | |
1276 efficient to emit some subset of them in a single block. All stmts are | |
1277 inserted somewhere. */ | |
1278 | |
1279 static void | |
1280 perform_edge_inserts (void) | |
1281 { | |
1282 basic_block bb; | |
1283 | |
1284 if (dump_file) | |
1285 fprintf(dump_file, "Analyzing Edge Insertions.\n"); | |
1286 | |
1287 /* analyze_edges_for_bb calls make_forwarder_block, which tries to | |
1288 incrementally update the dominator information. Since we don't | |
1289 need dominator information after this pass, go ahead and free the | |
1290 dominator information. */ | |
1291 free_dominance_info (CDI_DOMINATORS); | |
1292 free_dominance_info (CDI_POST_DOMINATORS); | |
1293 | |
1294 /* Allocate data structures used in analyze_edges_for_bb. */ | |
1295 init_analyze_edges_for_bb (); | |
1296 | |
1297 FOR_EACH_BB (bb) | |
1298 analyze_edges_for_bb (bb); | |
1299 | |
1300 analyze_edges_for_bb (EXIT_BLOCK_PTR); | |
1301 | |
1302 /* Free data structures used in analyze_edges_for_bb. */ | |
1303 fini_analyze_edges_for_bb (); | |
1304 | |
1305 #ifdef ENABLE_CHECKING | |
1306 { | |
1307 edge_iterator ei; | |
1308 edge e; | |
1309 FOR_EACH_BB (bb) | |
1310 { | |
1311 FOR_EACH_EDGE (e, ei, bb->preds) | |
1312 { | |
1313 if (PENDING_STMT (e)) | |
1314 error (" Pending stmts not issued on PRED edge (%d, %d)\n", | |
1315 e->src->index, e->dest->index); | |
1316 } | |
1317 FOR_EACH_EDGE (e, ei, bb->succs) | |
1318 { | |
1319 if (PENDING_STMT (e)) | |
1320 error (" Pending stmts not issued on SUCC edge (%d, %d)\n", | |
1321 e->src->index, e->dest->index); | |
1322 } | |
1323 } | |
1324 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs) | |
1325 { | |
1326 if (PENDING_STMT (e)) | |
1327 error (" Pending stmts not issued on ENTRY edge (%d, %d)\n", | |
1328 e->src->index, e->dest->index); | |
1329 } | |
1330 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) | |
1331 { | |
1332 if (PENDING_STMT (e)) | |
1333 error (" Pending stmts not issued on EXIT edge (%d, %d)\n", | |
1334 e->src->index, e->dest->index); | |
1335 } | |
1336 } | |
1337 #endif | |
1338 } | |
1339 | |
1340 | |
1341 /* Remove the ssa-names in the current function and translate them into normal | |
1342 compiler variables. PERFORM_TER is true if Temporary Expression Replacement | |
1343 should also be used. */ | |
1344 | |
1345 static void | |
1346 remove_ssa_form (bool perform_ter) | |
1347 { | |
1348 basic_block bb; | |
1349 gimple *values = NULL; | |
1350 var_map map; | |
1351 gimple_stmt_iterator gsi; | |
1352 | |
1353 map = coalesce_ssa_name (); | |
1354 | |
1355 /* Return to viewing the variable list as just all reference variables after | |
1356 coalescing has been performed. */ | |
1357 partition_view_normal (map, false); | |
1358 | |
1359 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1360 { | |
1361 fprintf (dump_file, "After Coalescing:\n"); | |
1362 dump_var_map (dump_file, map); | |
1363 } | |
1364 | |
1365 if (perform_ter) | |
1366 { | |
1367 values = find_replaceable_exprs (map); | |
1368 if (values && dump_file && (dump_flags & TDF_DETAILS)) | |
1369 dump_replaceable_exprs (dump_file, values); | |
1370 } | |
1371 | |
1372 /* Assign real variables to the partitions now. */ | |
1373 assign_vars (map); | |
1374 | |
1375 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1376 { | |
1377 fprintf (dump_file, "After Base variable replacement:\n"); | |
1378 dump_var_map (dump_file, map); | |
1379 } | |
1380 | |
1381 rewrite_trees (map, values); | |
1382 | |
1383 if (values) | |
1384 free (values); | |
1385 | |
1386 /* Remove PHI nodes which have been translated back to real variables. */ | |
1387 FOR_EACH_BB (bb) | |
1388 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);) | |
1389 remove_phi_node (&gsi, true); | |
1390 | |
1391 /* If any copies were inserted on edges, analyze and insert them now. */ | |
1392 perform_edge_inserts (); | |
1393 | |
1394 delete_var_map (map); | |
1395 } | |
1396 | |
1397 | |
1398 /* Search every PHI node for arguments associated with backedges which | |
1399 we can trivially determine will need a copy (the argument is either | |
1400 not an SSA_NAME or the argument has a different underlying variable | |
1401 than the PHI result). | |
1402 | |
1403 Insert a copy from the PHI argument to a new destination at the | |
1404 end of the block with the backedge to the top of the loop. Update | |
1405 the PHI argument to reference this new destination. */ | |
1406 | |
1407 static void | |
1408 insert_backedge_copies (void) | |
1409 { | |
1410 basic_block bb; | |
1411 gimple_stmt_iterator gsi; | |
1412 | |
1413 FOR_EACH_BB (bb) | |
1414 { | |
1415 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1416 { | |
1417 gimple phi = gsi_stmt (gsi); | |
1418 tree result = gimple_phi_result (phi); | |
1419 tree result_var; | |
1420 size_t i; | |
1421 | |
1422 if (!is_gimple_reg (result)) | |
1423 continue; | |
1424 | |
1425 result_var = SSA_NAME_VAR (result); | |
1426 for (i = 0; i < gimple_phi_num_args (phi); i++) | |
1427 { | |
1428 tree arg = gimple_phi_arg_def (phi, i); | |
1429 edge e = gimple_phi_arg_edge (phi, i); | |
1430 | |
1431 /* If the argument is not an SSA_NAME, then we will need a | |
1432 constant initialization. If the argument is an SSA_NAME with | |
1433 a different underlying variable then a copy statement will be | |
1434 needed. */ | |
1435 if ((e->flags & EDGE_DFS_BACK) | |
1436 && (TREE_CODE (arg) != SSA_NAME | |
1437 || SSA_NAME_VAR (arg) != result_var)) | |
1438 { | |
1439 tree name; | |
1440 gimple stmt, last = NULL; | |
1441 gimple_stmt_iterator gsi2; | |
1442 | |
1443 gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src); | |
1444 if (!gsi_end_p (gsi2)) | |
1445 last = gsi_stmt (gsi2); | |
1446 | |
1447 /* In theory the only way we ought to get back to the | |
1448 start of a loop should be with a COND_EXPR or GOTO_EXPR. | |
1449 However, better safe than sorry. | |
1450 If the block ends with a control statement or | |
1451 something that might throw, then we have to | |
1452 insert this assignment before the last | |
1453 statement. Else insert it after the last statement. */ | |
1454 if (last && stmt_ends_bb_p (last)) | |
1455 { | |
1456 /* If the last statement in the block is the definition | |
1457 site of the PHI argument, then we can't insert | |
1458 anything after it. */ | |
1459 if (TREE_CODE (arg) == SSA_NAME | |
1460 && SSA_NAME_DEF_STMT (arg) == last) | |
1461 continue; | |
1462 } | |
1463 | |
1464 /* Create a new instance of the underlying variable of the | |
1465 PHI result. */ | |
1466 stmt = gimple_build_assign (result_var, | |
1467 gimple_phi_arg_def (phi, i)); | |
1468 name = make_ssa_name (result_var, stmt); | |
1469 gimple_assign_set_lhs (stmt, name); | |
1470 | |
1471 /* Insert the new statement into the block and update | |
1472 the PHI node. */ | |
1473 if (last && stmt_ends_bb_p (last)) | |
1474 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT); | |
1475 else | |
1476 gsi_insert_after (&gsi2, stmt, GSI_NEW_STMT); | |
1477 SET_PHI_ARG_DEF (phi, i, name); | |
1478 } | |
1479 } | |
1480 } | |
1481 } | |
1482 } | |
1483 | |
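/* An illustrative example of the transformation above (SSA versions are
   arbitrary).  Before, the argument on the back edge has a different
   underlying variable than the PHI result, which would otherwise force a
   copy on the back edge itself when PHIs are eliminated:

     loop header:
       i_1 = PHI <0 (entry), j_5 (latch)>
     latch:
       j_5 = ...;
       goto loop header;

   After, a copy into a fresh version of the PHI result's variable is placed
   at the end of the latch block and the PHI argument is updated, so the
   copy sits inside the loop body rather than on a split back edge:

     latch:
       j_5 = ...;
       i_6 = j_5;
       goto loop header;
     loop header:
       i_1 = PHI <0 (entry), i_6 (latch)>  */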
1484 /* Take the current function out of SSA form, translating PHIs as described in | |
1485 R. Morgan, ``Building an Optimizing Compiler'', | |
1486 Butterworth-Heinemann, Boston, MA, 1998. pp 176-186. */ | |
1487 | |
1488 static unsigned int | |
1489 rewrite_out_of_ssa (void) | |
1490 { | |
1491 /* If elimination of a PHI requires inserting a copy on a backedge, | |
1492 then we will have to split the backedge which has numerous | |
1493 undesirable performance effects. | |
1494 | |
1495 A significant number of such cases can be handled here by inserting | |
1496 copies into the loop itself. */ | |
1497 insert_backedge_copies (); | |
1498 | |
1499 | |
1500 /* Eliminate PHIs which are of no use, such as virtual or dead phis. */ | |
1501 eliminate_useless_phis (); | |
1502 | |
1503 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1504 gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS); | |
1505 | |
1506 remove_ssa_form (flag_tree_ter && !flag_mudflap); | |
1507 | |
1508 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1509 gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS); | |
1510 | |
1511 cfun->gimple_df->in_ssa_p = false; | |
1512 return 0; | |
1513 } | |
1514 | |
1515 | |
1516 /* Define the parameters of the out of SSA pass. */ | |
1517 | |
1518 struct gimple_opt_pass pass_del_ssa = | |
1519 { | |
1520 { | |
1521 GIMPLE_PASS, | |
1522 "optimized", /* name */ | |
1523 NULL, /* gate */ | |
1524 rewrite_out_of_ssa, /* execute */ | |
1525 NULL, /* sub */ | |
1526 NULL, /* next */ | |
1527 0, /* static_pass_number */ | |
1528 TV_TREE_SSA_TO_NORMAL, /* tv_id */ | |
1529 PROP_cfg | PROP_ssa, /* properties_required */ | |
1530 0, /* properties_provided */ | |
1531 /* ??? If TER is enabled, we also kill gimple. */ | |
1532 PROP_ssa, /* properties_destroyed */ | |
1533 TODO_verify_ssa | TODO_verify_flow | |
1534 | TODO_verify_stmts, /* todo_flags_start */ | |
1535 TODO_dump_func | |
1536 | TODO_ggc_collect | |
1537 | TODO_remove_unused_locals /* todo_flags_finish */ | |
1538 } | |
1539 }; |