Mercurial > hg > CbC > CbC_gcc
annotate gcc/tree-ssa-live.c @ 37:d096b2ff82d9
Added tag gcc.4.4.2 for changeset 855418dad1a3
author | e075725 |
---|---|
date | Tue, 22 Dec 2009 21:52:56 +0900 |
parents | 58ad6c70ea60 |
children | 77e2b8dfacca |
rev | line source |
---|---|
0 | 1 /* Liveness for SSA trees. |
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009 Free Software Foundation, | |
3 Inc. | |
4 Contributed by Andrew MacLeod <amacleod@redhat.com> | |
5 | |
6 This file is part of GCC. | |
7 | |
8 GCC is free software; you can redistribute it and/or modify | |
9 it under the terms of the GNU General Public License as published by | |
10 the Free Software Foundation; either version 3, or (at your option) | |
11 any later version. | |
12 | |
13 GCC is distributed in the hope that it will be useful, | |
14 but WITHOUT ANY WARRANTY; without even the implied warranty of | |
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
16 GNU General Public License for more details. | |
17 | |
18 You should have received a copy of the GNU General Public License | |
19 along with GCC; see the file COPYING3. If not see | |
20 <http://www.gnu.org/licenses/>. */ | |
21 | |
22 #include "config.h" | |
23 #include "system.h" | |
24 #include "coretypes.h" | |
25 #include "tm.h" | |
26 #include "tree.h" | |
27 #include "diagnostic.h" | |
28 #include "bitmap.h" | |
29 #include "tree-flow.h" | |
30 #include "tree-dump.h" | |
31 #include "tree-ssa-live.h" | |
32 #include "toplev.h" | |
33 #include "debug.h" | |
34 #include "flags.h" | |
35 | |
36 #ifdef ENABLE_CHECKING | |
37 static void verify_live_on_entry (tree_live_info_p); | |
38 #endif | |
39 | |
40 | |
41 /* VARMAP maintains a mapping from SSA version number to real variables. | |
42 | |
43 All SSA_NAMES are divided into partitions. Initially each ssa_name is the | |
44 only member of it's own partition. Coalescing will attempt to group any | |
45 ssa_names which occur in a copy or in a PHI node into the same partition. | |
46 | |
47 At the end of out-of-ssa, each partition becomes a "real" variable and is | |
48 rewritten as a compiler variable. | |
49 | |
50 The var_map data structure is used to manage these partitions. It allows | |
51 partitions to be combined, and determines which partition belongs to what | |
52 ssa_name or variable, and vice versa. */ | |
53 | |
54 | |
55 /* This routine will initialize the basevar fields of MAP. */ | |
56 | |
57 static void | |
58 var_map_base_init (var_map map) | |
59 { | |
60 int x, num_part, num; | |
61 tree var; | |
62 var_ann_t ann; | |
63 | |
64 num = 0; | |
65 num_part = num_var_partitions (map); | |
66 | |
67 /* If a base table already exists, clear it, otherwise create it. */ | |
68 if (map->partition_to_base_index != NULL) | |
69 { | |
70 free (map->partition_to_base_index); | |
71 VEC_truncate (tree, map->basevars, 0); | |
72 } | |
73 else | |
74 map->basevars = VEC_alloc (tree, heap, MAX (40, (num_part / 10))); | |
75 | |
76 map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part); | |
77 | |
78 /* Build the base variable list, and point partitions at their bases. */ | |
79 for (x = 0; x < num_part; x++) | |
80 { | |
81 var = partition_to_var (map, x); | |
82 if (TREE_CODE (var) == SSA_NAME) | |
83 var = SSA_NAME_VAR (var); | |
84 ann = var_ann (var); | |
85 /* If base variable hasn't been seen, set it up. */ | |
86 if (!ann->base_var_processed) | |
87 { | |
88 ann->base_var_processed = 1; | |
89 VAR_ANN_BASE_INDEX (ann) = num++; | |
90 VEC_safe_push (tree, heap, map->basevars, var); | |
91 } | |
92 map->partition_to_base_index[x] = VAR_ANN_BASE_INDEX (ann); | |
93 } | |
94 | |
95 map->num_basevars = num; | |
96 | |
97 /* Now clear the processed bit. */ | |
98 for (x = 0; x < num; x++) | |
99 { | |
100 var = VEC_index (tree, map->basevars, x); | |
101 var_ann (var)->base_var_processed = 0; | |
102 } | |
103 | |
104 #ifdef ENABLE_CHECKING | |
105 for (x = 0; x < num_part; x++) | |
106 { | |
107 tree var2; | |
108 var = SSA_NAME_VAR (partition_to_var (map, x)); | |
109 var2 = VEC_index (tree, map->basevars, basevar_index (map, x)); | |
110 gcc_assert (var == var2); | |
111 } | |
112 #endif | |
113 } | |
114 | |
115 | |
116 /* Remove the base table in MAP. */ | |
117 | |
118 static void | |
119 var_map_base_fini (var_map map) | |
120 { | |
121 /* Free the basevar info if it is present. */ | |
122 if (map->partition_to_base_index != NULL) | |
123 { | |
124 VEC_free (tree, heap, map->basevars); | |
125 free (map->partition_to_base_index); | |
126 map->partition_to_base_index = NULL; | |
127 map->num_basevars = 0; | |
128 } | |
129 } | |
130 /* Create a variable partition map of SIZE, initialize and return it. */ | |
131 | |
132 var_map | |
133 init_var_map (int size) | |
134 { | |
135 var_map map; | |
136 | |
137 map = (var_map) xmalloc (sizeof (struct _var_map)); | |
138 map->var_partition = partition_new (size); | |
139 map->partition_to_var | |
140 = (tree *)xmalloc (size * sizeof (tree)); | |
141 memset (map->partition_to_var, 0, size * sizeof (tree)); | |
142 | |
143 map->partition_to_view = NULL; | |
144 map->view_to_partition = NULL; | |
145 map->num_partitions = size; | |
146 map->partition_size = size; | |
147 map->num_basevars = 0; | |
148 map->partition_to_base_index = NULL; | |
149 map->basevars = NULL; | |
150 return map; | |
151 } | |
152 | |
153 | |
154 /* Free memory associated with MAP. */ | |
155 | |
156 void | |
157 delete_var_map (var_map map) | |
158 { | |
159 var_map_base_fini (map); | |
160 free (map->partition_to_var); | |
161 partition_delete (map->var_partition); | |
162 if (map->partition_to_view) | |
163 free (map->partition_to_view); | |
164 if (map->view_to_partition) | |
165 free (map->view_to_partition); | |
166 free (map); | |
167 } | |
168 | |
169 | |
170 /* This function will combine the partitions in MAP for VAR1 and VAR2. It | |
171 Returns the partition which represents the new partition. If the two | |
172 partitions cannot be combined, NO_PARTITION is returned. */ | |
173 | |
174 int | |
175 var_union (var_map map, tree var1, tree var2) | |
176 { | |
177 int p1, p2, p3; | |
178 tree root_var = NULL_TREE; | |
179 tree other_var = NULL_TREE; | |
180 | |
181 /* This is independent of partition_to_view. If partition_to_view is | |
182 on, then whichever one of these partitions is absorbed will never have a | |
183 dereference into the partition_to_view array any more. */ | |
184 | |
185 if (TREE_CODE (var1) == SSA_NAME) | |
186 p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1)); | |
187 else | |
188 { | |
189 p1 = var_to_partition (map, var1); | |
190 if (map->view_to_partition) | |
191 p1 = map->view_to_partition[p1]; | |
192 root_var = var1; | |
193 } | |
194 | |
195 if (TREE_CODE (var2) == SSA_NAME) | |
196 p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2)); | |
197 else | |
198 { | |
199 p2 = var_to_partition (map, var2); | |
200 if (map->view_to_partition) | |
201 p2 = map->view_to_partition[p2]; | |
202 | |
203 /* If there is no root_var set, or it's not a user variable, set the | |
204 root_var to this one. */ | |
205 if (!root_var || (DECL_P (root_var) && DECL_IGNORED_P (root_var))) | |
206 { | |
207 other_var = root_var; | |
208 root_var = var2; | |
209 } | |
210 else | |
211 other_var = var2; | |
212 } | |
213 | |
214 gcc_assert (p1 != NO_PARTITION); | |
215 gcc_assert (p2 != NO_PARTITION); | |
216 | |
217 if (p1 == p2) | |
218 p3 = p1; | |
219 else | |
220 p3 = partition_union (map->var_partition, p1, p2); | |
221 | |
222 if (map->partition_to_view) | |
223 p3 = map->partition_to_view[p3]; | |
224 | |
225 if (root_var) | |
226 change_partition_var (map, root_var, p3); | |
227 if (other_var) | |
228 change_partition_var (map, other_var, p3); | |
229 | |
230 return p3; | |
231 } | |
232 | |
233 | |
234 /* Compress the partition numbers in MAP such that they fall in the range | |
235 0..(num_partitions-1) instead of wherever they turned out during | |
236 the partitioning exercise. This removes any references to unused | |
237 partitions, thereby allowing bitmaps and other vectors to be much | |
238 denser. | |
239 | |
240 This is implemented such that compaction doesn't affect partitioning. | |
241 Ie., once partitions are created and possibly merged, running one | |
242 or more different kind of compaction will not affect the partitions | |
243 themselves. Their index might change, but all the same variables will | |
244 still be members of the same partition group. This allows work on reduced | |
245 sets, and no loss of information when a larger set is later desired. | |
246 | |
247 In particular, coalescing can work on partitions which have 2 or more | |
248 definitions, and then 'recompact' later to include all the single | |
249 definitions for assignment to program variables. */ | |
250 | |
251 | |
252 /* Set MAP back to the initial state of having no partition view. Return a | |
253 bitmap which has a bit set for each partition number which is in use in the | |
254 varmap. */ | |
255 | |
256 static bitmap | |
257 partition_view_init (var_map map) | |
258 { | |
259 bitmap used; | |
260 int tmp; | |
261 unsigned int x; | |
262 | |
263 used = BITMAP_ALLOC (NULL); | |
264 | |
265 /* Already in a view? Abandon the old one. */ | |
266 if (map->partition_to_view) | |
267 { | |
268 free (map->partition_to_view); | |
269 map->partition_to_view = NULL; | |
270 } | |
271 if (map->view_to_partition) | |
272 { | |
273 free (map->view_to_partition); | |
274 map->view_to_partition = NULL; | |
275 } | |
276 | |
277 /* Find out which partitions are actually referenced. */ | |
278 for (x = 0; x < map->partition_size; x++) | |
279 { | |
280 tmp = partition_find (map->var_partition, x); | |
281 if (map->partition_to_var[tmp] != NULL_TREE && !bitmap_bit_p (used, tmp)) | |
282 bitmap_set_bit (used, tmp); | |
283 } | |
284 | |
285 map->num_partitions = map->partition_size; | |
286 return used; | |
287 } | |
288 | |
289 | |
290 /* This routine will finalize the view data for MAP based on the partitions | |
291 set in SELECTED. This is either the same bitmap returned from | |
292 partition_view_init, or a trimmed down version if some of those partitions | |
293 were not desired in this view. SELECTED is freed before returning. */ | |
294 | |
295 static void | |
296 partition_view_fini (var_map map, bitmap selected) | |
297 { | |
298 bitmap_iterator bi; | |
299 unsigned count, i, x, limit; | |
300 tree var; | |
301 | |
302 gcc_assert (selected); | |
303 | |
304 count = bitmap_count_bits (selected); | |
305 limit = map->partition_size; | |
306 | |
307 /* If its a one-to-one ratio, we don't need any view compaction. */ | |
308 if (count < limit) | |
309 { | |
310 map->partition_to_view = (int *)xmalloc (limit * sizeof (int)); | |
311 memset (map->partition_to_view, 0xff, (limit * sizeof (int))); | |
312 map->view_to_partition = (int *)xmalloc (count * sizeof (int)); | |
313 | |
314 i = 0; | |
315 /* Give each selected partition an index. */ | |
316 EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi) | |
317 { | |
318 map->partition_to_view[x] = i; | |
319 map->view_to_partition[i] = x; | |
320 var = map->partition_to_var[x]; | |
321 /* If any one of the members of a partition is not an SSA_NAME, make | |
322 sure it is the representative. */ | |
323 if (TREE_CODE (var) != SSA_NAME) | |
324 change_partition_var (map, var, i); | |
325 i++; | |
326 } | |
327 gcc_assert (i == count); | |
328 map->num_partitions = i; | |
329 } | |
330 | |
331 BITMAP_FREE (selected); | |
332 } | |
333 | |
334 | |
335 /* Create a partition view which includes all the used partitions in MAP. If | |
336 WANT_BASES is true, create the base variable map as well. */ | |
337 | |
338 extern void | |
339 partition_view_normal (var_map map, bool want_bases) | |
340 { | |
341 bitmap used; | |
342 | |
343 used = partition_view_init (map); | |
344 partition_view_fini (map, used); | |
345 | |
346 if (want_bases) | |
347 var_map_base_init (map); | |
348 else | |
349 var_map_base_fini (map); | |
350 } | |
351 | |
352 | |
353 /* Create a partition view in MAP which includes just partitions which occur in | |
354 the bitmap ONLY. If WANT_BASES is true, create the base variable map | |
355 as well. */ | |
356 | |
357 extern void | |
358 partition_view_bitmap (var_map map, bitmap only, bool want_bases) | |
359 { | |
360 bitmap used; | |
361 bitmap new_partitions = BITMAP_ALLOC (NULL); | |
362 unsigned x, p; | |
363 bitmap_iterator bi; | |
364 | |
365 used = partition_view_init (map); | |
366 EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi) | |
367 { | |
368 p = partition_find (map->var_partition, x); | |
369 gcc_assert (bitmap_bit_p (used, p)); | |
370 bitmap_set_bit (new_partitions, p); | |
371 } | |
372 partition_view_fini (map, new_partitions); | |
373 | |
374 BITMAP_FREE (used); | |
375 if (want_bases) | |
376 var_map_base_init (map); | |
377 else | |
378 var_map_base_fini (map); | |
379 } | |
380 | |
381 | |
382 /* This function is used to change the representative variable in MAP for VAR's | |
383 partition to a regular non-ssa variable. This allows partitions to be | |
384 mapped back to real variables. */ | |
385 | |
386 void | |
387 change_partition_var (var_map map, tree var, int part) | |
388 { | |
389 var_ann_t ann; | |
390 | |
391 gcc_assert (TREE_CODE (var) != SSA_NAME); | |
392 | |
393 ann = var_ann (var); | |
394 ann->out_of_ssa_tag = 1; | |
395 VAR_ANN_PARTITION (ann) = part; | |
396 if (map->view_to_partition) | |
397 map->partition_to_var[map->view_to_partition[part]] = var; | |
398 } | |
399 | |
400 | |
401 static inline void mark_all_vars_used (tree *, void *data); | |
402 | |
403 /* Helper function for mark_all_vars_used, called via walk_tree. */ | |
404 | |
405 static tree | |
406 mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data) | |
407 { | |
408 tree t = *tp; | |
409 enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t)); | |
410 tree b; | |
411 | |
412 if (TREE_CODE (t) == SSA_NAME) | |
413 t = SSA_NAME_VAR (t); | |
414 | |
415 if (IS_EXPR_CODE_CLASS (c) | |
416 && (b = TREE_BLOCK (t)) != NULL) | |
417 TREE_USED (b) = true; | |
418 | |
419 /* Ignore TREE_ORIGINAL for TARGET_MEM_REFS, as well as other | |
420 fields that do not contain vars. */ | |
421 if (TREE_CODE (t) == TARGET_MEM_REF) | |
422 { | |
423 mark_all_vars_used (&TMR_SYMBOL (t), data); | |
424 mark_all_vars_used (&TMR_BASE (t), data); | |
425 mark_all_vars_used (&TMR_INDEX (t), data); | |
426 *walk_subtrees = 0; | |
427 return NULL; | |
428 } | |
429 | |
430 /* Only need to mark VAR_DECLS; parameters and return results are not | |
431 eliminated as unused. */ | |
432 if (TREE_CODE (t) == VAR_DECL) | |
433 { | |
434 if (data != NULL && bitmap_bit_p ((bitmap) data, DECL_UID (t))) | |
435 { | |
436 bitmap_clear_bit ((bitmap) data, DECL_UID (t)); | |
437 mark_all_vars_used (&DECL_INITIAL (t), data); | |
438 } | |
439 set_is_used (t); | |
440 } | |
441 | |
442 if (IS_TYPE_OR_DECL_P (t)) | |
443 *walk_subtrees = 0; | |
444 | |
445 return NULL; | |
446 } | |
447 | |
448 /* Mark the scope block SCOPE and its subblocks unused when they can be | |
449 possibly eliminated if dead. */ | |
450 | |
451 static void | |
452 mark_scope_block_unused (tree scope) | |
453 { | |
454 tree t; | |
455 TREE_USED (scope) = false; | |
456 if (!(*debug_hooks->ignore_block) (scope)) | |
457 TREE_USED (scope) = true; | |
458 for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t)) | |
459 mark_scope_block_unused (t); | |
460 } | |
461 | |
462 /* Look if the block is dead (by possibly eliminating its dead subblocks) | |
463 and return true if so. | |
464 Block is declared dead if: | |
465 1) No statements are associated with it. | |
466 2) Declares no live variables | |
467 3) All subblocks are dead | |
468 or there is precisely one subblocks and the block | |
469 has same abstract origin as outer block and declares | |
470 no variables, so it is pure wrapper. | |
471 When we are not outputting full debug info, we also eliminate dead variables | |
472 out of scope blocks to let them to be recycled by GGC and to save copying work | |
473 done by the inliner. */ | |
474 | |
static bool
remove_unused_scope_block_p (tree scope)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  var_ann_t ann;
  int nsubblocks = 0;   /* Count of live subblocks kept under SCOPE.  */

  /* First pass: prune SCOPE's variable list in place via T, a pointer to
     the chain slot being examined.  */
  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &TREE_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of function it was nested into was eliminated.

	 TODO: We can actually look into cgraph to see if function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;
      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  /* Unlink *T; re-examine the same slot next iteration.  */
	  *t = TREE_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 Exception are the scope blocks not containing any instructions
	 at all so user can't get into the scopes at first place.  */
      else if ((ann = var_ann (*t)) != NULL
		&& ann->used)
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing saving quite
	 a lot of memory.

	 For sake of -g3, we keep around those vars but we don't count this as
	 use of block, so innermost block with no used vars and no instructions
	 can be considered dead.  We only want to keep around blocks user can
	 breakpoint into and ask about value of optimized out variables.

	 Similarly we need to keep around types at least until all variables of
	 all nested blocks are gone.  We track no information on whether given
	 type is used or not.  */

      else if (debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE
	       /* Removing declarations before inlining is going to affect
		  DECL_UID that in turn is going to affect hashtables and
		  code generation.  */
	       || !cfun->after_inlining)
	;
      else
	{
	  /* Terse/no debug info after inlining: drop the unused var.  */
	  *t = TREE_CHAIN (*t);
	  next = t;
	}
    }

  /* Second pass: recurse into subblocks, splicing out dead ones.  A dead
     subblock with live children is replaced by its own subblock chain,
     re-parented to SCOPE.  */
  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    /* Splice the grandchildren into *T's place and fix their
	       SUPERCONTEXT to point at SCOPE's level.  */
	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks ++;
	  }
	else
	  /* Dead leaf subblock: just unlink it.  */
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks ++;
      }


  /* Decide SCOPE's own fate.  */
  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* If there are live subblocks and we still have some unused variables
     or types declared, we must keep them.
     Before inlining we must not depend on debug info verbosity to keep
     DECL_UIDs stable.  */
  else if (!cfun->after_inlining && BLOCK_VARS (scope))
    unused = false;
  /* For terse debug info we can eliminate info on unused variables.  */
  else if (debug_info_level == DINFO_LEVEL_NONE
	   || debug_info_level == DINFO_LEVEL_TERSE)
    {
      /* Even for -g0/-g1 don't prune outer scopes from artificial
	 functions, otherwise diagnostics using tree_nonartificial_location
	 will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
	{
	  tree ao = scope;

	  /* Walk to the ultimate abstract origin of this block.  */
	  while (ao
		 && TREE_CODE (ao) == BLOCK
		 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	    ao = BLOCK_ABSTRACT_ORIGIN (ao);
	  if (ao
	      && TREE_CODE (ao) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    unused = false;
	}
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined function.
     Inlined functions are always represented by block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (BLOCK_SOURCE_LOCATION (scope) == UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}
620 | |
621 /* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be | |
622 eliminated during the tree->rtl conversion process. */ | |
623 | |
static inline void
mark_all_vars_used (tree *expr_p, void *data)
{
  /* DATA is either NULL or a bitmap of global-variable DECL_UIDs still
     believed unused; mark_all_vars_used_1 clears bits as uses appear.  */
  walk_tree (expr_p, mark_all_vars_used_1, data, NULL);
}
629 | |
630 /* Dump scope blocks. */ | |
631 | |
/* Dump scope block SCOPE (and recursively its subblocks, indented two
   further columns) to FILE at indentation INDENT, using the TDF_* dump
   FLAGS when printing declarations.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s",indent, "" , BLOCK_NUMBER (scope),
  	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)": "");
  /* Show the source position when the block records one.  */
  if (BLOCK_SOURCE_LOCATION (scope) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  /* For inlined/abstract blocks, show where the block came from.  */
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  fprintf (file, " \n");
  /* Dump the block's own variables, flagging unused ones.  */
  for (var = BLOCK_VARS (scope); var; var = TREE_CHAIN (var))
    {
      bool used = false;
      var_ann_t ann;

      if ((ann = var_ann (var))
	  && ann->used)
	used = true;

      fprintf (file, "%*s",indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "%s\n", used ? "" : " (unused)");
    }
  /* Dump variables the inliner moved out of this block.  */
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s",indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n",indent, "");
}
683 | |
/* Dump the scope block tree of the current function to FILE, using the
   TDF_* dump FLAGS when printing declarations.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}
689 | |
690 /* Remove local variables that are not referenced in the IL. */ | |
691 | |
void
remove_unused_locals (void)
{
  basic_block bb;
  tree t, *cell;
  referenced_var_iterator rvi;
  var_ann_t ann;
  /* Bitmap of DECL_UIDs of global vars provisionally deemed unused;
     allocated lazily only when such a var is met.  */
  bitmap global_unused_vars = NULL;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  /* Assume all locals are unused.  */
  FOR_EACH_REFERENCED_VAR (t, rvi)
    var_ann (t)->used = false;

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  /* Any statement keeps its enclosing scope block alive.  */
	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i), NULL);
	}

      /* PHI nodes are not regular statements; walk them separately.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          use_operand_p arg_p;
          ssa_op_iter i;
	  tree def;
	  gimple phi = gsi_stmt (gsi);

	  /* No point processing globals.  */
	  if (is_global_var (SSA_NAME_VAR (gimple_phi_result (phi))))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def, NULL);

          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
            {
	      tree arg = USE_FROM_PTR (arg_p);
	      mark_all_vars_used (&arg, NULL);
            }
        }

      /* A goto with a location keeps its destination block alive.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->goto_locus)
	  TREE_USED (e->goto_block) = true;
    }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local vars from local_decls; unmarked globals are
     only recorded in GLOBAL_UNUSED_VARS at this point, since a later
     initializer scan may still prove them used.  */
  for (cell = &cfun->local_decls; *cell; )
    {
      tree var = TREE_VALUE (*cell);

      if (TREE_CODE (var) != FUNCTION_DECL
	  && (!(ann = var_ann (var))
	      || !ann->used))
	{
	  if (is_global_var (var))
	    {
	      if (global_unused_vars == NULL)
		global_unused_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_unused_vars, DECL_UID (var));
	    }
	  else
	    {
	      /* Unlink the cell; *CELL now holds the next entry.  */
	      *cell = TREE_CHAIN (*cell);
	      continue;
	    }
	}
      else if (TREE_CODE (var) == VAR_DECL
	       && DECL_HARD_REGISTER (var)
	       && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;
      cell = &TREE_CHAIN (*cell);
    }

  /* Remove unmarked global vars from local_decls.  */
  if (global_unused_vars != NULL)
    {
      /* Initializers of used globals may reference other globals; clear
	 those from the unused set first.  */
      for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
	{
	  tree var = TREE_VALUE (t);

	  if (TREE_CODE (var) == VAR_DECL
	      && is_global_var (var)
	      && (ann = var_ann (var)) != NULL
	      && ann->used)
	    mark_all_vars_used (&DECL_INITIAL (var), global_unused_vars);
	}

      for (cell = &cfun->local_decls; *cell; )
	{
	  tree var = TREE_VALUE (*cell);

	  if (TREE_CODE (var) == VAR_DECL
	      && is_global_var (var)
	      && bitmap_bit_p (global_unused_vars, DECL_UID (var)))
	    *cell = TREE_CHAIN (*cell);
	  else
	    cell = &TREE_CHAIN (*cell);
	}
      BITMAP_FREE (global_unused_vars);
    }

  /* Remove unused variables from REFERENCED_VARs.  As a special
     exception keep the variables that are believed to be aliased.
     Those can't be easily removed from the alias sets and operand
     caches.  They will be removed shortly after the next may_alias
     pass is performed.  */
  FOR_EACH_REFERENCED_VAR (t, rvi)
    if (!is_global_var (t)
	&& !MTAG_P (t)
	&& TREE_CODE (t) != PARM_DECL
	&& TREE_CODE (t) != RESULT_DECL
	&& !(ann = var_ann (t))->used
	&& !ann->symbol_mem_tag
	&& !TREE_ADDRESSABLE (t))
      remove_referenced_var (t);
  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
}
839 | |
840 | |
841 /* Allocate and return a new live range information object base on MAP. */ | |
842 | |
843 static tree_live_info_p | |
844 new_tree_live_info (var_map map) | |
845 { | |
846 tree_live_info_p live; | |
847 unsigned x; | |
848 | |
849 live = (tree_live_info_p) xmalloc (sizeof (struct tree_live_info_d)); | |
850 live->map = map; | |
851 live->num_blocks = last_basic_block; | |
852 | |
853 live->livein = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap)); | |
854 for (x = 0; x < (unsigned)last_basic_block; x++) | |
855 live->livein[x] = BITMAP_ALLOC (NULL); | |
856 | |
857 live->liveout = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap)); | |
858 for (x = 0; x < (unsigned)last_basic_block; x++) | |
859 live->liveout[x] = BITMAP_ALLOC (NULL); | |
860 | |
861 live->work_stack = XNEWVEC (int, last_basic_block); | |
862 live->stack_top = live->work_stack; | |
863 | |
864 live->global = BITMAP_ALLOC (NULL); | |
865 return live; | |
866 } | |
867 | |
868 | |
869 /* Free storage for live range info object LIVE. */ | |
870 | |
871 void | |
872 delete_tree_live_info (tree_live_info_p live) | |
873 { | |
874 int x; | |
875 | |
876 BITMAP_FREE (live->global); | |
877 free (live->work_stack); | |
878 | |
879 for (x = live->num_blocks - 1; x >= 0; x--) | |
880 BITMAP_FREE (live->liveout[x]); | |
881 free (live->liveout); | |
882 | |
883 for (x = live->num_blocks - 1; x >= 0; x--) | |
884 BITMAP_FREE (live->livein[x]); | |
885 free (live->livein); | |
886 | |
887 free (live); | |
888 } | |
889 | |
890 | |
891 /* Visit basic block BB and propagate any required live on entry bits from | |
892 LIVE into the predecessors. VISITED is the bitmap of visited blocks. | |
893 TMP is a temporary work bitmap which is passed in to avoid reallocating | |
894 it each time. */ | |
895 | |
896 static void | |
897 loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited, | |
898 bitmap tmp) | |
899 { | |
900 edge e; | |
901 bool change; | |
902 edge_iterator ei; | |
903 basic_block pred_bb; | |
904 bitmap loe; | |
905 gcc_assert (!TEST_BIT (visited, bb->index)); | |
906 | |
907 SET_BIT (visited, bb->index); | |
908 loe = live_on_entry (live, bb); | |
909 | |
910 FOR_EACH_EDGE (e, ei, bb->preds) | |
911 { | |
912 pred_bb = e->src; | |
913 if (pred_bb == ENTRY_BLOCK_PTR) | |
914 continue; | |
915 /* TMP is variables live-on-entry from BB that aren't defined in the | |
916 predecessor block. This should be the live on entry vars to pred. | |
917 Note that liveout is the DEFs in a block while live on entry is | |
918 being calculated. */ | |
919 bitmap_and_compl (tmp, loe, live->liveout[pred_bb->index]); | |
920 | |
921 /* Add these bits to live-on-entry for the pred. if there are any | |
922 changes, and pred_bb has been visited already, add it to the | |
923 revisit stack. */ | |
924 change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp); | |
925 if (TEST_BIT (visited, pred_bb->index) && change) | |
926 { | |
927 RESET_BIT (visited, pred_bb->index); | |
928 *(live->stack_top)++ = pred_bb->index; | |
929 } | |
930 } | |
931 } | |
932 | |
933 | |
934 /* Using LIVE, fill in all the live-on-entry blocks between the defs and uses | |
935 of all the variables. */ | |
936 | |
937 static void | |
938 live_worklist (tree_live_info_p live) | |
939 { | |
940 unsigned b; | |
941 basic_block bb; | |
942 sbitmap visited = sbitmap_alloc (last_basic_block + 1); | |
943 bitmap tmp = BITMAP_ALLOC (NULL); | |
944 | |
945 sbitmap_zero (visited); | |
946 | |
947 /* Visit all the blocks in reverse order and propagate live on entry values | |
948 into the predecessors blocks. */ | |
949 FOR_EACH_BB_REVERSE (bb) | |
950 loe_visit_block (live, bb, visited, tmp); | |
951 | |
952 /* Process any blocks which require further iteration. */ | |
953 while (live->stack_top != live->work_stack) | |
954 { | |
955 b = *--(live->stack_top); | |
956 loe_visit_block (live, BASIC_BLOCK (b), visited, tmp); | |
957 } | |
958 | |
959 BITMAP_FREE (tmp); | |
960 sbitmap_free (visited); | |
961 } | |
962 | |
963 | |
964 /* Calculate the initial live on entry vector for SSA_NAME using immediate_use | |
965 links. Set the live on entry fields in LIVE. Def's are marked temporarily | |
966 in the liveout vector. */ | |
967 | |
968 static void | |
969 set_var_live_on_entry (tree ssa_name, tree_live_info_p live) | |
970 { | |
971 int p; | |
972 gimple stmt; | |
973 use_operand_p use; | |
974 basic_block def_bb = NULL; | |
975 imm_use_iterator imm_iter; | |
976 bool global = false; | |
977 | |
978 p = var_to_partition (live->map, ssa_name); | |
979 if (p == NO_PARTITION) | |
980 return; | |
981 | |
982 stmt = SSA_NAME_DEF_STMT (ssa_name); | |
983 if (stmt) | |
984 { | |
985 def_bb = gimple_bb (stmt); | |
986 /* Mark defs in liveout bitmap temporarily. */ | |
987 if (def_bb) | |
988 bitmap_set_bit (live->liveout[def_bb->index], p); | |
989 } | |
990 else | |
991 def_bb = ENTRY_BLOCK_PTR; | |
992 | |
993 /* Visit each use of SSA_NAME and if it isn't in the same block as the def, | |
994 add it to the list of live on entry blocks. */ | |
995 FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name) | |
996 { | |
997 gimple use_stmt = USE_STMT (use); | |
998 basic_block add_block = NULL; | |
999 | |
1000 if (gimple_code (use_stmt) == GIMPLE_PHI) | |
1001 { | |
1002 /* Uses in PHI's are considered to be live at exit of the SRC block | |
1003 as this is where a copy would be inserted. Check to see if it is | |
1004 defined in that block, or whether its live on entry. */ | |
1005 int index = PHI_ARG_INDEX_FROM_USE (use); | |
1006 edge e = gimple_phi_arg_edge (use_stmt, index); | |
1007 if (e->src != ENTRY_BLOCK_PTR) | |
1008 { | |
1009 if (e->src != def_bb) | |
1010 add_block = e->src; | |
1011 } | |
1012 } | |
1013 else | |
1014 { | |
1015 /* If its not defined in this block, its live on entry. */ | |
1016 basic_block use_bb = gimple_bb (use_stmt); | |
1017 if (use_bb != def_bb) | |
1018 add_block = use_bb; | |
1019 } | |
1020 | |
1021 /* If there was a live on entry use, set the bit. */ | |
1022 if (add_block) | |
1023 { | |
1024 global = true; | |
1025 bitmap_set_bit (live->livein[add_block->index], p); | |
1026 } | |
1027 } | |
1028 | |
1029 /* If SSA_NAME is live on entry to at least one block, fill in all the live | |
1030 on entry blocks between the def and all the uses. */ | |
1031 if (global) | |
1032 bitmap_set_bit (live->global, p); | |
1033 } | |
1034 | |
1035 | |
1036 /* Calculate the live on exit vectors based on the entry info in LIVEINFO. */ | |
1037 | |
1038 void | |
1039 calculate_live_on_exit (tree_live_info_p liveinfo) | |
1040 { | |
1041 basic_block bb; | |
1042 edge e; | |
1043 edge_iterator ei; | |
1044 | |
1045 /* live on entry calculations used liveout vectors for defs, clear them. */ | |
1046 FOR_EACH_BB (bb) | |
1047 bitmap_clear (liveinfo->liveout[bb->index]); | |
1048 | |
1049 /* Set all the live-on-exit bits for uses in PHIs. */ | |
1050 FOR_EACH_BB (bb) | |
1051 { | |
1052 gimple_stmt_iterator gsi; | |
1053 size_t i; | |
1054 | |
1055 /* Mark the PHI arguments which are live on exit to the pred block. */ | |
1056 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1057 { | |
1058 gimple phi = gsi_stmt (gsi); | |
1059 for (i = 0; i < gimple_phi_num_args (phi); i++) | |
1060 { | |
1061 tree t = PHI_ARG_DEF (phi, i); | |
1062 int p; | |
1063 | |
1064 if (TREE_CODE (t) != SSA_NAME) | |
1065 continue; | |
1066 | |
1067 p = var_to_partition (liveinfo->map, t); | |
1068 if (p == NO_PARTITION) | |
1069 continue; | |
1070 e = gimple_phi_arg_edge (phi, i); | |
1071 if (e->src != ENTRY_BLOCK_PTR) | |
1072 bitmap_set_bit (liveinfo->liveout[e->src->index], p); | |
1073 } | |
1074 } | |
1075 | |
1076 /* Add each successors live on entry to this bock live on exit. */ | |
1077 FOR_EACH_EDGE (e, ei, bb->succs) | |
1078 if (e->dest != EXIT_BLOCK_PTR) | |
1079 bitmap_ior_into (liveinfo->liveout[bb->index], | |
1080 live_on_entry (liveinfo, e->dest)); | |
1081 } | |
1082 } | |
1083 | |
1084 | |
1085 /* Given partition map MAP, calculate all the live on entry bitmaps for | |
1086 each partition. Return a new live info object. */ | |
1087 | |
1088 tree_live_info_p | |
1089 calculate_live_ranges (var_map map) | |
1090 { | |
1091 tree var; | |
1092 unsigned i; | |
1093 tree_live_info_p live; | |
1094 | |
1095 live = new_tree_live_info (map); | |
1096 for (i = 0; i < num_var_partitions (map); i++) | |
1097 { | |
1098 var = partition_to_var (map, i); | |
1099 if (var != NULL_TREE) | |
1100 set_var_live_on_entry (var, live); | |
1101 } | |
1102 | |
1103 live_worklist (live); | |
1104 | |
1105 #ifdef ENABLE_CHECKING | |
1106 verify_live_on_entry (live); | |
1107 #endif | |
1108 | |
1109 calculate_live_on_exit (live); | |
1110 return live; | |
1111 } | |
1112 | |
1113 | |
1114 /* Output partition map MAP to file F. */ | |
1115 | |
1116 void | |
1117 dump_var_map (FILE *f, var_map map) | |
1118 { | |
1119 int t; | |
1120 unsigned x, y; | |
1121 int p; | |
1122 | |
1123 fprintf (f, "\nPartition map \n\n"); | |
1124 | |
1125 for (x = 0; x < map->num_partitions; x++) | |
1126 { | |
1127 if (map->view_to_partition != NULL) | |
1128 p = map->view_to_partition[x]; | |
1129 else | |
1130 p = x; | |
1131 | |
1132 if (map->partition_to_var[p] == NULL_TREE) | |
1133 continue; | |
1134 | |
1135 t = 0; | |
1136 for (y = 1; y < num_ssa_names; y++) | |
1137 { | |
1138 p = partition_find (map->var_partition, y); | |
1139 if (map->partition_to_view) | |
1140 p = map->partition_to_view[p]; | |
1141 if (p == (int)x) | |
1142 { | |
1143 if (t++ == 0) | |
1144 { | |
1145 fprintf(f, "Partition %d (", x); | |
1146 print_generic_expr (f, partition_to_var (map, p), TDF_SLIM); | |
1147 fprintf (f, " - "); | |
1148 } | |
1149 fprintf (f, "%d ", y); | |
1150 } | |
1151 } | |
1152 if (t != 0) | |
1153 fprintf (f, ")\n"); | |
1154 } | |
1155 fprintf (f, "\n"); | |
1156 } | |
1157 | |
1158 | |
1159 /* Output live range info LIVE to file F, controlled by FLAG. */ | |
1160 | |
1161 void | |
1162 dump_live_info (FILE *f, tree_live_info_p live, int flag) | |
1163 { | |
1164 basic_block bb; | |
1165 unsigned i; | |
1166 var_map map = live->map; | |
1167 bitmap_iterator bi; | |
1168 | |
1169 if ((flag & LIVEDUMP_ENTRY) && live->livein) | |
1170 { | |
1171 FOR_EACH_BB (bb) | |
1172 { | |
1173 fprintf (f, "\nLive on entry to BB%d : ", bb->index); | |
1174 EXECUTE_IF_SET_IN_BITMAP (live->livein[bb->index], 0, i, bi) | |
1175 { | |
1176 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM); | |
1177 fprintf (f, " "); | |
1178 } | |
1179 fprintf (f, "\n"); | |
1180 } | |
1181 } | |
1182 | |
1183 if ((flag & LIVEDUMP_EXIT) && live->liveout) | |
1184 { | |
1185 FOR_EACH_BB (bb) | |
1186 { | |
1187 fprintf (f, "\nLive on exit from BB%d : ", bb->index); | |
1188 EXECUTE_IF_SET_IN_BITMAP (live->liveout[bb->index], 0, i, bi) | |
1189 { | |
1190 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM); | |
1191 fprintf (f, " "); | |
1192 } | |
1193 fprintf (f, "\n"); | |
1194 } | |
1195 } | |
1196 } | |
1197 | |
1198 | |
1199 #ifdef ENABLE_CHECKING | |
1200 /* Verify that SSA_VAR is a non-virtual SSA_NAME. */ | |
1201 | |
1202 void | |
1203 register_ssa_partition_check (tree ssa_var) | |
1204 { | |
1205 gcc_assert (TREE_CODE (ssa_var) == SSA_NAME); | |
1206 if (!is_gimple_reg (SSA_NAME_VAR (ssa_var))) | |
1207 { | |
1208 fprintf (stderr, "Illegally registering a virtual SSA name :"); | |
1209 print_generic_expr (stderr, ssa_var, TDF_SLIM); | |
1210 fprintf (stderr, " in the SSA->Normal phase.\n"); | |
1211 internal_error ("SSA corruption"); | |
1212 } | |
1213 } | |
1214 | |
1215 | |
/* Verify that the info in LIVE matches the current cfg.

   For every partition and every successor of the ENTRY block, check two
   invariants:
     - a partition marked live on entry must either have no real def
       (i.e. its def is a GIMPLE_NOP) and be the default def of its
       underlying variable, and
     - a default def that is NOT marked live on entry must be explainable
       by appearing only as a PHI argument of that block.
   Any violation is reported to stderr and counted; a nonzero count
   triggers the gcc_assert at the end.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR;
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR)
	continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  /* TMP is the block holding the def (NULL for a nop def);
	     D is the default def of VAR's underlying decl, if any.  */
	  tmp = gimple_bb (stmt);
	  d = gimple_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      /* Partition is live on entry to the function.  */
	      if (!gimple_nop_p (stmt))
		{
		  /* A real def plus live-on-entry means the name appears
		     to have multiple defs -- report it.  */
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
		{
		  /* A nop def is only legitimate for the default def.  */
		  if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
			{
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
		}
	    }
	  else
	    if (d == var)
	      {
		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gimple_stmt_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gimple phi = gsi_stmt (gsi);
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  /* NUM is only ever incremented, so this fires on any reported error.  */
  gcc_assert (num <= 0);
}
1317 #endif |