Mercurial > hg > CbC > CbC_gcc
comparison gcc/cfglayout.c @ 0:a06113de4d67
first commit
author | kent <kent@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Fri, 17 Jul 2009 14:47:48 +0900 |
parents | |
children | 77e2b8dfacca |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:a06113de4d67 |
---|---|
1 /* Basic block reordering routines for the GNU compiler. | |
2 Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 | |
3 Free Software Foundation, Inc. | |
4 | |
5 This file is part of GCC. | |
6 | |
7 GCC is free software; you can redistribute it and/or modify it under | |
8 the terms of the GNU General Public License as published by the Free | |
9 Software Foundation; either version 3, or (at your option) any later | |
10 version. | |
11 | |
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 for more details. | |
16 | |
17 You should have received a copy of the GNU General Public License | |
18 along with GCC; see the file COPYING3. If not see | |
19 <http://www.gnu.org/licenses/>. */ | |
20 | |
21 #include "config.h" | |
22 #include "system.h" | |
23 #include "coretypes.h" | |
24 #include "tm.h" | |
25 #include "tree.h" | |
26 #include "rtl.h" | |
27 #include "hard-reg-set.h" | |
28 #include "obstack.h" | |
29 #include "basic-block.h" | |
30 #include "insn-config.h" | |
31 #include "output.h" | |
32 #include "function.h" | |
33 #include "cfglayout.h" | |
34 #include "cfgloop.h" | |
35 #include "target.h" | |
36 #include "ggc.h" | |
37 #include "alloc-pool.h" | |
38 #include "flags.h" | |
39 #include "tree-pass.h" | |
40 #include "df.h" | |
41 #include "vecprim.h" | |
42 | |
/* Holds the interesting leading and trailing notes for the function:
   insns preceding the first basic block (header) and following the last
   one (footer).  Both are detached from the insn stream by
   record_effective_endpoints and spliced back by fixup_reorder_chain.  */
rtx cfg_layout_function_footer;
rtx cfg_layout_function_header;
46 | |
/* Forward declarations for the static helpers defined below.  */
static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (const_rtx);
57 | |
/* Detach the chain of insns FIRST..LAST (inclusive) from the function's
   insn stream and return FIRST.  The neighbours of the chain are spliced
   together, and the global first/last insn pointers are updated when the
   chain sat at either end of the stream.  */
rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  /* Cut the chain's outward links first...  */
  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  /* ...then close the gap it leaves behind.  */
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    /* LAST was the last insn of the function.  */
    set_last_insn (prevfirst);
  if (!prevfirst)
    /* FIRST was the first insn of the function.  */
    set_first_insn (nextlast);
  return first;
}
76 | |
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers). If there are any such insns,
   we return the last one. Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  /* Never scan past the head of the next basic block.  */
  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* A barrier belongs to the preceding block.  */
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      gcc_unreachable ();
	      continue;
	    default:
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label immediately followed by a jump table (ADDR_VEC or
	     ADDR_DIFF_VEC) stays attached to this block.  */
	  if (NEXT_INSN (insn)
	      && JUMP_P (NEXT_INSN (insn))
	      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
		  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      /* Anything else terminates the scan.  */
      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	    continue;
	  default:
	    /* Move any other note to the end of the skipped range.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
161 | |
162 /* Locate or create a label for a given basic block. */ | |
163 | |
164 static rtx | |
165 label_for_bb (basic_block bb) | |
166 { | |
167 rtx label = BB_HEAD (bb); | |
168 | |
169 if (!LABEL_P (label)) | |
170 { | |
171 if (dump_file) | |
172 fprintf (dump_file, "Emitting label for block %d\n", bb->index); | |
173 | |
174 label = block_label (bb); | |
175 } | |
176 | |
177 return label; | |
178 } | |
179 | |
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  The inter-block
   insns are detached from the stream and stashed in the function
   header/footer and the per-block header/footer fields.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Find the first NOTE_INSN_BASIC_BLOCK (or first non-note insn).  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  /* Everything before the first basic block becomes the function
     header.  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's effective end and this
	 block's head form this block's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	bb->il.rtl->header = unlink_insn_chain (next_insn,
						PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      /* Trailing insns associated with this block form its footer.  */
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever remains after the last block is the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
223 | |
/* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
   numbers and files.  In order to be GGC friendly we need to use separate
   varrays.  This also slightly improves the memory locality in binary search.
   The _locs array contains locators where the given property changes.  The
   block_locators_blocks contains the scope block that is used for all insn
   locators greater than the corresponding block_locators_locs value and smaller
   than the following one.  Similarly for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *locations_locators_locs;
DEF_VEC_O(location_t);
DEF_VEC_ALLOC_O(location_t,heap);
static VEC(location_t,heap) *locations_locators_vals;
/* Locators assigned to the prologue and epilogue insns; 0 until set.  */
int prologue_locator;
int epilogue_locator;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   place are needed.  */
location_t curr_location, last_location;
static tree curr_block, last_block;
/* Current locator value; -1 means the locator machinery is inactive.  */
static int curr_rtl_loc = -1;
246 | |
/* Allocate insn locator datastructure.  */
void
insn_locators_alloc (void)
{
  prologue_locator = epilogue_locator = 0;

  block_locators_locs = VEC_alloc (int, heap, 32);
  block_locators_blocks = VEC_alloc (tree, gc, 32);
  locations_locators_locs = VEC_alloc (int, heap, 32);
  locations_locators_vals = VEC_alloc (location_t, heap, 32);

  /* -1 marks "nothing recorded yet" for the location caches; setting
     curr_rtl_loc to 0 activates the locator machinery (see the -1
     checks in set_curr_insn_source_location et al.).  */
  last_location = -1;
  curr_location = -1;
  curr_block = NULL;
  last_block = NULL;
  curr_rtl_loc = 0;
}
264 | |
/* At the end of emit stage, clear current location.  */
void
insn_locators_finalize (void)
{
  if (curr_rtl_loc >= 0)
    epilogue_locator = curr_insn_locator ();
  /* -1 deactivates the locator machinery.  */
  curr_rtl_loc = -1;
}
273 | |
/* Free the insn locator datastructures allocated by
   insn_locators_alloc.  */
void
insn_locators_free (void)
{
  prologue_locator = epilogue_locator = 0;

  VEC_free (int, heap, block_locators_locs);
  VEC_free (tree,gc, block_locators_blocks);
  VEC_free (int, heap, locations_locators_locs);
  VEC_free (location_t, heap, locations_locators_vals);
}
285 | |
286 | |
287 /* Set current location. */ | |
288 void | |
289 set_curr_insn_source_location (location_t location) | |
290 { | |
291 /* IV opts calls into RTL expansion to compute costs of operations. At this | |
292 time locators are not initialized. */ | |
293 if (curr_rtl_loc == -1) | |
294 return; | |
295 if (location == last_location) | |
296 return; | |
297 curr_location = location; | |
298 } | |
299 | |
/* Set current scope block.  A NULL argument is ignored rather than
   clearing the current block.  */
void
set_curr_insn_block (tree b)
{
  /* IV opts calls into RTL expansion to compute costs of operations.  At this
     time locators are not initialized.  */
  if (curr_rtl_loc == -1)
    return;
  if (b)
    curr_block = b;
}
311 | |
/* Return current insn locator.  Lazily records a new locator entry
   whenever the scope block or the source location changed since the
   last call.  */
int
curr_insn_locator (void)
{
  /* Locator machinery inactive.  */
  if (curr_rtl_loc == -1)
    return 0;
  if (last_block != curr_block)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
      VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
      last_block = curr_block;
    }
  if (last_location != curr_location)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
      VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
      last_location = curr_location;
    }
  return curr_rtl_loc;
}
334 | |
/* Pass execute function: switch the function into cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
341 | |
/* Pass execute function: leave cfglayout mode.  The aux chain is first
   set up to reflect the current next_bb order, so the block sequence is
   kept as-is.  */
static unsigned int
outof_cfg_layout_mode (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}
355 | |
/* Pass descriptor wrapping into_cfg_layout_mode.  */
struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
374 | |
/* Pass descriptor wrapping outof_cfg_layout_mode.  */
struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
393 | |
394 /* Return scope resulting from combination of S1 and S2. */ | |
395 static tree | |
396 choose_inner_scope (tree s1, tree s2) | |
397 { | |
398 if (!s1) | |
399 return s2; | |
400 if (!s2) | |
401 return s1; | |
402 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2)) | |
403 return s1; | |
404 return s2; | |
405 } | |
406 | |
/* Emit lexical block notes needed to change scope from S1 to S2
   before ORIG_INSN.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Walk both scope chains upward until they meet; the meeting point
     is the innermost common ancestor.  Scopes with the larger
     BLOCK_NUMBER are walked up first so the two cursors stay in
     step.  */
  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  Each BLOCK_BEG note is emitted before the previous
     one, so the notes end up in outermost-to-innermost order.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
450 | |
/* Return lexical scope block locator belongs to.  */
static tree
locator_scope (int loc)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  /* No entries recorded, or locator 0 ("no information").  */
  if (!max || !loc)
    return NULL;
  /* Binary search for the largest recorded locator <= LOC.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VEC_index (tree, block_locators_blocks, min);
}
489 | |
/* Return lexical scope block insn belongs to.  */
static tree
insn_scope (const_rtx insn)
{
  return locator_scope (INSN_LOCATOR (insn));
}
496 | |
/* Return the source location of the statement specified by the
   locator.  Binary search for the largest recorded locator <= LOC,
   mirroring locator_scope above.  */
location_t
locator_location (int loc)
{
  int max = VEC_length (int, locations_locators_locs);
  int min = 0;

  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, locations_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return *VEC_index (location_t, locations_locators_vals, min);
}
521 | |
522 /* Return source line of the statement that produced this insn. */ | |
523 int | |
524 locator_line (int loc) | |
525 { | |
526 expanded_location xloc; | |
527 if (!loc) | |
528 return 0; | |
529 else | |
530 xloc = expand_location (locator_location (loc)); | |
531 return xloc.line; | |
532 } | |
533 | |
/* Return line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}
540 | |
541 /* Return source file of the statement specified by LOC. */ | |
542 const char * | |
543 locator_file (int loc) | |
544 { | |
545 expanded_location xloc; | |
546 if (!loc) | |
547 return 0; | |
548 else | |
549 xloc = expand_location (locator_location (loc)); | |
550 return xloc.file; | |
551 } | |
552 | |
/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}
559 | |
560 /* Return true if LOC1 and LOC2 locators have the same location and scope. */ | |
561 bool | |
562 locator_eq (int loc1, int loc2) | |
563 { | |
564 if (loc1 == loc2) | |
565 return true; | |
566 if (locator_location (loc1) != locator_location (loc2)) | |
567 return false; | |
568 return locator_scope (loc1) == locator_scope (loc2); | |
569 } | |
570 | |
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  /* Start in the function's outermost scope.  */
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_P (insn)
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
					 insn_scope (XVECEXP (body, 0, i)));
	}
      if (! this_block)
	continue;

      /* Emit END/BEG notes wherever the scope changes between
	 consecutive insns.  */
      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
623 | |
624 | |
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  This also clears the visited flag on
   all basic blocks.  If STAY_IN_CFGLAYOUT_MODE is false, this function
   also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This function re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux and visited fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      bb->il.rtl->visited = 0;
      if (!stay_in_cfglayout_mode)
	bb->il.rtl->header = bb->il.rtl->footer = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
693 | |
694 | |
/* Given a reorder chain (linked through bb->aux), rearrange the code
   to match: splice the insn stream into the new block order, then patch
   jumps, labels and fallthru edges to preserve semantics.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  /* Re-attach the function header first, if any.  */
  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      /* Splice in the block's detached header insns, then the block
	 itself, then its detached footer insns.  */
      if (bb->il.rtl->header)
	{
	  if (insn)
	    NEXT_INSN (insn) = bb->il.rtl->header;
	  else
	    set_first_insn (bb->il.rtl->header);
	  PREV_INSN (bb->il.rtl->header) = insn;
	  insn = bb->il.rtl->header;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->il.rtl->footer)
	{
	  NEXT_INSN (insn) = bb->il.rtl->footer;
	  PREV_INSN (bb->il.rtl->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      /* The inversion succeeded: swap the roles of the
			 two edges.  */
		      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		      gcc_assert (could_fall_through
				  (e_taken->src, e_taken->dest));
#endif
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		  gcc_assert (could_fall_through
			      (e_taken->src, e_taken->dest));
#endif
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  continue;
		}
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  nb->il.rtl->visited = 1;
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall through across
	     section boundaries).  */
	  BB_COPY_PARTITION (e_fall->src, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (e->goto_locus && !(e->flags & EDGE_ABNORMAL))
	    {
	      basic_block nb;
	      rtx end;

	      /* Scan backwards through the source block for an insn
		 that already carries the edge's locus.  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!INSN_P (insn) || INSN_LOCATOR (insn) == 0))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
		continue;
	      /* A simple jump without a locator can simply adopt the
		 edge's locus.  */
	      if (simplejump_p (BB_END (e->src))
		  && INSN_LOCATOR (BB_END (e->src)) == 0)
		{
		  INSN_LOCATOR (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      /* Otherwise look for a matching locus at the head of the
		 destination block.  */
	      if (e->dest != EXIT_BLOCK_PTR)
		{
		  insn = BB_HEAD (e->dest);
		  end = NEXT_INSN (BB_END (e->dest));
		  while (insn != end && !INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_LOCATOR (insn)
		      && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
		    continue;
		}
	      /* No insn carries the locus: split the edge and give the
		 new block an insn (a nop if necessary) with that
		 locus.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATOR (BB_END (nb)) = e->goto_locus;
	    }
      }
}
954 | |
/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  /* Forward pass: every insn's PREV_INSN must point at its
     predecessor.  */
  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  /* Backward pass: every insn's NEXT_INSN must point at its
     successor.  */
  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}
981 | |
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  /* bb->aux non-NULL means BB is not already last in the chain.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  bb->il.rtl->footer = c->il.rtl->footer;
	  c->il.rtl->footer = NULL;
	}

      /* Unlink BB from the chain...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ...walk to the current tail...  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* ...and append BB there.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
1027 | |
1028 /* In case there are more than one fallthru predecessors of exit, force that | |
1029 there is only one. */ | |
1030 | |
1031 static void | |
1032 force_one_exit_fallthru (void) | |
1033 { | |
1034 edge e, predecessor = NULL; | |
1035 bool more = false; | |
1036 edge_iterator ei; | |
1037 basic_block forwarder, bb; | |
1038 | |
1039 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) | |
1040 if (e->flags & EDGE_FALLTHRU) | |
1041 { | |
1042 if (predecessor == NULL) | |
1043 predecessor = e; | |
1044 else | |
1045 { | |
1046 more = true; | |
1047 break; | |
1048 } | |
1049 } | |
1050 | |
1051 if (!more) | |
1052 return; | |
1053 | |
1054 /* Exit has several fallthru predecessors. Create a forwarder block for | |
1055 them. */ | |
1056 forwarder = split_edge (predecessor); | |
1057 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); ) | |
1058 { | |
1059 if (e->src == forwarder | |
1060 || !(e->flags & EDGE_FALLTHRU)) | |
1061 ei_next (&ei); | |
1062 else | |
1063 redirect_edge_and_branch_force (e, forwarder); | |
1064 } | |
1065 | |
1066 /* Fix up the chain of blocks -- make FORWARDER immediately precede the | |
1067 exit block. */ | |
1068 FOR_EACH_BB (bb) | |
1069 { | |
1070 if (bb->aux == NULL && bb != forwarder) | |
1071 { | |
1072 bb->aux = forwarder; | |
1073 break; | |
1074 } | |
1075 } | |
1076 } | |
1077 | |
1078 /* Return true in case it is possible to duplicate the basic block BB. */ | |
1079 | |
1080 /* We do not want to declare the function in a header file, since it should | |
1081 only be used through the cfghooks interface, and we do not want to move | |
1082 it to cfgrtl.c since it would require also moving quite a lot of related | |
1083 code. */ | |
1084 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block); | |
1085 | |
1086 bool | |
1087 cfg_layout_can_duplicate_bb_p (const_basic_block bb) | |
1088 { | |
1089 /* Do not attempt to duplicate tablejumps, as we need to unshare | |
1090 the dispatch table. This is difficult to do, as the instructions | |
1091 computing jump destination may be hoisted outside the basic block. */ | |
1092 if (tablejump_p (BB_END (bb), NULL, NULL)) | |
1093 return false; | |
1094 | |
1095 /* Do not duplicate blocks containing insns that can't be copied. */ | |
1096 if (targetm.cannot_copy_insn_p) | |
1097 { | |
1098 rtx insn = BB_HEAD (bb); | |
1099 while (1) | |
1100 { | |
1101 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn)) | |
1102 return false; | |
1103 if (insn == BB_END (bb)) | |
1104 break; | |
1105 insn = NEXT_INSN (insn); | |
1106 } | |
1107 } | |
1108 | |
1109 return true; | |
1110 } | |
1111 | |
/* Make a copy of the insn chain between FROM and TO, inclusive, emitting
   the copies after the current last insn.  Return the first insn of the
   copy.  Dispatch tables are never copied, labels are skipped, and most
   notes are stripped; see the NOTE handling below.  */

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    break;
	  emit_copy_of_insn_after (insn, get_last_insn ());
	  break;

	case CODE_LABEL:
	  /* Labels are not duplicated; jumps into the copy must be
	     fixed up by the caller.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_EPILOGUE_BEG:
	      /* Debug code expect these notes to exist just once.
		 Keep them in the master copy.
		 ??? It probably makes more sense to duplicate them for each
		 epilogue copy.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.
	       */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* The first insn after the scratch NOTE_INSN_DELETED emitted above is
     the start of the copy; delete the scratch note and return it.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
1184 /* Create a duplicate of the basic block BB. */ | |
1185 | |
1186 /* We do not want to declare the function in a header file, since it should | |
1187 only be used through the cfghooks interface, and we do not want to move | |
1188 it to cfgrtl.c since it would require also moving quite a lot of related | |
1189 code. */ | |
1190 extern basic_block cfg_layout_duplicate_bb (basic_block); | |
1191 | |
1192 basic_block | |
1193 cfg_layout_duplicate_bb (basic_block bb) | |
1194 { | |
1195 rtx insn; | |
1196 basic_block new_bb; | |
1197 | |
1198 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb)); | |
1199 new_bb = create_basic_block (insn, | |
1200 insn ? get_last_insn () : NULL, | |
1201 EXIT_BLOCK_PTR->prev_bb); | |
1202 | |
1203 BB_COPY_PARTITION (new_bb, bb); | |
1204 if (bb->il.rtl->header) | |
1205 { | |
1206 insn = bb->il.rtl->header; | |
1207 while (NEXT_INSN (insn)) | |
1208 insn = NEXT_INSN (insn); | |
1209 insn = duplicate_insn_chain (bb->il.rtl->header, insn); | |
1210 if (insn) | |
1211 new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ()); | |
1212 } | |
1213 | |
1214 if (bb->il.rtl->footer) | |
1215 { | |
1216 insn = bb->il.rtl->footer; | |
1217 while (NEXT_INSN (insn)) | |
1218 insn = NEXT_INSN (insn); | |
1219 insn = duplicate_insn_chain (bb->il.rtl->footer, insn); | |
1220 if (insn) | |
1221 new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ()); | |
1222 } | |
1223 | |
1224 return new_bb; | |
1225 } | |
1226 | |
1227 | |
1228 /* Main entry point to this module - initialize the datastructures for | |
1229 CFG layout changes. It keeps LOOPS up-to-date if not null. | |
1230 | |
1231 FLAGS is a set of additional flags to pass to cleanup_cfg(). */ | |
1232 | |
1233 void | |
1234 cfg_layout_initialize (unsigned int flags) | |
1235 { | |
1236 rtx x; | |
1237 basic_block bb; | |
1238 | |
1239 initialize_original_copy_tables (); | |
1240 | |
1241 cfg_layout_rtl_register_cfg_hooks (); | |
1242 | |
1243 record_effective_endpoints (); | |
1244 | |
1245 /* Make sure that the targets of non local gotos are marked. */ | |
1246 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1)) | |
1247 { | |
1248 bb = BLOCK_FOR_INSN (XEXP (x, 0)); | |
1249 bb->flags |= BB_NON_LOCAL_GOTO_TARGET; | |
1250 } | |
1251 | |
1252 cleanup_cfg (CLEANUP_CFGLAYOUT | flags); | |
1253 } | |
1254 | |
1255 /* Splits superblocks. */ | |
1256 void | |
1257 break_superblocks (void) | |
1258 { | |
1259 sbitmap superblocks; | |
1260 bool need = false; | |
1261 basic_block bb; | |
1262 | |
1263 superblocks = sbitmap_alloc (last_basic_block); | |
1264 sbitmap_zero (superblocks); | |
1265 | |
1266 FOR_EACH_BB (bb) | |
1267 if (bb->flags & BB_SUPERBLOCK) | |
1268 { | |
1269 bb->flags &= ~BB_SUPERBLOCK; | |
1270 SET_BIT (superblocks, bb->index); | |
1271 need = true; | |
1272 } | |
1273 | |
1274 if (need) | |
1275 { | |
1276 rebuild_jump_labels (get_insns ()); | |
1277 find_many_sub_basic_blocks (superblocks); | |
1278 } | |
1279 | |
1280 free (superblocks); | |
1281 } | |
1282 | |
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  force_one_exit_fallthru ();
  rtl_register_cfg_hooks ();
  /* Without an epilogue expander the fallthru exit predecessor must be
     moved last; see fixup_fallthru_exit_predecessor.  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  /* Reorder the insn stream to match the ->aux block chain.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
1310 | |
1311 /* Checks whether all N blocks in BBS array can be copied. */ | |
1312 bool | |
1313 can_copy_bbs_p (basic_block *bbs, unsigned n) | |
1314 { | |
1315 unsigned i; | |
1316 edge e; | |
1317 int ret = true; | |
1318 | |
1319 for (i = 0; i < n; i++) | |
1320 bbs[i]->flags |= BB_DUPLICATED; | |
1321 | |
1322 for (i = 0; i < n; i++) | |
1323 { | |
1324 /* In case we should redirect abnormal edge during duplication, fail. */ | |
1325 edge_iterator ei; | |
1326 FOR_EACH_EDGE (e, ei, bbs[i]->succs) | |
1327 if ((e->flags & EDGE_ABNORMAL) | |
1328 && (e->dest->flags & BB_DUPLICATED)) | |
1329 { | |
1330 ret = false; | |
1331 goto end; | |
1332 } | |
1333 | |
1334 if (!can_duplicate_block_p (bbs[i])) | |
1335 { | |
1336 ret = false; | |
1337 break; | |
1338 } | |
1339 } | |
1340 | |
1341 end: | |
1342 for (i = 0; i < n; i++) | |
1343 bbs[i]->flags &= ~BB_DUPLICATED; | |
1344 | |
1345 return ret; | |
1346 } | |
1347 | |
/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated and copies of those of them
   that lead into BBS are redirected to appropriate newly created block.  The
   function assigns bbs into loops (copy of basic block bb is assigned to
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally (LOOPS structure that contains the information
   about dominators is passed to enable this).

   BASE is the superloop to that basic block belongs; if its header or latch
   is copied, we do not set the new blocks as header or latch.

   Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
   also in the same order.

   Newly created basic blocks are put after the basic block AFTER in the
   instruction stream, and the order of the blocks in BBS array is preserved.  */

void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
	  edge *edges, unsigned num_edges, edge *new_edges,
	  struct loop *base, basic_block after)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
      after = new_bb;
      /* BB_DUPLICATED marks the originals so that edges among them can
	 be recognized in the redirection pass below.  */
      bb->flags |= BB_DUPLICATED;
      /* Possibly set loop header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
	new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
	new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      /* Only set the dominator of the copy when the original's
	 dominator lies inside the copied region; otherwise the caller
	 is responsible for it.  */
      if (dom_bb->flags & BB_DUPLICATED)
	{
	  dom_bb = get_bb_copy (dom_bb);
	  set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
	}
    }

  /* Redirect edges.  */
  for (j = 0; j < num_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
	{
	  /* Record the copy of each requested edge in NEW_EDGES.  */
	  for (j = 0; j < num_edges; j++)
	    if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
	      new_edges[j] = e;

	  /* Edges leaving the copied region keep their destination;
	     edges into the region are retargeted to the copies.  */
	  if (!(e->dest->flags & BB_DUPLICATED))
	    continue;
	  redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
	}
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;
}
1430 | |
1431 #include "gt-cfglayout.h" |