comparison gcc/ddg.c @ 111:04ced10e8804
gcc 7
author | kono |
---|---|
date | Fri, 27 Oct 2017 22:46:09 +0900 |
parents | f6334be47118 |
children | 84e7813d76e9 |
68:561a7518be6b | 111:04ced10e8804 |
---|---|
1 /* DDG - Data Dependence Graph implementation. | 1 /* DDG - Data Dependence Graph implementation. |
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010 | 2 Copyright (C) 2004-2017 Free Software Foundation, Inc. |
3 Free Software Foundation, Inc. | |
4 Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com> | 3 Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com> |
5 | 4 |
6 This file is part of GCC. | 5 This file is part of GCC. |
7 | 6 |
8 GCC is free software; you can redistribute it and/or modify it under | 7 GCC is free software; you can redistribute it and/or modify it under |
21 | 20 |
22 | 21 |
23 #include "config.h" | 22 #include "config.h" |
24 #include "system.h" | 23 #include "system.h" |
25 #include "coretypes.h" | 24 #include "coretypes.h" |
26 #include "tm.h" | 25 #include "backend.h" |
27 #include "diagnostic-core.h" | |
28 #include "rtl.h" | 26 #include "rtl.h" |
29 #include "tm_p.h" | 27 #include "df.h" |
30 #include "hard-reg-set.h" | |
31 #include "regs.h" | |
32 #include "function.h" | |
33 #include "flags.h" | |
34 #include "insn-config.h" | |
35 #include "insn-attr.h" | 28 #include "insn-attr.h" |
36 #include "except.h" | |
37 #include "recog.h" | |
38 #include "sched-int.h" | 29 #include "sched-int.h" |
39 #include "target.h" | |
40 #include "cfglayout.h" | |
41 #include "cfgloop.h" | |
42 #include "sbitmap.h" | |
43 #include "expr.h" | |
44 #include "bitmap.h" | |
45 #include "ddg.h" | 30 #include "ddg.h" |
31 #include "rtl-iter.h" | |
46 | 32 |
47 #ifdef INSN_SCHEDULING | 33 #ifdef INSN_SCHEDULING |
48 | 34 |
49 /* A flag indicating that a ddg edge belongs to an SCC or not. */ | 35 /* A flag indicating that a ddg edge belongs to an SCC or not. */ |
50 enum edge_flag {NOT_IN_SCC = 0, IN_SCC}; | 36 enum edge_flag {NOT_IN_SCC = 0, IN_SCC}; |
63 | 49 |
64 /* Auxiliary variable for mem_read_insn_p/mem_write_insn_p. */ | 50 /* Auxiliary variable for mem_read_insn_p/mem_write_insn_p. */ |
65 static bool mem_ref_p; | 51 static bool mem_ref_p; |
66 | 52 |
67 /* Auxiliary function for mem_read_insn_p. */ | 53 /* Auxiliary function for mem_read_insn_p. */ |
68 static int | 54 static void |
69 mark_mem_use (rtx *x, void *data ATTRIBUTE_UNUSED) | 55 mark_mem_use (rtx *x, void *) |
70 { | 56 { |
71 if (MEM_P (*x)) | 57 subrtx_iterator::array_type array; |
72 mem_ref_p = true; | 58 FOR_EACH_SUBRTX (iter, array, *x, NONCONST) |
73 return 0; | 59 if (MEM_P (*iter)) |
74 } | 60 { |
75 | 61 mem_ref_p = true; |
76 /* Auxiliary function for mem_read_insn_p. */ | 62 break; |
77 static void | 63 } |
78 mark_mem_use_1 (rtx *x, void *data) | |
79 { | |
80 for_each_rtx (x, mark_mem_use, data); | |
81 } | 64 } |
82 | 65 |
83 /* Returns nonzero if INSN reads from memory. */ | 66 /* Returns nonzero if INSN reads from memory. */ |
84 static bool | 67 static bool |
85 mem_read_insn_p (rtx insn) | 68 mem_read_insn_p (rtx_insn *insn) |
86 { | 69 { |
87 mem_ref_p = false; | 70 mem_ref_p = false; |
88 note_uses (&PATTERN (insn), mark_mem_use_1, NULL); | 71 note_uses (&PATTERN (insn), mark_mem_use, NULL); |
89 return mem_ref_p; | 72 return mem_ref_p; |
90 } | 73 } |
91 | 74 |
92 static void | 75 static void |
93 mark_mem_store (rtx loc, const_rtx setter ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED) | 76 mark_mem_store (rtx loc, const_rtx setter ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED) |
96 mem_ref_p = true; | 79 mem_ref_p = true; |
97 } | 80 } |
98 | 81 |
99 /* Returns nonzero if INSN writes to memory. */ | 82 /* Returns nonzero if INSN writes to memory. */ |
100 static bool | 83 static bool |
101 mem_write_insn_p (rtx insn) | 84 mem_write_insn_p (rtx_insn *insn) |
102 { | 85 { |
103 mem_ref_p = false; | 86 mem_ref_p = false; |
104 note_stores (PATTERN (insn), mark_mem_store, NULL); | 87 note_stores (PATTERN (insn), mark_mem_store, NULL); |
105 return mem_ref_p; | 88 return mem_ref_p; |
106 } | 89 } |
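The mark_mem_use hunk above swaps the removed for_each_rtx callback walker for the FOR_EACH_SUBRTX iterator from rtl-iter.h (hence the new #include). A minimal sketch of the iterator idiom, written as a stand-alone predicate instead of the flag-setting note_uses callback the file actually uses; the helper name contains_mem_p is hypothetical:

    /* Return true if X contains a MEM anywhere among its sub-rtxes.  */
    static bool
    contains_mem_p (const_rtx x)
    {
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, x, NONCONST)
        if (MEM_P (*iter))
          return true;
      return false;
    }

Unlike for_each_rtx, there is no global flag like mem_ref_p and no 0/1/-1 return protocol; the loop body can simply return or break, as the new mark_mem_use does.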
138 return false; | 121 return false; |
139 } | 122 } |
140 | 123 |
141 /* Returns nonzero if INSN reads from or writes to memory. */ | 124 /* Returns nonzero if INSN reads from or writes to memory. */ |
142 static bool | 125 static bool |
143 mem_access_insn_p (rtx insn) | 126 mem_access_insn_p (rtx_insn *insn) |
144 { | 127 { |
145 return rtx_mem_access_p (PATTERN (insn)); | 128 return rtx_mem_access_p (PATTERN (insn)); |
129 } | |
130 | |
131 /* Return true if DEF_INSN contains address being auto-inc or auto-dec | |
132 which is used in USE_INSN. Otherwise return false. The result is | |
133 being used to decide whether to remove the edge between def_insn and | |
134 use_insn when -fmodulo-sched-allow-regmoves is set. This function | |
135 doesn't need to consider the specific address register; no reg_moves | |
136 will be allowed for any life range defined by def_insn and used | |
137 by use_insn, if use_insn uses an address register auto-inc'ed by | |
138 def_insn. */ | |
139 bool | |
140 autoinc_var_is_used_p (rtx_insn *def_insn, rtx_insn *use_insn) | |
141 { | |
142 rtx note; | |
143 | |
144 for (note = REG_NOTES (def_insn); note; note = XEXP (note, 1)) | |
145 if (REG_NOTE_KIND (note) == REG_INC | |
146 && reg_referenced_p (XEXP (note, 0), PATTERN (use_insn))) | |
147 return true; | |
148 | |
149 return false; | |
150 } | |
151 | |
152 /* Return true if one of the definitions in INSN has MODE_CC. Otherwise | |
153 return false. */ | |
154 static bool | |
155 def_has_ccmode_p (rtx_insn *insn) | |
156 { | |
157 df_ref def; | |
158 | |
159 FOR_EACH_INSN_DEF (def, insn) | |
160 { | |
161 machine_mode mode = GET_MODE (DF_REF_REG (def)); | |
162 | |
163 if (GET_MODE_CLASS (mode) == MODE_CC) | |
164 return true; | |
165 } | |
166 | |
167 return false; | |
146 } | 168 } |
147 | 169 |
148 /* Computes the dependence parameters (latency, distance etc.), creates | 170 /* Computes the dependence parameters (latency, distance etc.), creates |
149 a ddg_edge and adds it to the given DDG. */ | 171 a ddg_edge and adds it to the given DDG. */ |
150 static void | 172 static void |
171 | 193 |
172 /* We currently choose not to create certain anti-deps edges and | 194 /* We currently choose not to create certain anti-deps edges and |
173 compensate for that by generating reg-moves based on the life-range | 195 compensate for that by generating reg-moves based on the life-range |
174 analysis. The anti-deps that will be deleted are the ones which | 196 analysis. The anti-deps that will be deleted are the ones which |
175 have true-deps edges in the opposite direction (in other words | 197 have true-deps edges in the opposite direction (in other words |
176 the kernel has only one def of the relevant register). TODO: | 198 the kernel has only one def of the relevant register). |
177 support the removal of all anti-deps edges, i.e. including those | 199 If the address that is being auto-inc or auto-dec in DEST_NODE |
200 is used in SRC_NODE then do not remove the edge to make sure | |
201 reg-moves will not be created for this address. | |
202 TODO: support the removal of all anti-deps edges, i.e. including those | |
178 whose register has multiple defs in the loop. */ | 203 whose register has multiple defs in the loop. */ |
179 if (flag_modulo_sched_allow_regmoves && (t == ANTI_DEP && dt == REG_DEP)) | 204 if (flag_modulo_sched_allow_regmoves |
205 && (t == ANTI_DEP && dt == REG_DEP) | |
206 && !def_has_ccmode_p (dest_node->insn) | |
207 && !autoinc_var_is_used_p (dest_node->insn, src_node->insn)) | |
180 { | 208 { |
181 rtx set; | 209 rtx set; |
182 | 210 |
183 set = single_set (dest_node->insn); | 211 set = single_set (dest_node->insn); |
184 /* TODO: Handle registers that REG_P is not true for them, i.e. | 212 /* TODO: Handle registers that REG_P is not true for them, i.e. |
248 add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def) | 276 add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def) |
249 { | 277 { |
250 int regno = DF_REF_REGNO (last_def); | 278 int regno = DF_REF_REGNO (last_def); |
251 struct df_link *r_use; | 279 struct df_link *r_use; |
252 int has_use_in_bb_p = false; | 280 int has_use_in_bb_p = false; |
253 rtx def_insn = DF_REF_INSN (last_def); | 281 rtx_insn *def_insn = DF_REF_INSN (last_def); |
254 ddg_node_ptr last_def_node = get_node_of_insn (g, def_insn); | 282 ddg_node_ptr last_def_node = get_node_of_insn (g, def_insn); |
255 ddg_node_ptr use_node; | 283 ddg_node_ptr use_node; |
256 #ifdef ENABLE_CHECKING | |
257 struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb); | |
258 #endif | |
259 df_ref first_def = df_bb_regno_first_def_find (g->bb, regno); | 284 df_ref first_def = df_bb_regno_first_def_find (g->bb, regno); |
260 | 285 |
261 gcc_assert (last_def_node); | 286 gcc_assert (last_def_node); |
262 gcc_assert (first_def); | 287 gcc_assert (first_def); |
263 | 288 |
264 #ifdef ENABLE_CHECKING | 289 if (flag_checking && DF_REF_ID (last_def) != DF_REF_ID (first_def)) |
265 if (DF_REF_ID (last_def) != DF_REF_ID (first_def)) | 290 { |
266 gcc_assert (!bitmap_bit_p (&bb_info->gen, | 291 struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb); |
267 DF_REF_ID (first_def))); | 292 gcc_assert (!bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def))); |
268 #endif | 293 } |
269 | 294 |
270 /* Create inter-loop true dependences and anti dependences. */ | 295 /* Create inter-loop true dependences and anti dependences. */ |
271 for (r_use = DF_REF_CHAIN (last_def); r_use != NULL; r_use = r_use->next) | 296 for (r_use = DF_REF_CHAIN (last_def); r_use != NULL; r_use = r_use->next) |
272 { | 297 { |
273 rtx use_insn = DF_REF_INSN (r_use->ref); | 298 rtx_insn *use_insn = DF_REF_INSN (r_use->ref); |
274 | 299 |
275 if (BLOCK_FOR_INSN (use_insn) != g->bb) | 300 if (BLOCK_FOR_INSN (use_insn) != g->bb) |
276 continue; | 301 continue; |
277 | 302 |
278 /* ??? Do not handle uses with DF_REF_IN_NOTE notes. */ | 303 /* ??? Do not handle uses with DF_REF_IN_NOTE notes. */ |
299 ddg_node_ptr first_def_node = get_node_of_insn (g, | 324 ddg_node_ptr first_def_node = get_node_of_insn (g, |
300 DF_REF_INSN (first_def)); | 325 DF_REF_INSN (first_def)); |
301 | 326 |
302 gcc_assert (first_def_node); | 327 gcc_assert (first_def_node); |
303 | 328 |
329 /* Always create the edge if the use node is a branch in | |
330 order to prevent the creation of reg-moves. | |
331 If the address that is being auto-inc or auto-dec in LAST_DEF | |
332 is used in USE_INSN then do not remove the edge to make sure | |
333 reg-moves will not be created for that address. */ | |
304 if (DF_REF_ID (last_def) != DF_REF_ID (first_def) | 334 if (DF_REF_ID (last_def) != DF_REF_ID (first_def) |
305 || !flag_modulo_sched_allow_regmoves) | 335 || !flag_modulo_sched_allow_regmoves |
336 || JUMP_P (use_node->insn) | |
337 || autoinc_var_is_used_p (DF_REF_INSN (last_def), use_insn) | |
338 || def_has_ccmode_p (DF_REF_INSN (last_def))) | |
306 create_ddg_dep_no_link (g, use_node, first_def_node, ANTI_DEP, | 339 create_ddg_dep_no_link (g, use_node, first_def_node, ANTI_DEP, |
307 REG_DEP, 1); | 340 REG_DEP, 1); |
308 | 341 |
309 } | 342 } |
310 } | 343 } |
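The checking code in add_cross_iteration_register_deps also shows the GCC 7 move from compile-time #ifdef ENABLE_CHECKING regions to the runtime flag_checking test, so the assertion is always compiled but only executed when checking is enabled. A hedged sketch of the pattern; the predicate some_invariant_p is illustrative and not taken from ddg.c:

    /* Old style: the check exists only in --enable-checking builds.  */
    #ifdef ENABLE_CHECKING
      gcc_assert (some_invariant_p ());
    #endif

    /* New style: always compiled, conditionally executed.  */
    if (flag_checking)
      gcc_assert (some_invariant_p ());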
346 add_cross_iteration_register_deps (g, rd); | 379 add_cross_iteration_register_deps (g, rd); |
347 } | 380 } |
348 } | 381 } |
349 | 382 |
350 | 383 |
351 static int | 384 /* Return true if two specified instructions have mem expr with conflicting |
352 walk_mems_2 (rtx *x, rtx mem) | 385 alias sets. */ |
353 { | 386 static bool |
354 if (MEM_P (*x)) | 387 insns_may_alias_p (rtx_insn *insn1, rtx_insn *insn2) |
355 { | 388 { |
356 if (may_alias_p (*x, mem)) | 389 subrtx_iterator::array_type array1; |
357 return 1; | 390 subrtx_iterator::array_type array2; |
358 | 391 FOR_EACH_SUBRTX (iter1, array1, PATTERN (insn1), NONCONST) |
359 return -1; | 392 { |
360 } | 393 const_rtx x1 = *iter1; |
361 return 0; | 394 if (MEM_P (x1)) |
362 } | 395 FOR_EACH_SUBRTX (iter2, array2, PATTERN (insn2), NONCONST) |
363 | 396 { |
364 static int | 397 const_rtx x2 = *iter2; |
365 walk_mems_1 (rtx *x, rtx *pat) | 398 if (MEM_P (x2) && may_alias_p (x2, x1)) |
366 { | 399 return true; |
367 if (MEM_P (*x)) | 400 } |
368 { | 401 } |
369 /* Visit all MEMs in *PAT and check independence. */ | 402 return false; |
370 if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x)) | 403 } |
371 /* Indicate that dependence was determined and stop traversal. */ | 404 |
372 return 1; | 405 /* Given two nodes, analyze their RTL insns and add intra-loop mem deps |
373 | 406 to ddg G. */ |
374 return -1; | 407 static void |
375 } | 408 add_intra_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to) |
376 return 0; | 409 { |
377 } | 410 |
378 | 411 if ((from->cuid == to->cuid) |
379 /* Return 1 if two specified instructions have mem expr with conflicting alias sets. */ | 413 /* Do not create edge if memory references have disjoint alias sets |
380 static int | 413 /* Do not create edge if memory references have disjoint alias sets |
381 insns_may_alias_p (rtx insn1, rtx insn2) | 414 or 'to' and 'from' are the same instruction. */ |
382 { | 415 return; |
383 /* For each pair of MEMs in INSN1 and INSN2 check their independence. */ | 416 |
384 return for_each_rtx (&PATTERN (insn1), (rtx_function) walk_mems_1, | 417 if (mem_write_insn_p (from->insn)) |
385 &PATTERN (insn2)); | 418 { |
419 if (mem_read_insn_p (to->insn)) | |
420 create_ddg_dep_no_link (g, from, to, | |
421 DEBUG_INSN_P (to->insn) | |
422 ? ANTI_DEP : TRUE_DEP, MEM_DEP, 0); | |
423 else | |
424 create_ddg_dep_no_link (g, from, to, | |
425 DEBUG_INSN_P (to->insn) | |
426 ? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 0); | |
427 } | |
428 else if (!mem_read_insn_p (to->insn)) | |
429 create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 0); | |
386 } | 430 } |
387 | 431 |
388 /* Given two nodes, analyze their RTL insns and add inter-loop mem deps | 432 /* Given two nodes, analyze their RTL insns and add inter-loop mem deps |
389 to ddg G. */ | 433 to ddg G. */ |
390 static void | 434 static void |
427 build_intra_loop_deps (ddg_ptr g) | 471 build_intra_loop_deps (ddg_ptr g) |
428 { | 472 { |
429 int i; | 473 int i; |
430 /* Hold the dependency analysis state during dependency calculations. */ | 474 /* Hold the dependency analysis state during dependency calculations. */ |
431 struct deps_desc tmp_deps; | 475 struct deps_desc tmp_deps; |
432 rtx head, tail; | 476 rtx_insn *head, *tail; |
433 | 477 |
434 /* Build the dependence information, using the sched_analyze function. */ | 478 /* Build the dependence information, using the sched_analyze function. */ |
435 init_deps_global (); | 479 init_deps_global (); |
436 init_deps (&tmp_deps, false); | 480 init_deps (&tmp_deps, false); |
437 | 481 |
450 if (! INSN_P (dest_node->insn)) | 494 if (! INSN_P (dest_node->insn)) |
451 continue; | 495 continue; |
452 | 496 |
453 FOR_EACH_DEP (dest_node->insn, SD_LIST_BACK, sd_it, dep) | 497 FOR_EACH_DEP (dest_node->insn, SD_LIST_BACK, sd_it, dep) |
454 { | 498 { |
455 ddg_node_ptr src_node = get_node_of_insn (g, DEP_PRO (dep)); | 499 rtx_insn *src_insn = DEP_PRO (dep); |
500 ddg_node_ptr src_node; | |
501 | |
502 /* Don't add dependencies on debug insns to non-debug insns | |
503 to avoid codegen differences between -g and -g0. */ | |
504 if (DEBUG_INSN_P (src_insn) && !DEBUG_INSN_P (dest_node->insn)) | |
505 continue; | |
506 | |
507 src_node = get_node_of_insn (g, src_insn); | |
456 | 508 |
457 if (!src_node) | 509 if (!src_node) |
458 continue; | 510 continue; |
459 | 511 |
460 create_ddg_dep_from_intra_loop_link (g, src_node, dest_node, dep); | 512 create_ddg_dep_from_intra_loop_link (g, src_node, dest_node, dep); |
470 { | 522 { |
471 ddg_node_ptr j_node = &g->nodes[j]; | 523 ddg_node_ptr j_node = &g->nodes[j]; |
472 if (DEBUG_INSN_P (j_node->insn)) | 524 if (DEBUG_INSN_P (j_node->insn)) |
473 continue; | 525 continue; |
474 if (mem_access_insn_p (j_node->insn)) | 526 if (mem_access_insn_p (j_node->insn)) |
475 /* Don't bother calculating inter-loop dep if an intra-loop dep | 527 { |
476 already exists. */ | 528 /* Don't bother calculating inter-loop dep if an intra-loop dep |
477 if (! TEST_BIT (dest_node->successors, j)) | 529 already exists. */ |
530 if (! bitmap_bit_p (dest_node->successors, j)) | |
478 add_inter_loop_mem_dep (g, dest_node, j_node); | 531 add_inter_loop_mem_dep (g, dest_node, j_node); |
532 /* If -fmodulo-sched-allow-regmoves | |
533 is set certain anti-dep edges are not created. | |
534 It might be that these anti-dep edges are on the | |
535 path from one memory instruction to another such that | |
536 removing these edges could cause a violation of the | |
537 memory dependencies. Thus we add intra edges between | |
538 every two memory instructions in this case. */ | |
539 if (flag_modulo_sched_allow_regmoves | |
540 && !bitmap_bit_p (dest_node->predecessors, j)) | |
541 add_intra_loop_mem_dep (g, j_node, dest_node); | |
542 } | |
479 } | 543 } |
480 } | 544 } |
481 } | 545 } |
482 | 546 |
483 /* Free the INSN_LISTs. */ | 547 /* Free the INSN_LISTs. */ |
494 Initialize the ddg structure fields to the appropriate values. */ | 558 Initialize the ddg structure fields to the appropriate values. */ |
495 ddg_ptr | 559 ddg_ptr |
496 create_ddg (basic_block bb, int closing_branch_deps) | 560 create_ddg (basic_block bb, int closing_branch_deps) |
497 { | 561 { |
498 ddg_ptr g; | 562 ddg_ptr g; |
499 rtx insn, first_note; | 563 rtx_insn *insn, *first_note; |
500 int i; | 564 int i; |
501 int num_nodes = 0; | 565 int num_nodes = 0; |
502 | 566 |
503 g = (ddg_ptr) xcalloc (1, sizeof (struct ddg)); | 567 g = (ddg_ptr) xcalloc (1, sizeof (struct ddg)); |
504 | 568 |
534 /* Allocate the nodes array, and initialize the nodes. */ | 598 /* Allocate the nodes array, and initialize the nodes. */ |
535 g->num_nodes = num_nodes; | 599 g->num_nodes = num_nodes; |
536 g->nodes = (ddg_node_ptr) xcalloc (num_nodes, sizeof (struct ddg_node)); | 600 g->nodes = (ddg_node_ptr) xcalloc (num_nodes, sizeof (struct ddg_node)); |
537 g->closing_branch = NULL; | 601 g->closing_branch = NULL; |
538 i = 0; | 602 i = 0; |
539 first_note = NULL_RTX; | 603 first_note = NULL; |
540 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); | 604 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); |
541 insn = NEXT_INSN (insn)) | 605 insn = NEXT_INSN (insn)) |
542 { | 606 { |
543 if (! INSN_P (insn)) | 607 if (! INSN_P (insn)) |
544 { | 608 { |
559 continue; | 623 continue; |
560 } | 624 } |
561 | 625 |
562 g->nodes[i].cuid = i; | 626 g->nodes[i].cuid = i; |
563 g->nodes[i].successors = sbitmap_alloc (num_nodes); | 627 g->nodes[i].successors = sbitmap_alloc (num_nodes); |
564 sbitmap_zero (g->nodes[i].successors); | 628 bitmap_clear (g->nodes[i].successors); |
565 g->nodes[i].predecessors = sbitmap_alloc (num_nodes); | 629 g->nodes[i].predecessors = sbitmap_alloc (num_nodes); |
566 sbitmap_zero (g->nodes[i].predecessors); | 630 bitmap_clear (g->nodes[i].predecessors); |
567 g->nodes[i].first_note = (first_note ? first_note : insn); | 631 g->nodes[i].first_note = (first_note ? first_note : insn); |
568 g->nodes[i++].insn = insn; | 632 g->nodes[i++].insn = insn; |
569 first_note = NULL_RTX; | 633 first_note = NULL; |
570 } | 634 } |
571 | 635 |
572 /* We must have found a branch in DDG. */ | 636 /* We must have found a branch in DDG. */ |
573 gcc_assert (g->closing_branch); | 637 gcc_assert (g->closing_branch); |
574 | 638 |
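Throughout create_ddg the instruction variables are tightened from plain rtx to rtx_insn *, and NULL_RTX initializers become plain NULL to match. A brief sketch of the typed walk over a basic block, mirroring the loop above and assuming a basic_block bb is in scope:

    /* BB_HEAD, BB_END and NEXT_INSN all traffic in rtx_insn * in this tree,
       so the typed pointer needs no casts.  */
    rtx_insn *insn;
    for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
         insn = NEXT_INSN (insn))
      if (INSN_P (insn))
        {
          /* ... per-insn work, e.g. the node initialization above ...  */
        }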
652 fprintf (file, "\n"); | 716 fprintf (file, "\n"); |
653 } | 717 } |
654 } | 718 } |
655 | 719 |
656 /* Print the given DDG in VCG format. */ | 720 /* Print the given DDG in VCG format. */ |
657 void | 721 DEBUG_FUNCTION void |
658 vcg_print_ddg (FILE *file, ddg_ptr g) | 722 vcg_print_ddg (FILE *file, ddg_ptr g) |
659 { | 723 { |
660 int src_cuid; | 724 int src_cuid; |
661 | 725 |
662 fprintf (file, "graph: {\n"); | 726 fprintf (file, "graph: {\n"); |
700 | 764 |
701 fprintf (file, "\n;; Number of SCC nodes - %d\n", sccs->num_sccs); | 765 fprintf (file, "\n;; Number of SCC nodes - %d\n", sccs->num_sccs); |
702 for (i = 0; i < sccs->num_sccs; i++) | 766 for (i = 0; i < sccs->num_sccs; i++) |
703 { | 767 { |
704 fprintf (file, "SCC number: %d\n", i); | 768 fprintf (file, "SCC number: %d\n", i); |
705 EXECUTE_IF_SET_IN_SBITMAP (sccs->sccs[i]->nodes, 0, u, sbi) | 769 EXECUTE_IF_SET_IN_BITMAP (sccs->sccs[i]->nodes, 0, u, sbi) |
706 { | 770 { |
707 fprintf (file, "insn num %d\n", u); | 771 fprintf (file, "insn num %d\n", u); |
708 print_rtl_single (file, g->nodes[u].insn); | 772 print_rtl_single (file, g->nodes[u].insn); |
709 } | 773 } |
710 } | 774 } |
737 ddg_node_ptr dest = e->dest; | 801 ddg_node_ptr dest = e->dest; |
738 | 802 |
739 /* Should have allocated the sbitmaps. */ | 803 /* Should have allocated the sbitmaps. */ |
740 gcc_assert (src->successors && dest->predecessors); | 804 gcc_assert (src->successors && dest->predecessors); |
741 | 805 |
742 SET_BIT (src->successors, dest->cuid); | 806 bitmap_set_bit (src->successors, dest->cuid); |
743 SET_BIT (dest->predecessors, src->cuid); | 807 bitmap_set_bit (dest->predecessors, src->cuid); |
744 e->next_in = dest->in; | 808 e->next_in = dest->in; |
745 dest->in = e; | 809 dest->in = e; |
746 e->next_out = src->out; | 810 e->next_out = src->out; |
747 src->out = e; | 811 src->out = e; |
748 } | 812 } |
789 | 853 |
790 scc = (ddg_scc_ptr) xmalloc (sizeof (struct ddg_scc)); | 854 scc = (ddg_scc_ptr) xmalloc (sizeof (struct ddg_scc)); |
791 scc->backarcs = NULL; | 855 scc->backarcs = NULL; |
792 scc->num_backarcs = 0; | 856 scc->num_backarcs = 0; |
793 scc->nodes = sbitmap_alloc (g->num_nodes); | 857 scc->nodes = sbitmap_alloc (g->num_nodes); |
794 sbitmap_copy (scc->nodes, nodes); | 858 bitmap_copy (scc->nodes, nodes); |
795 | 859 |
796 /* Mark the backarcs that belong to this SCC. */ | 860 /* Mark the backarcs that belong to this SCC. */ |
797 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, u, sbi) | 861 EXECUTE_IF_SET_IN_BITMAP (nodes, 0, u, sbi) |
798 { | 862 { |
799 ddg_edge_ptr e; | 863 ddg_edge_ptr e; |
800 ddg_node_ptr n = &g->nodes[u]; | 864 ddg_node_ptr n = &g->nodes[u]; |
801 | 865 |
802 for (e = n->out; e; e = e->next_out) | 866 for (e = n->out; e; e = e->next_out) |
803 if (TEST_BIT (nodes, e->dest->cuid)) | 867 if (bitmap_bit_p (nodes, e->dest->cuid)) |
804 { | 868 { |
805 e->aux.count = IN_SCC; | 869 e->aux.count = IN_SCC; |
806 if (e->distance > 0) | 870 if (e->distance > 0) |
807 add_backarc_to_scc (scc, e); | 871 add_backarc_to_scc (scc, e); |
808 } | 872 } |
857 g->sccs[g->num_sccs++] = scc; | 921 g->sccs[g->num_sccs++] = scc; |
858 } | 922 } |
859 | 923 |
860 /* Given the instruction INSN return the node that represents it. */ | 924 /* Given the instruction INSN return the node that represents it. */ |
861 ddg_node_ptr | 925 ddg_node_ptr |
862 get_node_of_insn (ddg_ptr g, rtx insn) | 926 get_node_of_insn (ddg_ptr g, rtx_insn *insn) |
863 { | 927 { |
864 int i; | 928 int i; |
865 | 929 |
866 for (i = 0; i < g->num_nodes; i++) | 930 for (i = 0; i < g->num_nodes; i++) |
867 if (insn == g->nodes[i].insn) | 931 if (insn == g->nodes[i].insn) |
876 find_successors (sbitmap succ, ddg_ptr g, sbitmap ops) | 940 find_successors (sbitmap succ, ddg_ptr g, sbitmap ops) |
877 { | 941 { |
878 unsigned int i = 0; | 942 unsigned int i = 0; |
879 sbitmap_iterator sbi; | 943 sbitmap_iterator sbi; |
880 | 944 |
881 EXECUTE_IF_SET_IN_SBITMAP (ops, 0, i, sbi) | 945 EXECUTE_IF_SET_IN_BITMAP (ops, 0, i, sbi) |
882 { | 946 { |
883 const sbitmap node_succ = NODE_SUCCESSORS (&g->nodes[i]); | 947 const sbitmap node_succ = NODE_SUCCESSORS (&g->nodes[i]); |
884 sbitmap_a_or_b (succ, succ, node_succ); | 948 bitmap_ior (succ, succ, node_succ); |
885 }; | 949 }; |
886 | 950 |
887 /* We want those that are not in ops. */ | 951 /* We want those that are not in ops. */ |
888 sbitmap_difference (succ, succ, ops); | 952 bitmap_and_compl (succ, succ, ops); |
889 } | 953 } |
890 | 954 |
891 /* Given a set OPS of nodes in the DDG, find the set of their predecessors | 955 /* Given a set OPS of nodes in the DDG, find the set of their predecessors |
892 which are not in OPS, and set their bits in PREDS. Bits corresponding to | 956 which are not in OPS, and set their bits in PREDS. Bits corresponding to |
893 OPS are cleared from PREDS. Leaves the other bits in PREDS unchanged. */ | 957 OPS are cleared from PREDS. Leaves the other bits in PREDS unchanged. */ |
895 find_predecessors (sbitmap preds, ddg_ptr g, sbitmap ops) | 959 find_predecessors (sbitmap preds, ddg_ptr g, sbitmap ops) |
896 { | 960 { |
897 unsigned int i = 0; | 961 unsigned int i = 0; |
898 sbitmap_iterator sbi; | 962 sbitmap_iterator sbi; |
899 | 963 |
900 EXECUTE_IF_SET_IN_SBITMAP (ops, 0, i, sbi) | 964 EXECUTE_IF_SET_IN_BITMAP (ops, 0, i, sbi) |
901 { | 965 { |
902 const sbitmap node_preds = NODE_PREDECESSORS (&g->nodes[i]); | 966 const sbitmap node_preds = NODE_PREDECESSORS (&g->nodes[i]); |
903 sbitmap_a_or_b (preds, preds, node_preds); | 967 bitmap_ior (preds, preds, node_preds); |
904 }; | 968 }; |
905 | 969 |
906 /* We want those that are not in ops. */ | 970 /* We want those that are not in ops. */ |
907 sbitmap_difference (preds, preds, ops); | 971 bitmap_and_compl (preds, preds, ops); |
908 } | 972 } |
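find_successors and find_predecessors above pick up the renamed sbitmap interface: sbitmap_a_or_b becomes bitmap_ior, sbitmap_difference becomes bitmap_and_compl, sbitmap_zero and sbitmap_copy become bitmap_clear and bitmap_copy, SET_BIT and TEST_BIT become bitmap_set_bit and bitmap_bit_p, and EXECUTE_IF_SET_IN_SBITMAP becomes EXECUTE_IF_SET_IN_BITMAP. A minimal sketch of the new spellings; the function name and arguments are illustrative only:

    /* Compute succ := (succ | node_succ) & ~ops, as find_successors does.  */
    static void
    union_then_subtract (sbitmap succ, sbitmap node_succ, sbitmap ops)
    {
      bitmap_ior (succ, succ, node_succ);
      bitmap_and_compl (succ, succ, ops);
    }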
909 | 973 |
910 | 974 |
911 /* Compare function to be passed to qsort to order the backarcs in descending | 975 /* Compare function to be passed to qsort to order the backarcs in descending |
912 recMII order. */ | 976 recMII order. */ |
925 { | 989 { |
926 qsort (g->sccs, g->num_sccs, sizeof (ddg_scc_ptr), | 990 qsort (g->sccs, g->num_sccs, sizeof (ddg_scc_ptr), |
927 (int (*) (const void *, const void *)) compare_sccs); | 991 (int (*) (const void *, const void *)) compare_sccs); |
928 } | 992 } |
929 | 993 |
930 #ifdef ENABLE_CHECKING | |
931 /* Check that every node in SCCS belongs to exactly one strongly connected | 994 /* Check that every node in SCCS belongs to exactly one strongly connected |
932 component and that no element of SCCS is empty. */ | 995 component and that no element of SCCS is empty. */ |
933 static void | 996 static void |
934 check_sccs (ddg_all_sccs_ptr sccs, int num_nodes) | 997 check_sccs (ddg_all_sccs_ptr sccs, int num_nodes) |
935 { | 998 { |
936 int i = 0; | 999 int i = 0; |
937 sbitmap tmp = sbitmap_alloc (num_nodes); | 1000 auto_sbitmap tmp (num_nodes); |
938 | 1001 |
939 sbitmap_zero (tmp); | 1002 bitmap_clear (tmp); |
940 for (i = 0; i < sccs->num_sccs; i++) | 1003 for (i = 0; i < sccs->num_sccs; i++) |
941 { | 1004 { |
942 gcc_assert (!sbitmap_empty_p (sccs->sccs[i]->nodes)); | 1005 gcc_assert (!bitmap_empty_p (sccs->sccs[i]->nodes)); |
943 /* Verify that every node in sccs is in exactly one strongly | 1006 /* Verify that every node in sccs is in exactly one strongly |
944 connected component. */ | 1007 connected component. */ |
945 gcc_assert (!sbitmap_any_common_bits (tmp, sccs->sccs[i]->nodes)); | 1008 gcc_assert (!bitmap_intersect_p (tmp, sccs->sccs[i]->nodes)); |
946 sbitmap_a_or_b (tmp, tmp, sccs->sccs[i]->nodes); | 1009 bitmap_ior (tmp, tmp, sccs->sccs[i]->nodes); |
947 } | 1010 } |
948 sbitmap_free (tmp); | 1011 } |
949 } | |
950 #endif | |
951 | 1012 |
952 /* Perform the Strongly Connected Components decomposing algorithm on the | 1013 /* Perform the Strongly Connected Components decomposing algorithm on the |
953 DDG and return DDG_ALL_SCCS structure that contains them. */ | 1014 DDG and return DDG_ALL_SCCS structure that contains them. */ |
954 ddg_all_sccs_ptr | 1015 ddg_all_sccs_ptr |
955 create_ddg_all_sccs (ddg_ptr g) | 1016 create_ddg_all_sccs (ddg_ptr g) |
956 { | 1017 { |
957 int i; | 1018 int i; |
958 int num_nodes = g->num_nodes; | 1019 int num_nodes = g->num_nodes; |
959 sbitmap from = sbitmap_alloc (num_nodes); | 1020 auto_sbitmap from (num_nodes); |
960 sbitmap to = sbitmap_alloc (num_nodes); | 1021 auto_sbitmap to (num_nodes); |
961 sbitmap scc_nodes = sbitmap_alloc (num_nodes); | 1022 auto_sbitmap scc_nodes (num_nodes); |
962 ddg_all_sccs_ptr sccs = (ddg_all_sccs_ptr) | 1023 ddg_all_sccs_ptr sccs = (ddg_all_sccs_ptr) |
963 xmalloc (sizeof (struct ddg_all_sccs)); | 1024 xmalloc (sizeof (struct ddg_all_sccs)); |
964 | 1025 |
965 sccs->ddg = g; | 1026 sccs->ddg = g; |
966 sccs->sccs = NULL; | 1027 sccs->sccs = NULL; |
975 | 1036 |
976 /* If the backarc already belongs to an SCC, continue. */ | 1037 /* If the backarc already belongs to an SCC, continue. */ |
977 if (backarc->aux.count == IN_SCC) | 1038 if (backarc->aux.count == IN_SCC) |
978 continue; | 1039 continue; |
979 | 1040 |
980 sbitmap_zero (scc_nodes); | 1041 bitmap_clear (scc_nodes); |
981 sbitmap_zero (from); | 1042 bitmap_clear (from); |
982 sbitmap_zero (to); | 1043 bitmap_clear (to); |
983 SET_BIT (from, dest->cuid); | 1044 bitmap_set_bit (from, dest->cuid); |
984 SET_BIT (to, src->cuid); | 1045 bitmap_set_bit (to, src->cuid); |
985 | 1046 |
986 if (find_nodes_on_paths (scc_nodes, g, from, to)) | 1047 if (find_nodes_on_paths (scc_nodes, g, from, to)) |
987 { | 1048 { |
988 scc = create_scc (g, scc_nodes); | 1049 scc = create_scc (g, scc_nodes); |
989 add_scc_to_ddg (sccs, scc); | 1050 add_scc_to_ddg (sccs, scc); |
990 } | 1051 } |
991 } | 1052 } |
992 order_sccs (sccs); | 1053 order_sccs (sccs); |
993 sbitmap_free (from); | 1054 |
994 sbitmap_free (to); | 1055 if (flag_checking) |
995 sbitmap_free (scc_nodes); | 1056 check_sccs (sccs, num_nodes); |
996 #ifdef ENABLE_CHECKING | 1057 |
997 check_sccs (sccs, num_nodes); | |
998 #endif | |
999 return sccs; | 1058 return sccs; |
1000 } | 1059 } |
1001 | 1060 |
1002 /* Frees the memory allocated for all SCCs of the DDG, but keeps the DDG. */ | 1061 /* Frees the memory allocated for all SCCs of the DDG, but keeps the DDG. */ |
1003 void | 1062 void |
1009 return; | 1068 return; |
1010 | 1069 |
1011 for (i = 0; i < all_sccs->num_sccs; i++) | 1070 for (i = 0; i < all_sccs->num_sccs; i++) |
1012 free_scc (all_sccs->sccs[i]); | 1071 free_scc (all_sccs->sccs[i]); |
1013 | 1072 |
1073 free (all_sccs->sccs); | |
1014 free (all_sccs); | 1074 free (all_sccs); |
1015 } | 1075 } |
1016 | 1076 |
1017 | 1077 |
1018 /* Given FROM - a bitmap of source nodes - and TO - a bitmap of destination | 1078 /* Given FROM - a bitmap of source nodes - and TO - a bitmap of destination |
1019 nodes - find all nodes that lie on paths from FROM to TO (not excluding | 1079 nodes - find all nodes that lie on paths from FROM to TO (not excluding |
1020 nodes from FROM and TO). Return nonzero if nodes exist. */ | 1080 nodes from FROM and TO). Return nonzero if nodes exist. */ |
1021 int | 1081 int |
1022 find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to) | 1082 find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to) |
1023 { | 1083 { |
1024 int answer; | |
1025 int change; | 1084 int change; |
1026 unsigned int u = 0; | 1085 unsigned int u = 0; |
1027 int num_nodes = g->num_nodes; | 1086 int num_nodes = g->num_nodes; |
1028 sbitmap_iterator sbi; | 1087 sbitmap_iterator sbi; |
1029 | 1088 |
1030 sbitmap workset = sbitmap_alloc (num_nodes); | 1089 auto_sbitmap workset (num_nodes); |
1031 sbitmap reachable_from = sbitmap_alloc (num_nodes); | 1090 auto_sbitmap reachable_from (num_nodes); |
1032 sbitmap reach_to = sbitmap_alloc (num_nodes); | 1091 auto_sbitmap reach_to (num_nodes); |
1033 sbitmap tmp = sbitmap_alloc (num_nodes); | 1092 auto_sbitmap tmp (num_nodes); |
1034 | 1093 |
1035 sbitmap_copy (reachable_from, from); | 1094 bitmap_copy (reachable_from, from); |
1036 sbitmap_copy (tmp, from); | 1095 bitmap_copy (tmp, from); |
1037 | 1096 |
1038 change = 1; | 1097 change = 1; |
1039 while (change) | 1098 while (change) |
1040 { | 1099 { |
1041 change = 0; | 1100 change = 0; |
1042 sbitmap_copy (workset, tmp); | 1101 bitmap_copy (workset, tmp); |
1043 sbitmap_zero (tmp); | 1102 bitmap_clear (tmp); |
1044 EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi) | 1103 EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi) |
1045 { | 1104 { |
1046 ddg_edge_ptr e; | 1105 ddg_edge_ptr e; |
1047 ddg_node_ptr u_node = &g->nodes[u]; | 1106 ddg_node_ptr u_node = &g->nodes[u]; |
1048 | 1107 |
1049 for (e = u_node->out; e != (ddg_edge_ptr) 0; e = e->next_out) | 1108 for (e = u_node->out; e != (ddg_edge_ptr) 0; e = e->next_out) |
1050 { | 1109 { |
1051 ddg_node_ptr v_node = e->dest; | 1110 ddg_node_ptr v_node = e->dest; |
1052 int v = v_node->cuid; | 1111 int v = v_node->cuid; |
1053 | 1112 |
1054 if (!TEST_BIT (reachable_from, v)) | 1113 if (!bitmap_bit_p (reachable_from, v)) |
1055 { | 1114 { |
1056 SET_BIT (reachable_from, v); | 1115 bitmap_set_bit (reachable_from, v); |
1057 SET_BIT (tmp, v); | 1116 bitmap_set_bit (tmp, v); |
1058 change = 1; | 1117 change = 1; |
1059 } | 1118 } |
1060 } | 1119 } |
1061 } | 1120 } |
1062 } | 1121 } |
1063 | 1122 |
1064 sbitmap_copy (reach_to, to); | 1123 bitmap_copy (reach_to, to); |
1065 sbitmap_copy (tmp, to); | 1124 bitmap_copy (tmp, to); |
1066 | 1125 |
1067 change = 1; | 1126 change = 1; |
1068 while (change) | 1127 while (change) |
1069 { | 1128 { |
1070 change = 0; | 1129 change = 0; |
1071 sbitmap_copy (workset, tmp); | 1130 bitmap_copy (workset, tmp); |
1072 sbitmap_zero (tmp); | 1131 bitmap_clear (tmp); |
1073 EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi) | 1132 EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi) |
1074 { | 1133 { |
1075 ddg_edge_ptr e; | 1134 ddg_edge_ptr e; |
1076 ddg_node_ptr u_node = &g->nodes[u]; | 1135 ddg_node_ptr u_node = &g->nodes[u]; |
1077 | 1136 |
1078 for (e = u_node->in; e != (ddg_edge_ptr) 0; e = e->next_in) | 1137 for (e = u_node->in; e != (ddg_edge_ptr) 0; e = e->next_in) |
1079 { | 1138 { |
1080 ddg_node_ptr v_node = e->src; | 1139 ddg_node_ptr v_node = e->src; |
1081 int v = v_node->cuid; | 1140 int v = v_node->cuid; |
1082 | 1141 |
1083 if (!TEST_BIT (reach_to, v)) | 1142 if (!bitmap_bit_p (reach_to, v)) |
1084 { | 1143 { |
1085 SET_BIT (reach_to, v); | 1144 bitmap_set_bit (reach_to, v); |
1086 SET_BIT (tmp, v); | 1145 bitmap_set_bit (tmp, v); |
1087 change = 1; | 1146 change = 1; |
1088 } | 1147 } |
1089 } | 1148 } |
1090 } | 1149 } |
1091 } | 1150 } |
1092 | 1151 |
1093 answer = sbitmap_a_and_b_cg (result, reachable_from, reach_to); | 1152 return bitmap_and (result, reachable_from, reach_to); |
1094 sbitmap_free (workset); | |
1095 sbitmap_free (reachable_from); | |
1096 sbitmap_free (reach_to); | |
1097 sbitmap_free (tmp); | |
1098 return answer; | |
1099 } | 1153 } |
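find_nodes_on_paths also replaces its manually managed temporaries (sbitmap_alloc paired with sbitmap_free) with auto_sbitmap, whose destructor releases the bits when the object leaves scope; that is why the explicit frees and the answer variable disappear above. A minimal sketch of the RAII pattern; the surrounding function paths_intersect_p is hypothetical:

    static bool
    paths_intersect_p (sbitmap result, sbitmap from, sbitmap to, int num_nodes)
    {
      auto_sbitmap reachable_from (num_nodes);  /* freed automatically on return */
      auto_sbitmap reach_to (num_nodes);

      bitmap_copy (reachable_from, from);
      bitmap_copy (reach_to, to);
      /* ... propagate reachability forward and backward as the loops above do ...  */
      return bitmap_and (result, reachable_from, reach_to);
    }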
1100 | 1154 |
1101 | 1155 |
1102 /* Updates the counts of U_NODE's successors (that belong to NODES) to be | 1156 /* Updates the counts of U_NODE's successors (that belong to NODES) to be |
1103 at least as large as the count of U_NODE plus the latency between them. | 1157 at least as large as the count of U_NODE plus the latency between them. |
1112 for (e = u_node->out; e; e = e->next_out) | 1166 for (e = u_node->out; e; e = e->next_out) |
1113 { | 1167 { |
1114 ddg_node_ptr v_node = e->dest; | 1168 ddg_node_ptr v_node = e->dest; |
1115 int v = v_node->cuid; | 1169 int v = v_node->cuid; |
1116 | 1170 |
1117 if (TEST_BIT (nodes, v) | 1171 if (bitmap_bit_p (nodes, v) |
1118 && (e->distance == 0) | 1172 && (e->distance == 0) |
1119 && (v_node->aux.count < u_node->aux.count + e->latency)) | 1173 && (v_node->aux.count < u_node->aux.count + e->latency)) |
1120 { | 1174 { |
1121 v_node->aux.count = u_node->aux.count + e->latency; | 1175 v_node->aux.count = u_node->aux.count + e->latency; |
1122 SET_BIT (tmp, v); | 1176 bitmap_set_bit (tmp, v); |
1123 result = 1; | 1177 result = 1; |
1124 } | 1178 } |
1125 } | 1179 } |
1126 return result; | 1180 return result; |
1127 } | 1181 } |
1133 longest_simple_path (struct ddg * g, int src, int dest, sbitmap nodes) | 1187 longest_simple_path (struct ddg * g, int src, int dest, sbitmap nodes) |
1134 { | 1188 { |
1135 int i; | 1189 int i; |
1136 unsigned int u = 0; | 1190 unsigned int u = 0; |
1137 int change = 1; | 1191 int change = 1; |
1138 int result; | |
1139 int num_nodes = g->num_nodes; | 1192 int num_nodes = g->num_nodes; |
1140 sbitmap workset = sbitmap_alloc (num_nodes); | 1193 auto_sbitmap workset (num_nodes); |
1141 sbitmap tmp = sbitmap_alloc (num_nodes); | 1194 auto_sbitmap tmp (num_nodes); |
1142 | 1195 |
1143 | 1196 |
1144 /* Data will hold the distance of the longest path found so far from | 1197 /* Data will hold the distance of the longest path found so far from |
1145 src to each node. Initialize to -1 = less than minimum. */ | 1198 src to each node. Initialize to -1 = less than minimum. */ |
1146 for (i = 0; i < g->num_nodes; i++) | 1199 for (i = 0; i < g->num_nodes; i++) |
1147 g->nodes[i].aux.count = -1; | 1200 g->nodes[i].aux.count = -1; |
1148 g->nodes[src].aux.count = 0; | 1201 g->nodes[src].aux.count = 0; |
1149 | 1202 |
1150 sbitmap_zero (tmp); | 1203 bitmap_clear (tmp); |
1151 SET_BIT (tmp, src); | 1204 bitmap_set_bit (tmp, src); |
1152 | 1205 |
1153 while (change) | 1206 while (change) |
1154 { | 1207 { |
1155 sbitmap_iterator sbi; | 1208 sbitmap_iterator sbi; |
1156 | 1209 |
1157 change = 0; | 1210 change = 0; |
1158 sbitmap_copy (workset, tmp); | 1211 bitmap_copy (workset, tmp); |
1159 sbitmap_zero (tmp); | 1212 bitmap_clear (tmp); |
1160 EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi) | 1213 EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi) |
1161 { | 1214 { |
1162 ddg_node_ptr u_node = &g->nodes[u]; | 1215 ddg_node_ptr u_node = &g->nodes[u]; |
1163 | 1216 |
1164 change |= update_dist_to_successors (u_node, nodes, tmp); | 1217 change |= update_dist_to_successors (u_node, nodes, tmp); |
1165 } | 1218 } |
1166 } | 1219 } |
1167 result = g->nodes[dest].aux.count; | 1220 return g->nodes[dest].aux.count; |
1168 sbitmap_free (workset); | |
1169 sbitmap_free (tmp); | |
1170 return result; | |
1171 } | 1221 } |
1172 | 1222 |
1173 #endif /* INSN_SCHEDULING */ | 1223 #endif /* INSN_SCHEDULING */ |