annotate gcc/postreload-gcse.c @ 111:04ced10e8804 (gcc 7)

author   | kono
date     | Fri, 27 Oct 2017 22:46:09 +0900
parents  | f6334be47118
children | 84e7813d76e9

rev | line source |
---|---|
0 | 1 /* Post reload partially redundant load elimination |
111 | 2 Copyright (C) 2004-2017 Free Software Foundation, Inc. |
0 | 3 |
4 This file is part of GCC. | |
5 | |
6 GCC is free software; you can redistribute it and/or modify it under | |
7 the terms of the GNU General Public License as published by the Free | |
8 Software Foundation; either version 3, or (at your option) any later | |
9 version. | |
10 | |
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 for more details. | |
15 | |
16 You should have received a copy of the GNU General Public License | |
17 along with GCC; see the file COPYING3. If not see | |
18 <http://www.gnu.org/licenses/>. */ | |
19 | |
20 #include "config.h" | |
21 #include "system.h" | |
22 #include "coretypes.h" | |
111 | 23 #include "backend.h" |
24 #include "target.h" | |
0 | 25 #include "rtl.h" |
26 #include "tree.h" | |
111 | 27 #include "predict.h" |
28 #include "df.h" | |
29 #include "memmodel.h" | |
0 | 30 #include "tm_p.h" |
31 #include "insn-config.h" | |
111 | 32 #include "emit-rtl.h" |
0 | 33 #include "recog.h" |
111 | 34 |
35 #include "cfgrtl.h" | |
36 #include "profile.h" | |
0 | 37 #include "expr.h" |
38 #include "params.h" | |
39 #include "tree-pass.h" | |
40 #include "dbgcnt.h" | |
111 | 41 #include "gcse-common.h" |
0 | 42 |
43 /* The following code implements gcse after reload; the purpose of this | |
44 pass is to clean up redundant loads generated by reload and other | |
45 optimizations that come after gcse. It searches for simple inter-block | |
46 redundancies and tries to eliminate them by adding moves and loads | |
47 in cold places. | |
48 | |
49 Perform partially redundant load elimination: try to eliminate redundant | |
50 loads created by the reload pass. We look for fully or partially | |
51 redundant loads fed by one or more loads/stores in predecessor BBs, | |
52 and try adding loads to make them fully redundant. We also check if | |
53 it's worth adding loads to be able to delete the redundant load. | |
54 | |
55 Algorithm: | |
56 1. Build available expressions hash table: | |
57 For each load/store instruction, if the loaded/stored memory didn't | |
58 change until the end of the basic block, add this memory expression to | |
59 the hash table. | |
60 2. Perform Redundancy elimination: | |
61 For each load instruction do the following: | |
62 perform partial redundancy elimination, check if it's worth adding | |
63 loads to make the load fully redundant. If so add loads and | |
64 register copies and delete the load. | |
65 3. Delete instructions made redundant in step 2. | |
66 | |
67 Future enhancement: | |
68 If the loaded register is used/defined between load and some store, | |
69 look for some other free register between load and all its stores, | |
70 and replace the load with a copy from this register to the loaded | |
71 register. | |
72 */ | |
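/* Illustrative sketch (editorial addition, not part of the original pass
   sources): assume a diamond-shaped CFG in which only one predecessor has
   the loaded value available at its end,

	 bb2:  r4 <- [r1+8]          bb3:  (no access to [r1+8])
	            \                     /
	             bb4:  r3 <- [r1+8]      <- partially redundant load

   Step 1 records "r4 <- [r1+8]" as available at the end of bb2.  Step 2
   makes the load in bb4 fully redundant by inserting the copy "r3 <- r4"
   on the edge bb2->bb4 and a copy of the load "r3 <- [r1+8]" on the edge
   bb3->bb4, and step 3 then deletes the now-redundant load in bb4.  */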
73 | |
74 | |
75 /* Keep statistics of this pass. */ | |
76 static struct | |
77 { | |
78 int moves_inserted; | |
79 int copies_inserted; | |
80 int insns_deleted; | |
81 } stats; | |
82 | |
83 /* We need to keep a hash table of expressions. The table entries are of | |
84 type 'struct expr', and for each expression there is a singly linked | |
85 list of occurrences. */ | |
86 | |
87 /* Expression elements in the hash table. */ | |
88 struct expr | |
89 { | |
90 /* The expression (SET_SRC for expressions, PATTERN for assignments). */ | |
91 rtx expr; | |
92 | |
93 /* The same hash for this entry. */ | |
94 hashval_t hash; | |
95 | |
111 | 96 /* Index in the transparent bitmaps. */ |
97 unsigned int bitmap_index; | |
98 | |
0 | 99 /* List of available occurrence in basic blocks in the function. */ |
100 struct occr *avail_occr; | |
101 }; | |
102 | |
111 | 103 /* Hashtable helpers. */ |
104 | |
105 struct expr_hasher : nofree_ptr_hash <expr> | |
106 { | |
107 static inline hashval_t hash (const expr *); | |
108 static inline bool equal (const expr *, const expr *); | |
109 }; | |
110 | |
111 | |
112 /* Hash expression X. | |
113 DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found | |
114 or if the expression contains something we don't want to insert in the | |
115 table. */ | |
116 | |
117 static hashval_t | |
118 hash_expr (rtx x, int *do_not_record_p) | |
119 { | |
120 *do_not_record_p = 0; | |
121 return hash_rtx (x, GET_MODE (x), do_not_record_p, | |
122 NULL, /*have_reg_qty=*/false); | |
123 } | |
124 | |
125 /* Callback for hashtab. | |
126 Return the hash value for expression EXP. We don't actually hash | |
127 here, we just return the cached hash value. */ | |
128 | |
129 inline hashval_t | |
130 expr_hasher::hash (const expr *exp) | |
131 { | |
132 return exp->hash; | |
133 } | |
134 | |
135 /* Callback for hashtab. | |
136 Return nonzero if exp1 is equivalent to exp2. */ | |
137 | |
138 inline bool | |
139 expr_hasher::equal (const expr *exp1, const expr *exp2) | |
140 { | |
141 int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true); | |
142 | |
143 gcc_assert (!equiv_p || exp1->hash == exp2->hash); | |
144 return equiv_p; | |
145 } | |
146 | |
147 /* The table itself. */ | |
148 static hash_table<expr_hasher> *expr_table; | |
149 | |
150 | |
0 | 151 static struct obstack expr_obstack; |
152 | |
153 /* Occurrence of an expression. | |
154 There is at most one occurrence per basic block. If a pattern appears | |
155 more than once, the last appearance is used. */ | |
156 | |
157 struct occr | |
158 { | |
159 /* Next occurrence of this expression. */ | |
160 struct occr *next; | |
161 /* The insn that computes the expression. */ | |
111 | 162 rtx_insn *insn; |
0 | 163 /* Nonzero if this [anticipatable] occurrence has been deleted. */ |
164 char deleted_p; | |
165 }; | |
166 | |
167 static struct obstack occr_obstack; | |
168 | |
169 /* The following structure holds the information about the occurrences of | |
170 the redundant instructions. */ | |
171 struct unoccr | |
172 { | |
173 struct unoccr *next; | |
174 edge pred; | |
111 | 175 rtx_insn *insn; |
0 | 176 }; |
177 | |
178 static struct obstack unoccr_obstack; | |
179 | |
0 | 180 /* Array where each element is the CUID of the insn that last set the hard |
181 register with the number of the element, since the start of the current | |
182 basic block. | |
183 | |
184 This array is used during the building of the hash table (step 1) to | |
185 determine if a reg is killed before the end of a basic block. | |
186 | |
187 It is also used when eliminating partial redundancies (step 2) to see | |
188 if a reg was modified since the start of a basic block. */ | |
189 static int *reg_avail_info; | |
190 | |
191 /* A list of insns that may modify memory within the current basic block. */ | |
192 struct modifies_mem | |
193 { | |
111 | 194 rtx_insn *insn; |
0 | 195 struct modifies_mem *next; |
196 }; | |
197 static struct modifies_mem *modifies_mem_list; | |
198 | |
199 /* The modifies_mem structs also go on an obstack, only this obstack is | |
200 freed each time after completing the analysis or transformations on | |
201 a basic block. So we allocate a dummy modifies_mem_obstack_bottom | |
202 object on the obstack to keep track of the bottom of the obstack. */ | |
203 static struct obstack modifies_mem_obstack; | |
204 static struct modifies_mem *modifies_mem_obstack_bottom; | |
205 | |
206 /* Mapping of insn UIDs to CUIDs. | |
207 CUIDs are like UIDs except they increase monotonically in each basic | |
208 block, have no gaps, and only apply to real insns. */ | |
209 static int *uid_cuid; | |
210 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)]) | |
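/* Editorial example, based on the numbering loop in alloc_mem below: the
   first real insn of the function gets CUID 1, the second CUID 2, and so on;
   anything that is not a real insn (a NOTE, for instance) is simply given
   the value the next real insn will receive, so CUID comparisons are only
   meaningful between real insns.  */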
111 | 211 |
212 /* Bitmap of blocks which have memory stores. */ | |
213 static bitmap modify_mem_list_set; | |
214 | |
215 /* Bitmap of blocks which have calls. */ | |
216 static bitmap blocks_with_calls; | |
217 | |
218 /* Vector indexed by block # with a list of all the insns that | |
219 modify memory within the block. */ | |
220 static vec<rtx_insn *> *modify_mem_list; | |
221 | |
222 /* Vector indexed by block # with a canonicalized list of insns | |
223 that modify memory in the block. */ | |
224 static vec<modify_pair> *canon_modify_mem_list; | |
225 | |
226 /* Vector of simple bitmaps indexed by block number. Each component sbitmap | |
227 indicates which expressions are transparent through the block. */ | |
228 static sbitmap *transp; | |
0 | 229 |
230 | |
231 /* Helpers for memory allocation/freeing. */ | |
232 static void alloc_mem (void); | |
233 static void free_mem (void); | |
234 | |
235 /* Support for hash table construction and transformations. */ | |
111 | 236 static bool oprs_unchanged_p (rtx, rtx_insn *, bool); |
237 static void record_last_reg_set_info (rtx_insn *, rtx); | |
238 static void record_last_reg_set_info_regno (rtx_insn *, int); | |
239 static void record_last_mem_set_info (rtx_insn *); | |
0 | 240 static void record_last_set_info (rtx, const_rtx, void *); |
111 | 241 static void record_opr_changes (rtx_insn *); |
0 | 242 |
243 static void find_mem_conflicts (rtx, const_rtx, void *); | |
244 static int load_killed_in_block_p (int, rtx, bool); | |
245 static void reset_opr_set_tables (void); | |
246 | |
247 /* Hash table support. */ | |
248 static hashval_t hash_expr (rtx, int *); | |
111 | 249 static void insert_expr_in_table (rtx, rtx_insn *); |
0 | 250 static struct expr *lookup_expr_in_table (rtx); |
251 static void dump_hash_table (FILE *); | |
252 | |
253 /* Helpers for eliminate_partially_redundant_load. */ | |
254 static bool reg_killed_on_edge (rtx, edge); | |
255 static bool reg_used_on_edge (rtx, edge); | |
256 | |
111 | 257 static rtx get_avail_load_store_reg (rtx_insn *); |
0 | 258 |
259 static bool bb_has_well_behaved_predecessors (basic_block); | |
111 | 260 static struct occr* get_bb_avail_insn (basic_block, struct occr *, int); |
261 static void hash_scan_set (rtx_insn *); | |
0 | 262 static void compute_hash_table (void); |
263 | |
264 /* The work horses of this pass. */ | |
265 static void eliminate_partially_redundant_load (basic_block, | |
111 | 266 rtx_insn *, |
0 | 267 struct expr *); |
268 static void eliminate_partially_redundant_loads (void); | |
269 | |
270 | |
271 /* Allocate memory for the CUID mapping array and register/memory | |
272 tracking tables. */ | |
273 | |
274 static void | |
275 alloc_mem (void) | |
276 { | |
277 int i; | |
278 basic_block bb; | |
111 | 279 rtx_insn *insn; |
0 | 280 |
281 /* Find the largest UID and create a mapping from UIDs to CUIDs. */ | |
282 uid_cuid = XCNEWVEC (int, get_max_uid () + 1); | |
283 i = 1; | |
111 | 284 FOR_EACH_BB_FN (bb, cfun) |
0 | 285 FOR_BB_INSNS (bb, insn) |
286 { | |
287 if (INSN_P (insn)) | |
288 uid_cuid[INSN_UID (insn)] = i++; | |
289 else | |
290 uid_cuid[INSN_UID (insn)] = i; | |
291 } | |
292 | |
293 /* Allocate the available expressions hash table. We don't want to | |
294 make the hash table too small, but unnecessarily making it too large | |
295 also doesn't help. The i/4 is a gcse.c relic, and seems like a | |
296 reasonable choice. */ | |
111 | 297 expr_table = new hash_table<expr_hasher> (MAX (i / 4, 13)); |
0 | 298 |
299 /* We allocate everything on obstacks because we often can roll back | |
300 the whole obstack to some point. Freeing obstacks is very fast. */ | |
301 gcc_obstack_init (&expr_obstack); | |
302 gcc_obstack_init (&occr_obstack); | |
303 gcc_obstack_init (&unoccr_obstack); | |
304 gcc_obstack_init (&modifies_mem_obstack); | |
305 | |
306 /* Working array used to track the last set for each register | |
307 in the current block. */ | |
308 reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int)); | |
309 | |
310 /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we | |
311 can roll it back in reset_opr_set_tables. */ | |
312 modifies_mem_obstack_bottom = | |
313 (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack, | |
314 sizeof (struct modifies_mem)); | |
111 | 315 |
316 blocks_with_calls = BITMAP_ALLOC (NULL); | |
317 modify_mem_list_set = BITMAP_ALLOC (NULL); | |
318 | |
319 modify_mem_list = (vec_rtx_heap *) xcalloc (last_basic_block_for_fn (cfun), | |
320 sizeof (vec_rtx_heap)); | |
321 canon_modify_mem_list | |
322 = (vec_modify_pair_heap *) xcalloc (last_basic_block_for_fn (cfun), | |
323 sizeof (vec_modify_pair_heap)); | |
0 | 324 } |
325 | |
326 /* Free memory allocated by alloc_mem. */ | |
327 | |
328 static void | |
329 free_mem (void) | |
330 { | |
331 free (uid_cuid); | |
332 | |
111 | 333 delete expr_table; |
334 expr_table = NULL; | |
0 | 335 |
336 obstack_free (&expr_obstack, NULL); | |
337 obstack_free (&occr_obstack, NULL); | |
338 obstack_free (&unoccr_obstack, NULL); | |
339 obstack_free (&modifies_mem_obstack, NULL); | |
340 | |
111 | 341 unsigned i; |
342 bitmap_iterator bi; | |
343 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi) | |
344 { | |
345 modify_mem_list[i].release (); | |
346 canon_modify_mem_list[i].release (); | |
347 } | |
0 | 348 |
111 | 349 BITMAP_FREE (blocks_with_calls); |
350 BITMAP_FREE (modify_mem_list_set); | |
351 free (reg_avail_info); | |
352 free (modify_mem_list); | |
353 free (canon_modify_mem_list); | |
0 | 354 } |
355 | |
356 | |
357 /* Insert expression X in INSN in the hash TABLE. | |
358 If it is already present, record it as the last occurrence in INSN's | |
359 basic block. */ | |
360 | |
361 static void | |
111 | 362 insert_expr_in_table (rtx x, rtx_insn *insn) |
0 | 363 { |
364 int do_not_record_p; | |
365 hashval_t hash; | |
366 struct expr *cur_expr, **slot; | |
367 struct occr *avail_occr, *last_occr = NULL; | |
368 | |
369 hash = hash_expr (x, &do_not_record_p); | |
370 | |
371 /* Do not insert expression in the table if it contains volatile operands, | |
372 or if hash_expr determines the expression is something we don't want | |
373 to or can't handle. */ | |
374 if (do_not_record_p) | |
375 return; | |
376 | |
377 /* We anticipate that redundant expressions are rare, so for convenience | |
378 allocate a new hash table element here already and set its fields. | |
379 If we don't do this, we need a hack with a static struct expr. Anyway, | |
380 obstack_free is really fast and one more obstack_alloc doesn't hurt if | |
381 we're going to see more expressions later on. */ | |
382 cur_expr = (struct expr *) obstack_alloc (&expr_obstack, | |
383 sizeof (struct expr)); | |
384 cur_expr->expr = x; | |
385 cur_expr->hash = hash; | |
386 cur_expr->avail_occr = NULL; | |
387 | |
111 | 388 slot = expr_table->find_slot_with_hash (cur_expr, hash, INSERT); |
389 |
0 | 390 if (! (*slot)) |
111 | 391 { |
392 /* The expression isn't found, so insert it. */ | |
393 *slot = cur_expr; | |
394 | |
395 /* Anytime we add an entry to the table, record the index | |
396 of the new entry. The bitmap index starts counting | |
397 at zero. */ | |
398 cur_expr->bitmap_index = expr_table->elements () - 1; | |
399 } | |
0 | 400 else |
401 { | |
402 /* The expression is already in the table, so roll back the | |
403 obstack and use the existing table entry. */ | |
404 obstack_free (&expr_obstack, cur_expr); | |
405 cur_expr = *slot; | |
406 } | |
407 | |
408 /* Search for another occurrence in the same basic block. */ | |
409 avail_occr = cur_expr->avail_occr; | |
410 while (avail_occr |
411 && BLOCK_FOR_INSN (avail_occr->insn) != BLOCK_FOR_INSN (insn)) |
0 | 412 { |
413 /* If an occurrence isn't found, save a pointer to the end of | |
414 the list. */ | |
415 last_occr = avail_occr; | |
416 avail_occr = avail_occr->next; | |
417 } | |
418 | |
419 if (avail_occr) | |
420 /* Found another instance of the expression in the same basic block. | |
421 Prefer this occurrence to the currently recorded one. We want | |
422 the last one in the block and the block is scanned from start | |
423 to end. */ | |
424 avail_occr->insn = insn; | |
425 else | |
426 { | |
427 /* First occurrence of this expression in this basic block. */ | |
428 avail_occr = (struct occr *) obstack_alloc (&occr_obstack, | |
429 sizeof (struct occr)); | |
430 | |
431 /* First occurrence of this expression in any block? */ | |
432 if (cur_expr->avail_occr == NULL) | |
433 cur_expr->avail_occr = avail_occr; | |
434 else | |
435 last_occr->next = avail_occr; | |
436 | |
437 avail_occr->insn = insn; | |
438 avail_occr->next = NULL; | |
439 avail_occr->deleted_p = 0; | |
440 } | |
441 } | |
442 | |
443 | |
444 /* Lookup pattern PAT in the expression hash table. | |
445 The result is a pointer to the table entry, or NULL if not found. */ | |
446 | |
447 static struct expr * | |
448 lookup_expr_in_table (rtx pat) | |
449 { | |
450 int do_not_record_p; | |
451 struct expr **slot, *tmp_expr; | |
452 hashval_t hash = hash_expr (pat, &do_not_record_p); | |
453 | |
454 if (do_not_record_p) | |
455 return NULL; | |
456 | |
457 tmp_expr = (struct expr *) obstack_alloc (&expr_obstack, | |
458 sizeof (struct expr)); | |
459 tmp_expr->expr = pat; | |
460 tmp_expr->hash = hash; | |
461 tmp_expr->avail_occr = NULL; | |
462 | |
111 | 463 slot = expr_table->find_slot_with_hash (tmp_expr, hash, INSERT); |
0 | 464 obstack_free (&expr_obstack, tmp_expr); |
465 | |
466 if (!slot) | |
467 return NULL; | |
468 else | |
469 return (*slot); | |
470 } | |
471 | |
472 | |
473 /* Dump all expressions and occurrences that are currently in the | |
474 expression hash table to FILE. */ | |
475 | |
476 /* This helper is called via htab_traverse. */ | |
111 | 477 int |
478 dump_expr_hash_table_entry (expr **slot, FILE *file) | |
0 | 479 { |
111 | 480 struct expr *exprs = *slot; |
0 | 481 struct occr *occr; |
482 | |
483 fprintf (file, "expr: "); | |
111 | 484 print_rtl (file, exprs->expr); |
485 fprintf (file,"\nhashcode: %u\n", exprs->hash); | |
0 | 486 fprintf (file,"list of occurrences:\n"); |
111 | 487 occr = exprs->avail_occr; |
0 | 488 while (occr) |
489 { | |
111 | 490 rtx_insn *insn = occr->insn; |
0 | 491 print_rtl_single (file, insn); |
492 fprintf (file, "\n"); | |
493 occr = occr->next; | |
494 } | |
495 fprintf (file, "\n"); | |
496 return 1; | |
497 } | |
498 | |
499 static void | |
500 dump_hash_table (FILE *file) | |
501 { | |
502 fprintf (file, "\n\nexpression hash table\n"); | |
503 fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n", | |
111 | 504 (long) expr_table->size (), |
505 (long) expr_table->elements (), | |
506 expr_table->collisions ()); | |
507 if (expr_table->elements () > 0) | |
0 | 508 { |
509 fprintf (file, "\n\ntable entries:\n"); | |
111 | 510 expr_table->traverse <FILE *, dump_expr_hash_table_entry> (file); |
0 | 511 } |
512 fprintf (file, "\n"); | |
513 } | |
514 | |
515 /* Return true if register X is recorded as being set by an instruction | |
516 whose CUID is greater than the one given. */ | |
517 | |
518 static bool | |
519 reg_changed_after_insn_p (rtx x, int cuid) | |
520 { | |
521 unsigned int regno, end_regno; | |
522 | |
523 regno = REGNO (x); | |
111 | 524 end_regno = END_REGNO (x); |
0 | 525 do |
526 if (reg_avail_info[regno] > cuid) | |
527 return true; | |
528 while (++regno < end_regno); | |
529 return false; | |
530 } | |
531 | |
532 /* Return nonzero if the operands of expression X are unchanged | |
533 1) from the start of INSN's basic block up to but not including INSN | |
534 if AFTER_INSN is false, or | |
535 2) from INSN to the end of INSN's basic block if AFTER_INSN is true. */ | |
536 | |
537 static bool | |
111 | 538 oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn) |
0 | 539 { |
540 int i, j; | |
541 enum rtx_code code; | |
542 const char *fmt; | |
543 | |
544 if (x == 0) | |
545 return 1; | |
546 | |
547 code = GET_CODE (x); | |
548 switch (code) | |
549 { | |
550 case REG: | |
551 /* We are called after register allocation. */ | |
552 gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER); | |
553 if (after_insn) | |
554 return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1); | |
555 else | |
556 return !reg_changed_after_insn_p (x, 0); | |
557 | |
558 case MEM: | |
559 if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn)) | |
560 return 0; | |
561 else | |
562 return oprs_unchanged_p (XEXP (x, 0), insn, after_insn); | |
563 | |
564 case PC: | |
565 case CC0: /*FIXME*/ | |
566 case CONST: | |
111 | 567 CASE_CONST_ANY: |
0 | 568 case SYMBOL_REF: |
569 case LABEL_REF: | |
570 case ADDR_VEC: | |
571 case ADDR_DIFF_VEC: | |
572 return 1; | |
573 | |
574 case PRE_DEC: | |
575 case PRE_INC: | |
576 case POST_DEC: | |
577 case POST_INC: | |
578 case PRE_MODIFY: | |
579 case POST_MODIFY: | |
580 if (after_insn) | |
581 return 0; | |
582 break; | |
583 | |
584 default: | |
585 break; | |
586 } | |
587 | |
588 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) | |
589 { | |
590 if (fmt[i] == 'e') | |
591 { | |
592 if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn)) | |
593 return 0; | |
594 } | |
595 else if (fmt[i] == 'E') | |
596 for (j = 0; j < XVECLEN (x, i); j++) | |
597 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn)) | |
598 return 0; | |
599 } | |
600 | |
601 return 1; | |
602 } | |
603 | |
604 | |
605 /* Used for communication between find_mem_conflicts and | |
606 load_killed_in_block_p. Nonzero if find_mem_conflicts finds a | |
607 conflict between two memory references. | |
608 This is a bit of a hack to work around the limitations of note_stores. */ | |
609 static int mems_conflict_p; | |
610 | |
611 /* DEST is the output of an instruction. If it is a memory reference, and | |
612 possibly conflicts with the load found in DATA, then set mems_conflict_p | |
613 to a nonzero value. */ | |
614 | |
615 static void | |
616 find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, | |
617 void *data) | |
618 { | |
619 rtx mem_op = (rtx) data; | |
620 | |
621 while (GET_CODE (dest) == SUBREG | |
622 || GET_CODE (dest) == ZERO_EXTRACT | |
623 || GET_CODE (dest) == STRICT_LOW_PART) | |
624 dest = XEXP (dest, 0); | |
625 | |
626 /* If DEST is not a MEM, then it will not conflict with the load. Note | |
627 that function calls are assumed to clobber memory, but are handled | |
628 elsewhere. */ | |
629 if (! MEM_P (dest)) | |
630 return; | |
631 | |
111 | 632 if (true_dependence (dest, GET_MODE (dest), mem_op)) |
0 | 633 mems_conflict_p = 1; |
634 } | |
635 | |
636 | |
637 /* Return nonzero if the expression in X (a memory reference) is killed | |
638 in the current basic block before (if AFTER_INSN is false) or after | |
639 (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT. | |
640 | |
641 This function assumes that the modifies_mem table is flushed when | |
642 the hash table construction or redundancy elimination phases start | |
643 processing a new basic block. */ | |
644 | |
645 static int | |
646 load_killed_in_block_p (int uid_limit, rtx x, bool after_insn) | |
647 { | |
648 struct modifies_mem *list_entry = modifies_mem_list; | |
649 | |
650 while (list_entry) | |
651 { | |
111 | 652 rtx_insn *setter = list_entry->insn; |
0 | 653 |
654 /* Ignore entries in the list that do not apply. */ | |
655 if ((after_insn | |
656 && INSN_CUID (setter) < uid_limit) | |
657 || (! after_insn | |
658 && INSN_CUID (setter) > uid_limit)) | |
659 { | |
660 list_entry = list_entry->next; | |
661 continue; | |
662 } | |
663 | |
664 /* If SETTER is a call everything is clobbered. Note that calls | |
665 to pure functions are never put on the list, so we need not | |
666 worry about them. */ | |
667 if (CALL_P (setter)) | |
668 return 1; | |
669 | |
670 /* SETTER must be an insn of some kind that sets memory. Call | |
671 note_stores to examine each hunk of memory that is modified. | |
672 It will set mems_conflict_p to nonzero if there may be a | |
673 conflict between X and SETTER. */ | |
674 mems_conflict_p = 0; | |
675 note_stores (PATTERN (setter), find_mem_conflicts, x); | |
676 if (mems_conflict_p) | |
677 return 1; | |
678 | |
679 list_entry = list_entry->next; | |
680 } | |
681 return 0; | |
682 } | |
683 | |
684 | |
685 /* Record register first/last/block set information for REGNO in INSN. */ | |
686 | |
687 static inline void | |
111 | 688 record_last_reg_set_info (rtx_insn *insn, rtx reg) |
0 | 689 { |
690 unsigned int regno, end_regno; | |
691 | |
692 regno = REGNO (reg); | |
111 | 693 end_regno = END_REGNO (reg); |
0 | 694 do |
695 reg_avail_info[regno] = INSN_CUID (insn); | |
696 while (++regno < end_regno); | |
697 } | |
698 | |
699 static inline void | |
111 | 700 record_last_reg_set_info_regno (rtx_insn *insn, int regno) |
0 | 701 { |
702 reg_avail_info[regno] = INSN_CUID (insn); | |
703 } | |
704 | |
705 | |
706 /* Record memory modification information for INSN. We do not actually care | |
707 about the memory location(s) that are set, or even how they are set (consider | |
708 a CALL_INSN). We merely need to record which insns modify memory. */ | |
709 | |
710 static void | |
111 | 711 record_last_mem_set_info (rtx_insn *insn) |
0 | 712 { |
713 struct modifies_mem *list_entry; | |
714 | |
715 list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack, | |
716 sizeof (struct modifies_mem)); | |
717 list_entry->insn = insn; | |
718 list_entry->next = modifies_mem_list; | |
719 modifies_mem_list = list_entry; | |
111 | 720 |
721 record_last_mem_set_info_common (insn, modify_mem_list, | |
722 canon_modify_mem_list, | |
723 modify_mem_list_set, | |
724 blocks_with_calls); | |
0 | 725 } |
726 | |
727 /* Called from compute_hash_table via note_stores to handle one | |
728 SET or CLOBBER in an insn. DATA is really the instruction in which | |
729 the SET is taking place. */ | |
730 | |
731 static void | |
732 record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data) | |
733 { | |
111 | 734 rtx_insn *last_set_insn = (rtx_insn *) data; |
0 | 735 |
736 if (GET_CODE (dest) == SUBREG) | |
737 dest = SUBREG_REG (dest); | |
738 | |
739 if (REG_P (dest)) | |
740 record_last_reg_set_info (last_set_insn, dest); | |
741 else if (MEM_P (dest)) | |
742 { | |
743 /* Ignore pushes; they don't clobber memory. They may still | |
744 clobber the stack pointer though. Some targets do argument | |
745 pushes without adding REG_INC notes. See e.g. PR25196, | |
746 where a pushsi2 on i386 doesn't have REG_INC notes. Note | |
747 such changes here too. */ | |
748 if (! push_operand (dest, GET_MODE (dest))) | |
749 record_last_mem_set_info (last_set_insn); | |
750 else | |
751 record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM); | |
752 } | |
753 } | |
754 | |
755 | |
756 /* Reset tables used to keep track of what's still available since the | |
757 start of the block. */ | |
758 | |
759 static void | |
760 reset_opr_set_tables (void) | |
761 { | |
762 memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int)); | |
763 obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom); | |
764 modifies_mem_list = NULL; | |
765 } | |
766 | |
767 | |
768 /* Record things set by INSN. | |
769 This data is used by oprs_unchanged_p. */ | |
770 | |
771 static void | |
111 | 772 record_opr_changes (rtx_insn *insn) |
0 | 773 { |
774 rtx note; | |
775 | |
776 /* Find all stores and record them. */ | |
777 note_stores (PATTERN (insn), record_last_set_info, insn); | |
778 | |
779 /* Also record autoincremented REGs for this insn as changed. */ | |
780 for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) | |
781 if (REG_NOTE_KIND (note) == REG_INC) | |
782 record_last_reg_set_info (insn, XEXP (note, 0)); | |
783 | |
784 /* Finally, if this is a call, record all call clobbers. */ | |
785 if (CALL_P (insn)) | |
786 { | |
787 unsigned int regno; | |
788 rtx link, x; | |
111 | 789 hard_reg_set_iterator hrsi; |
790 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi) | |
791 record_last_reg_set_info_regno (insn, regno); | |
0 | 792 |
793 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) | |
794 if (GET_CODE (XEXP (link, 0)) == CLOBBER) | |
795 { | |
796 x = XEXP (XEXP (link, 0), 0); | |
797 if (REG_P (x)) | |
798 { | |
799 gcc_assert (HARD_REGISTER_P (x)); | |
800 record_last_reg_set_info (insn, x); | |
801 } | |
802 } | |
803 | |
804 if (! RTL_CONST_OR_PURE_CALL_P (insn)) | |
805 record_last_mem_set_info (insn); | |
806 } | |
807 } | |
808 | |
809 | |
810 /* Scan the pattern of INSN and add an entry to the hash TABLE. | |
811 After reload we are interested in loads/stores only. */ | |
812 | |
813 static void | |
111 | 814 hash_scan_set (rtx_insn *insn) |
0 | 815 { |
816 rtx pat = PATTERN (insn); | |
817 rtx src = SET_SRC (pat); | |
818 rtx dest = SET_DEST (pat); | |
819 | |
820 /* We are only interested in loads and stores. */ | |
821 if (! MEM_P (src) && ! MEM_P (dest)) | |
822 return; | |
823 | |
824 /* Don't mess with jumps and nops. */ | |
825 if (JUMP_P (insn) || set_noop_p (pat)) | |
826 return; | |
827 | |
828 if (REG_P (dest)) | |
829 { | |
830 if (/* Don't CSE something if we can't do a reg/reg copy. */ | |
831 can_copy_p (GET_MODE (dest)) | |
832 /* Is SET_SRC something we want to gcse? */ | |
833 && general_operand (src, GET_MODE (src)) | |
834 #ifdef STACK_REGS | |
835 /* Never consider insns touching the register stack. It may | |
836 create situations that reg-stack cannot handle (e.g. a stack | |
837 register live across an abnormal edge). */ | |
838 && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG) | |
839 #endif | |
840 /* An expression is not available if its operands are | |
841 subsequently modified, including this insn. */ | |
842 && oprs_unchanged_p (src, insn, true)) | |
843 { | |
844 insert_expr_in_table (src, insn); | |
845 } | |
846 } | |
847 else if (REG_P (src)) | |
848 { | |
849 /* Only record sets of pseudo-regs in the hash table. */ | |
850 if (/* Don't CSE something if we can't do a reg/reg copy. */ | |
851 can_copy_p (GET_MODE (src)) | |
852 /* Is SET_DEST something we want to gcse? */ | |
853 && general_operand (dest, GET_MODE (dest)) | |
854 #ifdef STACK_REGS | |
855 /* As above for STACK_REGS. */ | |
856 && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG) | |
857 #endif | |
858 && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest))) | |
859 /* Check if the memory expression is killed after insn. */ | |
860 && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true) | |
861 && oprs_unchanged_p (XEXP (dest, 0), insn, true)) | |
862 { | |
863 insert_expr_in_table (dest, insn); | |
864 } | |
865 } | |
866 } | |
867 | |
868 | |
869 /* Create hash table of memory expressions available at end of basic | |
870 blocks. Basically you should think of this hash table as the | |
871 representation of AVAIL_OUT. This is the set of expressions that | |
872 is generated in a basic block and not killed before the end of the | |
873 same basic block. Notice that this is really a local computation. */ | |
874 | |
875 static void | |
876 compute_hash_table (void) | |
877 { | |
878 basic_block bb; | |
879 | |
111 | 880 FOR_EACH_BB_FN (bb, cfun) |
0 | 881 { |
111 | 882 rtx_insn *insn; |
0 | 883 |
884 /* First pass over the instructions records information used to | |
885 determine when registers and memory are last set. | |
886 Since we compute a "local" AVAIL_OUT, reset the tables that | |
887 help us keep track of what has been modified since the start | |
888 of the block. */ | |
889 reset_opr_set_tables (); | |
890 FOR_BB_INSNS (bb, insn) | |
891 { | |
892 if (INSN_P (insn)) | |
893 record_opr_changes (insn); | |
894 } | |
895 | |
896 /* The next pass actually builds the hash table. */ | |
897 FOR_BB_INSNS (bb, insn) | |
898 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET) | |
899 hash_scan_set (insn); | |
900 } | |
901 } | |
902 | |
903 | |
904 /* Check if register REG is killed in any insn waiting to be inserted on | |
905 edge E. This function is required to check that our data flow analysis | |
906 is still valid prior to commit_edge_insertions. */ | |
907 | |
908 static bool | |
909 reg_killed_on_edge (rtx reg, edge e) | |
910 { | |
111 | 911 rtx_insn *insn; |
0 | 912 |
913 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn)) | |
914 if (INSN_P (insn) && reg_set_p (reg, insn)) | |
915 return true; | |
916 | |
917 return false; | |
918 } | |
919 | |
920 /* Similar to above - check if register REG is used in any insn waiting | |
921 to be inserted on edge E. | |
922 Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p | |
923 with PREV(insn),NEXT(insn) instead of calling reg_overlap_mentioned_p. */ | |
924 | |
925 static bool | |
926 reg_used_on_edge (rtx reg, edge e) | |
927 { | |
111 | 928 rtx_insn *insn; |
0 | 929 |
930 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn)) | |
931 if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn))) | |
932 return true; | |
933 | |
934 return false; | |
935 } | |
936 | |
937 /* Return the loaded/stored register of a load/store instruction. */ | |
938 | |
939 static rtx | |
111 | 940 get_avail_load_store_reg (rtx_insn *insn) |
0 | 941 { |
942 if (REG_P (SET_DEST (PATTERN (insn)))) | |
943 /* A load. */ | |
111 | 944 return SET_DEST (PATTERN (insn)); |
0 | 945 else |
946 { | |
947 /* A store. */ | |
948 gcc_assert (REG_P (SET_SRC (PATTERN (insn)))); | |
949 return SET_SRC (PATTERN (insn)); | |
950 } | |
951 } | |
952 | |
953 /* Return nonzero if the predecessors of BB are "well behaved". */ | |
954 | |
955 static bool | |
956 bb_has_well_behaved_predecessors (basic_block bb) | |
957 { | |
958 edge pred; | |
959 edge_iterator ei; | |
960 | |
961 if (EDGE_COUNT (bb->preds) == 0) | |
962 return false; | |
963 | |
964 FOR_EACH_EDGE (pred, ei, bb->preds) | |
965 { | |
111 | 966 /* commit_one_edge_insertion refuses to insert on abnormal edges even if |
967 the source has only one successor so EDGE_CRITICAL_P is too weak. */ | |
968 if ((pred->flags & EDGE_ABNORMAL) && !single_pred_p (pred->dest)) | |
0 | 969 return false; |
970 | |
111 | 971 if ((pred->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label) |
972 return false; | |
973 | |
974 if (tablejump_p (BB_END (pred->src), NULL, NULL)) | |
0 | 975 return false; |
976 } | |
977 return true; | |
978 } | |
979 | |
980 | |
981 /* Search for the occurrences of expression in BB. */ | |
982 | |
983 static struct occr* | |
111 | 984 get_bb_avail_insn (basic_block bb, struct occr *orig_occr, int bitmap_index) |
0 | 985 { |
111 | 986 struct occr *occr = orig_occr; |
987 | |
0 | 988 for (; occr != NULL; occr = occr->next) |
989 if (BLOCK_FOR_INSN (occr->insn) == bb) | |
990 return occr; | |
111 | 991 |
992 /* If we could not find an occurrence in BB, see if BB | |
993 has a single predecessor with an occurrence that is | |
994 transparent through BB. */ | |
995 if (single_pred_p (bb) | |
996 && bitmap_bit_p (transp[bb->index], bitmap_index) | |
997 && (occr = get_bb_avail_insn (single_pred (bb), orig_occr, bitmap_index))) | |
998 { | |
999 rtx avail_reg = get_avail_load_store_reg (occr->insn); | |
1000 if (!reg_set_between_p (avail_reg, | |
1001 PREV_INSN (BB_HEAD (bb)), | |
1002 NEXT_INSN (BB_END (bb))) | |
1003 && !reg_killed_on_edge (avail_reg, single_pred_edge (bb))) | |
1004 return occr; | |
1005 } | |
1006 | |
0 | 1007 return NULL; |
1008 } | |
1009 | |
1010 | |
111 | 1011 /* This helper is called via htab_traverse. */ |
1012 int | |
1013 compute_expr_transp (expr **slot, FILE *dump_file ATTRIBUTE_UNUSED) | |
1014 { | |
1015 struct expr *expr = *slot; | |
1016 | |
1017 compute_transp (expr->expr, expr->bitmap_index, transp, | |
1018 blocks_with_calls, modify_mem_list_set, | |
1019 canon_modify_mem_list); | |
1020 return 1; | |
1021 } | |
1022 | |
0 | 1023 /* This handles the case where several stores feed a partially redundant |
1024 load. It checks if the redundancy elimination is possible and if it's | |
1025 worth it. | |
1026 | |
1027 Redundancy elimination is possible if, | |
1028 1) None of the operands of an insn have been modified since the start | |
1029 of the current basic block. | |
1030 2) In any predecessor of the current basic block, the same expression | |
1031 is generated. | |
1032 | |
1033 See the function body for the heuristics that determine if eliminating | |
1034 a redundancy is also worth doing, assuming it is possible. */ | |
1035 | |
1036 static void | |
111 | 1037 eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn, |
0 | 1038 struct expr *expr) |
1039 { | |
1040 edge pred; | |
111 | 1041 rtx_insn *avail_insn = NULL; |
0 | 1042 rtx avail_reg; |
1043 rtx dest, pat; | |
1044 struct occr *a_occr; | |
1045 struct unoccr *occr, *avail_occrs = NULL; | |
1046 struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL; | |
1047 int npred_ok = 0; | |
111 | 1048 profile_count ok_count = profile_count::zero (); |
1049 /* Redundant load execution count. */ | |
1050 profile_count critical_count = profile_count::zero (); | |
1051 /* Execution count of critical edges. */ | |
0 | 1052 edge_iterator ei; |
1053 bool critical_edge_split = false; | |
1054 | |
1055 /* The execution count of the loads to be added to make the | |
1056 load fully redundant. */ | |
111 | 1057 profile_count not_ok_count = profile_count::zero (); |
0 | 1058 basic_block pred_bb; |
1059 | |
1060 pat = PATTERN (insn); | |
1061 dest = SET_DEST (pat); | |
1062 | |
1063 /* Check that the loaded register is not used, set, or killed from the | |
1064 beginning of the block. */ | |
1065 if (reg_changed_after_insn_p (dest, 0) | |
1066 || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn)) | |
1067 return; | |
1068 | |
1069 /* Check potential for replacing load with copy for predecessors. */ | |
1070 FOR_EACH_EDGE (pred, ei, bb->preds) | |
1071 { | |
111 | 1072 rtx_insn *next_pred_bb_end; |
0 | 1073 |
111 | 1074 avail_insn = NULL; |
0 | 1075 avail_reg = NULL_RTX; |
1076 pred_bb = pred->src; | |
111 | 1077 for (a_occr = get_bb_avail_insn (pred_bb, |
1078 expr->avail_occr, | |
1079 expr->bitmap_index); | |
1080 a_occr; | |
1081 a_occr = get_bb_avail_insn (pred_bb, | |
1082 a_occr->next, | |
1083 expr->bitmap_index)) | |
0 | 1084 { |
1085 /* Check if the loaded register is not used. */ | |
1086 avail_insn = a_occr->insn; | |
1087 avail_reg = get_avail_load_store_reg (avail_insn); | |
1088 gcc_assert (avail_reg); | |
1089 |
0 | 1090 /* Make sure we can generate a move from register avail_reg to |
1091 dest. */ | |
111 | 1092 rtx_insn *move = gen_move_insn (copy_rtx (dest), |
1093 copy_rtx (avail_reg)); | |
1094 extract_insn (move); | |
1095 if (! constrain_operands (1, get_preferred_alternatives (insn, | |
1096 pred_bb)) | |
0 | 1097 || reg_killed_on_edge (avail_reg, pred) |
1098 || reg_used_on_edge (dest, pred)) | |
1099 { | |
1100 avail_insn = NULL; | |
1101 continue; | |
1102 } | |
111 | 1103 next_pred_bb_end = NEXT_INSN (BB_END (BLOCK_FOR_INSN (avail_insn))); |
0 | 1104 if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end)) |
1105 /* AVAIL_INSN remains non-null. */ | |
1106 break; | |
1107 else | |
1108 avail_insn = NULL; | |
1109 } | |
1110 | |
111 | 1111 if (EDGE_CRITICAL_P (pred) && pred->count ().initialized_p ()) |
1112 critical_count += pred->count (); | |
0 | 1113 |
1114 if (avail_insn != NULL_RTX) | |
1115 { | |
1116 npred_ok++; | |
111 | 1117 if (pred->count ().initialized_p ()) |
1118 ok_count = ok_count + pred->count (); | |
0 | 1119 if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest), |
1120 copy_rtx (avail_reg))))) | |
1121 { | |
1122 /* Check if there is going to be a split. */ | |
1123 if (EDGE_CRITICAL_P (pred)) | |
1124 critical_edge_split = true; | |
1125 } | |
1126 else /* It's a dead move, no need to generate. */ | |
1127 continue; | |
1128 occr = (struct unoccr *) obstack_alloc (&unoccr_obstack, | |
1129 sizeof (struct unoccr)); | |
1130 occr->insn = avail_insn; | |
1131 occr->pred = pred; | |
1132 occr->next = avail_occrs; | |
1133 avail_occrs = occr; | |
1134 if (! rollback_unoccr) | |
1135 rollback_unoccr = occr; | |
1136 } | |
1137 else | |
1138 { | |
1139 /* Adding a load on a critical edge will cause a split. */ | |
1140 if (EDGE_CRITICAL_P (pred)) | |
1141 critical_edge_split = true; | |
111 | 1142 if (pred->count ().initialized_p ()) |
1143 not_ok_count = not_ok_count + pred->count (); | |
0 | 1144 unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack, |
1145 sizeof (struct unoccr)); | |
111 | 1146 unoccr->insn = NULL; |
0 | 1147 unoccr->pred = pred; |
1148 unoccr->next = unavail_occrs; | |
1149 unavail_occrs = unoccr; | |
1150 if (! rollback_unoccr) | |
1151 rollback_unoccr = unoccr; | |
1152 } | |
1153 } | |
1154 | |
1155 if (/* No load can be replaced by copy. */ | |
1156 npred_ok == 0 | |
1157 /* Prevent exploding the code. */ |
0 | 1158 || (optimize_bb_for_size_p (bb) && npred_ok > 1) |
1159 /* If we don't have profile information, we cannot tell if splitting |
0 | 1160 a critical edge is profitable or not, so don't do it. */ |
111 | 1161 || ((! profile_info || profile_status_for_fn (cfun) != PROFILE_READ |
0 | 1162 || targetm.cannot_modify_jumps_p ()) |
1163 && critical_edge_split)) | |
1164 goto cleanup; | |
1165 | |
1166 /* Check if it's worth applying the partial redundancy elimination. */ | |
111 | 1167 if (ok_count.to_gcov_type () |
1168 < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count.to_gcov_type ()) | |
0 | 1169 goto cleanup; |
111 | 1170 if (ok_count.to_gcov_type () |
1171 < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count.to_gcov_type ()) | |
0 | 1172 goto cleanup; |
1173 | |
1174 /* Generate moves to the loaded register from where | |
1175 the memory is available. */ | |
1176 for (occr = avail_occrs; occr; occr = occr->next) | |
1177 { | |
1178 avail_insn = occr->insn; | |
1179 pred = occr->pred; | |
1180 /* Set avail_reg to be the register having the value of the | |
1181 memory. */ | |
1182 avail_reg = get_avail_load_store_reg (avail_insn); | |
1183 gcc_assert (avail_reg); | |
1184 | |
1185 insert_insn_on_edge (gen_move_insn (copy_rtx (dest), | |
1186 copy_rtx (avail_reg)), | |
1187 pred); | |
1188 stats.moves_inserted++; | |
1189 | |
1190 if (dump_file) | |
1191 fprintf (dump_file, | |
1192 "generating move from %d to %d on edge from %d to %d\n", | |
1193 REGNO (avail_reg), | |
1194 REGNO (dest), | |
1195 pred->src->index, | |
1196 pred->dest->index); | |
1197 } | |
1198 | |
1199 /* Regenerate loads where the memory is unavailable. */ | |
1200 for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next) | |
1201 { | |
1202 pred = unoccr->pred; | |
1203 insert_insn_on_edge (copy_insn (PATTERN (insn)), pred); | |
1204 stats.copies_inserted++; | |
1205 | |
1206 if (dump_file) | |
1207 { | |
1208 fprintf (dump_file, | |
1209 "generating on edge from %d to %d a copy of load: ", | |
1210 pred->src->index, | |
1211 pred->dest->index); | |
1212 print_rtl (dump_file, PATTERN (insn)); | |
1213 fprintf (dump_file, "\n"); | |
1214 } | |
1215 } | |
1216 | |
1217 /* Delete the insn if it is not available in this block and mark it | |
1218 for deletion if it is available. If insn is available it may help | |
1219 discover additional redundancies, so mark it for later deletion. */ | |
111 | 1220 for (a_occr = get_bb_avail_insn (bb, expr->avail_occr, expr->bitmap_index); |
0 | 1221 a_occr && (a_occr->insn != insn); |
111 | 1222 a_occr = get_bb_avail_insn (bb, a_occr->next, expr->bitmap_index)) |
1223 ; | |
0 | 1224 |
1225 if (!a_occr) | |
1226 { | |
1227 stats.insns_deleted++; | |
1228 | |
1229 if (dump_file) | |
1230 { | |
1231 fprintf (dump_file, "deleting insn:\n"); | |
1232 print_rtl_single (dump_file, insn); | |
1233 fprintf (dump_file, "\n"); | |
1234 } | |
1235 delete_insn (insn); | |
1236 } | |
1237 else | |
1238 a_occr->deleted_p = 1; | |
1239 | |
1240 cleanup: | |
1241 if (rollback_unoccr) | |
1242 obstack_free (&unoccr_obstack, rollback_unoccr); | |
1243 } | |
1244 | |
1245 /* Perform the redundancy elimination as described before. */ | |
1246 | |
1247 static void | |
1248 eliminate_partially_redundant_loads (void) | |
1249 { | |
111 | 1250 rtx_insn *insn; |
0 | 1251 basic_block bb; |
1252 | |
1253 /* Note we start at block 1. */ | |
1254 | |
111 | 1255 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)) |
0 | 1256 return; |
1257 | |
1258 FOR_BB_BETWEEN (bb, | |
111 | 1259 ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb, |
1260 EXIT_BLOCK_PTR_FOR_FN (cfun), | |
0 | 1261 next_bb) |
1262 { | |
1263 /* Don't try anything on basic blocks with strange predecessors. */ | |
1264 if (! bb_has_well_behaved_predecessors (bb)) | |
1265 continue; | |
1266 | |
1267 /* Do not try anything on cold basic blocks. */ | |
1268 if (optimize_bb_for_size_p (bb)) | |
1269 continue; | |
1270 | |
1271 /* Reset the table of things changed since the start of the current | |
1272 basic block. */ | |
1273 reset_opr_set_tables (); | |
1274 | |
1275 /* Look at all insns in the current basic block and see if there are | |
1276 any loads in it that we can record. */ | |
1277 FOR_BB_INSNS (bb, insn) | |
1278 { | |
1279 /* Is it a load - of the form (set (reg) (mem))? */ | |
1280 if (NONJUMP_INSN_P (insn) | |
1281 && GET_CODE (PATTERN (insn)) == SET | |
1282 && REG_P (SET_DEST (PATTERN (insn))) | |
1283 && MEM_P (SET_SRC (PATTERN (insn)))) | |
1284 { | |
1285 rtx pat = PATTERN (insn); | |
1286 rtx src = SET_SRC (pat); | |
1287 struct expr *expr; | |
1288 | |
1289 if (!MEM_VOLATILE_P (src) | |
1290 && GET_MODE (src) != BLKmode | |
1291 && general_operand (src, GET_MODE (src)) | |
1292 /* Are the operands unchanged since the start of the | |
1293 block? */ | |
1294 && oprs_unchanged_p (src, insn, false) | |
1295 && !(cfun->can_throw_non_call_exceptions && may_trap_p (src)) |
0 | 1296 && !side_effects_p (src) |
1297 /* Is the expression recorded? */ | |
1298 && (expr = lookup_expr_in_table (src)) != NULL) | |
1299 { | |
1300 /* We now have a load (insn) and an available memory at | |
1301 its BB start (expr). Try to remove the load if it is | |
1302 redundant. */ | |
1303 eliminate_partially_redundant_load (bb, insn, expr); | |
1304 } | |
1305 } | |
1306 | |
1307 /* Keep track of everything modified by this insn, so that we | |
1308 know what has been modified since the start of the current | |
1309 basic block. */ | |
1310 if (INSN_P (insn)) | |
1311 record_opr_changes (insn); | |
1312 } | |
1313 } | |
1314 | |
1315 commit_edge_insertions (); | |
1316 } | |
1317 | |
1318 /* Go over the expression hash table and delete insns that were | |
1319 marked for later deletion. */ | |
1320 | |
1321 /* This helper is called via htab_traverse. */ | |
111 | 1322 int |
1323 delete_redundant_insns_1 (expr **slot, void *data ATTRIBUTE_UNUSED) | |
0 | 1324 { |
111 | 1325 struct expr *exprs = *slot; |
0 | 1326 struct occr *occr; |
1327 | |
111 | 1328 for (occr = exprs->avail_occr; occr != NULL; occr = occr->next) |
0 | 1329 { |
1330 if (occr->deleted_p && dbg_cnt (gcse2_delete)) | |
1331 { | |
1332 delete_insn (occr->insn); | |
1333 stats.insns_deleted++; | |
1334 | |
1335 if (dump_file) | |
1336 { | |
1337 fprintf (dump_file, "deleting insn:\n"); | |
1338 print_rtl_single (dump_file, occr->insn); | |
1339 fprintf (dump_file, "\n"); | |
1340 } | |
1341 } | |
1342 } | |
1343 | |
1344 return 1; | |
1345 } | |
1346 | |
1347 static void | |
1348 delete_redundant_insns (void) | |
1349 { | |
111 | 1350 expr_table->traverse <void *, delete_redundant_insns_1> (NULL); |
0 | 1351 if (dump_file) |
1352 fprintf (dump_file, "\n"); | |
1353 } | |
1354 | |
1355 /* Main entry point of the GCSE after reload - clean some redundant loads | |
1356 due to spilling. */ | |
1357 | |
1358 static void | |
1359 gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED) | |
1360 { | |
1361 | |
1362 memset (&stats, 0, sizeof (stats)); | |
1363 | |
1364 /* Allocate memory for this pass. | |
1365 Also computes and initializes the insns' CUIDs. */ | |
1366 alloc_mem (); | |
1367 | |
1368 /* We need alias analysis. */ | |
1369 init_alias_analysis (); | |
1370 | |
1371 compute_hash_table (); | |
1372 | |
1373 if (dump_file) | |
1374 dump_hash_table (dump_file); | |
1375 | |
111 | 1376 if (expr_table->elements () > 0) |
0 | 1377 { |
111 | 1378 /* Knowing which MEMs are transparent through a block can significantly |
1379 increase the number of redundant loads found. So compute transparency | |
1380 information for each memory expression in the hash table. */ | |
1381 df_analyze (); | |
1382 /* This cannot be part of the normal allocation routine because | |
1383 we have to know the number of elements in the hash table. */ | |
1384 transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), | |
1385 expr_table->elements ()); | |
1386 bitmap_vector_ones (transp, last_basic_block_for_fn (cfun)); | |
1387 expr_table->traverse <FILE *, compute_expr_transp> (dump_file); | |
0 | 1388 eliminate_partially_redundant_loads (); |
1389 delete_redundant_insns (); | |
111 | 1390 sbitmap_vector_free (transp); |
0 | 1391 |
1392 if (dump_file) | |
1393 { | |
1394 fprintf (dump_file, "GCSE AFTER RELOAD stats:\n"); | |
1395 fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted); | |
1396 fprintf (dump_file, "moves inserted: %d\n", stats.moves_inserted); | |
1397 fprintf (dump_file, "insns deleted: %d\n", stats.insns_deleted); | |
1398 fprintf (dump_file, "\n\n"); | |
1399 } | |
111 | 1400 |
1401 statistics_counter_event (cfun, "copies inserted", | |
1402 stats.copies_inserted); | |
1403 statistics_counter_event (cfun, "moves inserted", | |
1404 stats.moves_inserted); | |
1405 statistics_counter_event (cfun, "insns deleted", | |
1406 stats.insns_deleted); | |
0 | 1407 } |
1408 |
0 | 1409 /* We are finished with alias. */ |
1410 end_alias_analysis (); | |
1411 | |
1412 free_mem (); | |
1413 } | |
1414 | |
1415 | |
1416 | |
1417 static unsigned int | |
1418 rest_of_handle_gcse2 (void) | |
1419 { | |
1420 gcse_after_reload_main (get_insns ()); | |
1421 rebuild_jump_labels (get_insns ()); | |
1422 return 0; | |
1423 } | |
1424 | |
111 | 1425 namespace { |
1426 | |
1427 const pass_data pass_data_gcse2 = | |
0 | 1428 { |
111 | 1429 RTL_PASS, /* type */ |
1430 "gcse2", /* name */ | |
1431 OPTGROUP_NONE, /* optinfo_flags */ | |
1432 TV_GCSE_AFTER_RELOAD, /* tv_id */ | |
1433 0, /* properties_required */ | |
1434 0, /* properties_provided */ | |
1435 0, /* properties_destroyed */ | |
1436 0, /* todo_flags_start */ | |
1437 0, /* todo_flags_finish */ | |
0 | 1438 }; |
1439 | |
111 | 1440 class pass_gcse2 : public rtl_opt_pass |
1441 { | |
1442 public: | |
1443 pass_gcse2 (gcc::context *ctxt) | |
1444 : rtl_opt_pass (pass_data_gcse2, ctxt) | |
1445 {} | |
1446 | |
1447 /* opt_pass methods: */ | |
1448 virtual bool gate (function *fun) | |
1449 { | |
1450 return (optimize > 0 && flag_gcse_after_reload | |
1451 && optimize_function_for_speed_p (fun)); | |
1452 } | |
1453 | |
1454 virtual unsigned int execute (function *) { return rest_of_handle_gcse2 (); } | |
1455 | |
1456 }; // class pass_gcse2 | |
1457 | |
1458 } // anon namespace | |
1459 | |
1460 rtl_opt_pass * | |
1461 make_pass_gcse2 (gcc::context *ctxt) | |
1462 { | |
1463 return new pass_gcse2 (ctxt); | |
1464 } |