/* Post reload partially redundant load elimination
   Copyright (C) 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "intl.h"
#include "obstack.h"
#include "hashtab.h"
#include "params.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "dbgcnt.h"

/* The following code implements gcse after reload.  The purpose of this
   pass is to clean up redundant loads generated by reload and other
   optimizations that come after gcse.  It searches for simple inter-block
   redundancies and tries to eliminate them by adding moves and loads
   in cold places.

   Perform partially redundant load elimination, trying to eliminate
   redundant loads created by the reload pass.  We look for fully or
   partially redundant loads fed by one or more loads/stores in
   predecessor BBs, and try adding loads to make them fully redundant.
   We also check if it's worth adding loads to be able to delete the
   redundant load.

   Algorithm:
   1. Build available expressions hash table:
      For each load/store instruction, if the loaded/stored memory didn't
      change until the end of the basic block, add this memory expression
      to the hash table.
   2. Perform redundancy elimination:
      For each load instruction do the following:
	 Perform partial redundancy elimination; check if it's worth adding
	 loads to make the load fully redundant.  If so, add loads and
	 register copies and delete the load.
   3. Delete instructions made redundant in step 2.

   Future enhancement:
     If the loaded register is used/defined between the load and some store,
     look for some other free register between the load and all its stores,
     and replace the load with a copy from this register to the loaded
     register.
*/
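
/* For illustration, a hypothetical example of the transformation this
   pass performs (the block layout, register numbers and the memory
   reference are made up):

       bb1: (set (reg 3) (mem X))       bb2: (no access to X)
              \                           /
               v                         v
              bb3: (set (reg 3) (mem X))    <- partially redundant

   The load in bb3 is redundant on the path through bb1 but not on the
   path through bb2.  If the profile counts say the bb2 path is cold
   enough, we insert a copy of the load on the bb2->bb3 edge (and, had
   bb1 loaded into a different register, a reg-reg move on the bb1->bb3
   edge), which makes the load in bb3 fully redundant so it can be
   deleted.  */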

/* Keep statistics of this pass.  */
static struct
{
  int moves_inserted;
  int copies_inserted;
  int insns_deleted;
} stats;

/* We need to keep a hash table of expressions.  The table entries are of
   type 'struct expr', and for each expression there is a singly linked
   list of occurrences.  */

/* The table itself.  */
static htab_t expr_table;

/* Expression elements in the hash table.  */
struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* The cached hash value for this entry.  */
  hashval_t hash;

  /* List of available occurrences in basic blocks in the function.  */
  struct occr *avail_occr;
};

static struct obstack expr_obstack;

/* Occurrence of an expression.
   There is at most one occurrence per basic block.  If a pattern appears
   more than once, the last appearance is used.  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
};

static struct obstack occr_obstack;

/* The following structure holds the information about the occurrences of
   the redundant instructions.  */
struct unoccr
{
  struct unoccr *next;
  edge pred;
  rtx insn;
};

static struct obstack unoccr_obstack;

/* Array where each element is the CUID of the insn that last set the hard
   register with the number of the element, since the start of the current
   basic block.

   This array is used during the building of the hash table (step 1) to
   determine if a reg is killed before the end of a basic block.

   It is also used when eliminating partial redundancies (step 2) to see
   if a reg was modified since the start of a basic block.  */
static int *reg_avail_info;

/* A list of insns that may modify memory within the current basic block.  */
struct modifies_mem
{
  rtx insn;
  struct modifies_mem *next;
};
static struct modifies_mem *modifies_mem_list;

/* The modifies_mem structs also go on an obstack, only this obstack is
   freed each time after completing the analysis or transformations on
   a basic block.  So we allocate a dummy modifies_mem_obstack_bottom
   object on the obstack to keep track of the bottom of the obstack.  */
static struct obstack modifies_mem_obstack;
static struct modifies_mem *modifies_mem_obstack_bottom;
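
/* For illustration, the rollback idiom used with this obstack (a sketch,
   not code from this file): freeing back to the dummy bottom object
   releases every modifies_mem entry allocated for the current block in
   one step,

       obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
       modifies_mem_list = NULL;

   which is exactly what reset_opr_set_tables does below.  */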

/* Mapping of insn UIDs to CUIDs.
   CUIDs are like UIDs except they increase monotonically in each basic
   block, have no gaps, and only apply to real insns.  */
static int *uid_cuid;
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
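
/* For example (with made-up UIDs), a block whose insns have UIDs
   14 (note), 15 (insn), 18 (insn), 19 (note), 23 (insn) gets the CUID
   mapping 14->1, 15->1, 18->2, 19->3, 23->3: each real insn gets the
   next CUID, and a note shares the CUID of the following real insn,
   as assigned in alloc_mem below.  */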


/* Helpers for memory allocation/freeing.  */
static void alloc_mem (void);
static void free_mem (void);

/* Support for hash table construction and transformations.  */
static bool oprs_unchanged_p (rtx, rtx, bool);
static void record_last_reg_set_info (rtx, rtx);
static void record_last_reg_set_info_regno (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, const_rtx, void *);
static void record_opr_changes (rtx);

static void find_mem_conflicts (rtx, const_rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
static void reset_opr_set_tables (void);

/* Hash table support.  */
static hashval_t hash_expr (rtx, int *);
static hashval_t hash_expr_for_htab (const void *);
static int expr_equiv_p (const void *, const void *);
static void insert_expr_in_table (rtx, rtx);
static struct expr *lookup_expr_in_table (rtx);
static int dump_hash_table_entry (void **, void *);
static void dump_hash_table (FILE *);

/* Helpers for eliminate_partially_redundant_load.  */
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);

static rtx get_avail_load_store_reg (rtx);

static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr* get_bb_avail_insn (basic_block, struct occr *);
static void hash_scan_set (rtx);
static void compute_hash_table (void);

/* The workhorses of this pass.  */
static void eliminate_partially_redundant_load (basic_block,
						rtx,
						struct expr *);
static void eliminate_partially_redundant_loads (void);


/* Allocate memory for the CUID mapping array and register/memory
   tracking tables.  */

static void
alloc_mem (void)
{
  int i;
  basic_block bb;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
  uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
  i = 1;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
	if (INSN_P (insn))
	  uid_cuid[INSN_UID (insn)] = i++;
	else
	  uid_cuid[INSN_UID (insn)] = i;
      }

  /* Allocate the available expressions hash table.  We don't want to
     make the hash table too small, but unnecessarily making it too large
     also doesn't help.  The i/4 is a gcse.c relic, and seems like a
     reasonable choice.  */
  expr_table = htab_create (MAX (i / 4, 13),
			    hash_expr_for_htab, expr_equiv_p, NULL);

  /* We allocate everything on obstacks because we often can roll back
     the whole obstack to some point.  Freeing obstacks is very fast.  */
  gcc_obstack_init (&expr_obstack);
  gcc_obstack_init (&occr_obstack);
  gcc_obstack_init (&unoccr_obstack);
  gcc_obstack_init (&modifies_mem_obstack);

  /* Working array used to track the last set for each register
     in the current block.  */
  reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));

  /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
     can roll it back in reset_opr_set_tables.  */
  modifies_mem_obstack_bottom =
    (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
					   sizeof (struct modifies_mem));
}

/* Free memory allocated by alloc_mem.  */

static void
free_mem (void)
{
  free (uid_cuid);

  htab_delete (expr_table);

  obstack_free (&expr_obstack, NULL);
  obstack_free (&occr_obstack, NULL);
  obstack_free (&unoccr_obstack, NULL);
  obstack_free (&modifies_mem_obstack, NULL);

  free (reg_avail_info);
}


/* Hash expression X.
   DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
   or if the expression contains something we don't want to insert in the
   table.  */

static hashval_t
hash_expr (rtx x, int *do_not_record_p)
{
  *do_not_record_p = 0;
  return hash_rtx (x, GET_MODE (x), do_not_record_p,
		   NULL, /*have_reg_qty=*/false);
}

/* Callback for hashtab.
   Return the hash value for expression EXP.  We don't actually hash
   here, we just return the cached hash value.  */

static hashval_t
hash_expr_for_htab (const void *expp)
{
  const struct expr *const exp = (const struct expr *) expp;
  return exp->hash;
}

/* Callback for hashtab.
   Return nonzero if exp1 is equivalent to exp2.  */

static int
expr_equiv_p (const void *exp1p, const void *exp2p)
{
  const struct expr *const exp1 = (const struct expr *) exp1p;
  const struct expr *const exp2 = (const struct expr *) exp2p;
  int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);

  gcc_assert (!equiv_p || exp1->hash == exp2->hash);
  return equiv_p;
}


/* Insert expression X in INSN in the hash table.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_expr_in_table (rtx x, rtx insn)
{
  int do_not_record_p;
  hashval_t hash;
  struct expr *cur_expr, **slot;
  struct occr *avail_occr, *last_occr = NULL;

  hash = hash_expr (x, &do_not_record_p);

  /* Do not insert expression in the table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  /* We anticipate that redundant expressions are rare, so for convenience
     allocate a new hash table element here already and set its fields.
     If we don't do this, we need a hack with a static struct expr.  Anyway,
     obstack_free is really fast and one more obstack_alloc doesn't hurt if
     we're going to see more expressions later on.  */
  cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  cur_expr->expr = x;
  cur_expr->hash = hash;
  cur_expr->avail_occr = NULL;

  slot = (struct expr **) htab_find_slot_with_hash (expr_table, cur_expr,
						    hash, INSERT);

  if (! (*slot))
    /* The expression isn't found, so insert it.  */
    *slot = cur_expr;
  else
    {
      /* The expression is already in the table, so roll back the
	 obstack and use the existing table entry.  */
      obstack_free (&expr_obstack, cur_expr);
      cur_expr = *slot;
    }

  /* Search for another occurrence in the same basic block.  */
  avail_occr = cur_expr->avail_occr;
  while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
	 the list.  */
      last_occr = avail_occr;
      avail_occr = avail_occr->next;
    }

  if (avail_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want
       the last one in the block and the block is scanned from start
       to end.  */
    avail_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
						  sizeof (struct occr));

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
	cur_expr->avail_occr = avail_occr;
      else
	last_occr->next = avail_occr;

      avail_occr->insn = insn;
      avail_occr->next = NULL;
      avail_occr->deleted_p = 0;
    }
}
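
/* As an example of the bookkeeping above (with hypothetical insns), if
   (mem X) is inserted for insn A in bb 4 and later for insn B also in
   bb 4, the single occurrence node for bb 4 is updated to point at B:
   only the last, and thus still-available, occurrence in a block
   matters.  A subsequent insertion for insn C in bb 7 appends a second
   occurrence node to the list.  */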


/* Lookup pattern PAT in the expression hash table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr_in_table (rtx pat)
{
  int do_not_record_p;
  struct expr **slot, *tmp_expr;
  hashval_t hash = hash_expr (pat, &do_not_record_p);

  if (do_not_record_p)
    return NULL;

  tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  tmp_expr->expr = pat;
  tmp_expr->hash = hash;
  tmp_expr->avail_occr = NULL;

  slot = (struct expr **) htab_find_slot_with_hash (expr_table, tmp_expr,
						    hash, INSERT);
  obstack_free (&expr_obstack, tmp_expr);

  if (!slot)
    return NULL;
  else
    return (*slot);
}


/* Dump all expressions and occurrences that are currently in the
   expression hash table to FILE.  */

/* This helper is called via htab_traverse.  */
static int
dump_hash_table_entry (void **slot, void *filep)
{
  struct expr *expr = (struct expr *) *slot;
  FILE *file = (FILE *) filep;
  struct occr *occr;

  fprintf (file, "expr: ");
  print_rtl (file, expr->expr);
  fprintf (file, "\nhashcode: %u\n", expr->hash);
  fprintf (file, "list of occurrences:\n");
  occr = expr->avail_occr;
  while (occr)
    {
      rtx insn = occr->insn;
      print_rtl_single (file, insn);
      fprintf (file, "\n");
      occr = occr->next;
    }
  fprintf (file, "\n");
  return 1;
}

static void
dump_hash_table (FILE *file)
{
  fprintf (file, "\n\nexpression hash table\n");
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
	   (long) htab_size (expr_table),
	   (long) htab_elements (expr_table),
	   htab_collisions (expr_table));
  if (htab_elements (expr_table) > 0)
    {
      fprintf (file, "\n\ntable entries:\n");
      htab_traverse (expr_table, dump_hash_table_entry, file);
    }
  fprintf (file, "\n");
}

/* Return true if register X is recorded as being set by an instruction
   whose CUID is greater than the one given.  */

static bool
reg_changed_after_insn_p (rtx x, int cuid)
{
  unsigned int regno, end_regno;

  regno = REGNO (x);
  end_regno = END_HARD_REGNO (x);
  do
    if (reg_avail_info[regno] > cuid)
      return true;
  while (++regno < end_regno);
  return false;
}

/* Return nonzero if the operands of expression X are unchanged
   1) from the start of INSN's basic block up to but not including INSN
      if AFTER_INSN is false, or
   2) from INSN to the end of INSN's basic block if AFTER_INSN is true.  */

static bool
oprs_unchanged_p (rtx x, rtx insn, bool after_insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      /* We are called after register allocation.  */
      gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
      if (after_insn)
	return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
      else
	return !reg_changed_after_insn_p (x, 0);

    case MEM:
      if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (after_insn)
	return 0;
      break;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
	    return 0;
    }

  return 1;
}
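
/* A worked example of the REG case above (with made-up CUIDs): suppose
   register R was last set by the insn with CUID 5, so
   reg_avail_info[R] == 5.  For an insn I with INSN_CUID (I) == 5 and
   AFTER_INSN true we test reg_avail_info[R] > 4, which holds: R is
   modified by I itself or later, so an expression using R is not
   available from I to the end of the block.  With AFTER_INSN false we
   test reg_avail_info[R] > 0, which also holds: R has been set since
   the start of the block.  */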


/* Used for communication between find_mem_conflicts and
   load_killed_in_block_p.  Nonzero if find_mem_conflicts finds a
   conflict between two memory references.
   This is a bit of a hack to work around the limitations of note_stores.  */
static int mems_conflict_p;

/* DEST is the output of an instruction.  If it is a memory reference and
   possibly conflicts with the load found in DATA, then set mems_conflict_p
   to a nonzero value.  */

static void
find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
		    void *data)
{
  rtx mem_op = (rtx) data;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  if (true_dependence (dest, GET_MODE (dest), mem_op,
		       rtx_addr_varies_p))
    mems_conflict_p = 1;
}


/* Return nonzero if the expression in X (a memory reference) is killed
   in the current basic block before (if AFTER_INSN is false) or after
   (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.

   This function assumes that the modifies_mem table is flushed when
   the hash table construction or redundancy elimination phases start
   processing a new basic block.  */

static int
load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
{
  struct modifies_mem *list_entry = modifies_mem_list;

  while (list_entry)
    {
      rtx setter = list_entry->insn;

      /* Ignore entries in the list that do not apply.  */
      if ((after_insn
	   && INSN_CUID (setter) < uid_limit)
	  || (! after_insn
	      && INSN_CUID (setter) > uid_limit))
	{
	  list_entry = list_entry->next;
	  continue;
	}

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (CALL_P (setter))
	return 1;

      /* SETTER must be an insn of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.
	 It will set mems_conflict_p to nonzero if there may be a
	 conflict between X and SETTER.  */
      mems_conflict_p = 0;
      note_stores (PATTERN (setter), find_mem_conflicts, x);
      if (mems_conflict_p)
	return 1;

      list_entry = list_entry->next;
    }
  return 0;
}
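
/* To make the filtering above concrete (with made-up CUIDs): querying
   for a load at CUID 7 with AFTER_INSN true skips memory-modifying
   insns with CUID < 7 and tests the load only against setters at or
   after it; with AFTER_INSN false it skips setters with CUID > 7 and
   tests only those that come at or before the load in the block.  */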


/* Record register set information for REG in INSN.  */

static inline void
record_last_reg_set_info (rtx insn, rtx reg)
{
  unsigned int regno, end_regno;

  regno = REGNO (reg);
  end_regno = END_HARD_REGNO (reg);
  do
    reg_avail_info[regno] = INSN_CUID (insn);
  while (++regno < end_regno);
}

static inline void
record_last_reg_set_info_regno (rtx insn, int regno)
{
  reg_avail_info[regno] = INSN_CUID (insn);
}


/* Record memory modification information for INSN.  We do not actually
   care about the memory location(s) that are set, or even how they are
   set (consider a CALL_INSN).  We merely need to record which insns
   modify memory.  */

static void
record_last_mem_set_info (rtx insn)
{
  struct modifies_mem *list_entry;

  list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
						      sizeof (struct modifies_mem));
  list_entry->insn = insn;
  list_entry->next = modifies_mem_list;
  modifies_mem_list = list_entry;
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, dest);
  else if (MEM_P (dest))
    {
      /* Ignore pushes, they don't clobber memory.  They may still
	 clobber the stack pointer though.  Some targets do argument
	 pushes without adding REG_INC notes.  See e.g. PR25196,
	 where a pushsi2 on i386 doesn't have REG_INC notes.  Note
	 such changes here too.  */
      if (! push_operand (dest, GET_MODE (dest)))
	record_last_mem_set_info (last_set_insn);
      else
	record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
    }
}


/* Reset tables used to keep track of what's still available since the
   start of the block.  */

static void
reset_opr_set_tables (void)
{
  memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
  obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
  modifies_mem_list = NULL;
}


/* Record things set by INSN.
   This data is used by oprs_unchanged_p.  */

static void
record_opr_changes (rtx insn)
{
  rtx note;

  /* Find all stores and record them.  */
  note_stores (PATTERN (insn), record_last_set_info, insn);

  /* Also record autoincremented REGs for this insn as changed.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC)
      record_last_reg_set_info (insn, XEXP (note, 0));

  /* Finally, if this is a call, record all call clobbers.  */
  if (CALL_P (insn))
    {
      unsigned int regno;
      rtx link, x;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
	  record_last_reg_set_info_regno (insn, regno);

      for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
	if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	  {
	    x = XEXP (XEXP (link, 0), 0);
	    if (REG_P (x))
	      {
		gcc_assert (HARD_REGISTER_P (x));
		record_last_reg_set_info (insn, x);
	      }
	  }

      if (! RTL_CONST_OR_PURE_CALL_P (insn))
	record_last_mem_set_info (insn);
    }
}


/* Scan the pattern of INSN and add an entry to the hash table.
   After reload we are interested in loads/stores only.  */

static void
hash_scan_set (rtx insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  /* We are only interested in loads and stores.  */
  if (! MEM_P (src) && ! MEM_P (dest))
    return;

  /* Don't mess with jumps and nops.  */
  if (JUMP_P (insn) || set_noop_p (pat))
    return;

  if (REG_P (dest))
    {
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
	  can_copy_p (GET_MODE (dest))
	  /* Is SET_SRC something we want to gcse?  */
	  && general_operand (src, GET_MODE (src))
#ifdef STACK_REGS
	  /* Never consider insns touching the register stack.  It may
	     create situations that reg-stack cannot handle (e.g. a stack
	     register live across an abnormal edge).  */
	  && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
#endif
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  */
	  && oprs_unchanged_p (src, insn, true))
	{
	  insert_expr_in_table (src, insn);
	}
    }
  else if (REG_P (src))
    {
      /* Record the memory expression of a store in the hash table.  */
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
	  can_copy_p (GET_MODE (src))
	  /* Is SET_DEST something we want to gcse?  */
	  && general_operand (dest, GET_MODE (dest))
#ifdef STACK_REGS
	  /* As above for STACK_REGS.  */
	  && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
#endif
	  && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
	  /* Check if the memory expression is killed after insn.  */
	  && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
	  && oprs_unchanged_p (XEXP (dest, 0), insn, true))
	{
	  insert_expr_in_table (dest, insn);
	}
    }
}


/* Create hash table of memory expressions available at end of basic
   blocks.  Basically you should think of this hash table as the
   representation of AVAIL_OUT.  This is the set of expressions that
   is generated in a basic block and not killed before the end of the
   same basic block.  Notice that this is really a local computation.  */

static void
compute_hash_table (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx insn;

      /* First pass over the instructions records information used to
	 determine when registers and memory are last set.
	 Since we compute a "local" AVAIL_OUT, reset the tables that
	 help us keep track of what has been modified since the start
	 of the block.  */
      reset_opr_set_tables ();
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    record_opr_changes (insn);
	}

      /* The next pass actually builds the hash table.  */
      FOR_BB_INSNS (bb, insn)
	if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
	  hash_scan_set (insn);
    }
}
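
/* As a small example (a hypothetical block), after scanning

       insn 1:  (set (reg r1) (mem X))
       insn 2:  (set (mem Y) (reg r2))

   the table records (mem X), provided no later insn in the block writes
   memory that may alias X or modifies the registers in X's address, and
   likewise (mem Y) for the store.  Whether r1 and r2 still hold those
   values at the end of the block is checked later, in
   eliminate_partially_redundant_load.  */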


/* Check if register REG is killed in any insn waiting to be inserted on
   edge E.  This function is required to check that our data flow analysis
   is still valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}

/* Similar to above - check if register REG is used in any insn waiting
   to be inserted on edge E.
   Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
   with PREV(insn),NEXT(insn) instead of calling reg_overlap_mentioned_p.  */

static bool
reg_used_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
      return true;

  return false;
}

/* Return the loaded/stored register of a load/store instruction.  */

static rtx
get_avail_load_store_reg (rtx insn)
{
  if (REG_P (SET_DEST (PATTERN (insn))))
    /* A load.  */
    return SET_DEST (PATTERN (insn));
  else
    {
      /* A store.  */
      gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
      return SET_SRC (PATTERN (insn));
    }
}

/* Return nonzero if the predecessors of BB are "well behaved".  */

static bool
bb_has_well_behaved_predecessors (basic_block bb)
{
  edge pred;
  edge_iterator ei;

  if (EDGE_COUNT (bb->preds) == 0)
    return false;

  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      if ((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
	return false;

      if (JUMP_TABLE_DATA_P (BB_END (pred->src)))
	return false;
    }
  return true;
}


/* Search the occurrence list starting at OCCR for an occurrence of the
   expression in BB.  Return it, or NULL if none is found.  */

static struct occr*
get_bb_avail_insn (basic_block bb, struct occr *occr)
{
  for (; occr != NULL; occr = occr->next)
    if (BLOCK_FOR_INSN (occr->insn) == bb)
      return occr;
  return NULL;
}


/* This handles the case where several stores feed a partially redundant
   load.  It checks if the redundancy elimination is possible and if it's
   worth it.

   Redundancy elimination is possible if,
   1) None of the operands of an insn have been modified since the start
      of the current basic block.
   2) In any predecessor of the current basic block, the same expression
      is generated.

   See the function body for the heuristics that determine if eliminating
   a redundancy is also worth doing, assuming it is possible.  */

static void
eliminate_partially_redundant_load (basic_block bb, rtx insn,
				    struct expr *expr)
{
  edge pred;
  rtx avail_insn = NULL_RTX;
  rtx avail_reg;
  rtx dest, pat;
  struct occr *a_occr;
  struct unoccr *occr, *avail_occrs = NULL;
  struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
  int npred_ok = 0;
  gcov_type ok_count = 0; /* Redundant load execution count.  */
  gcov_type critical_count = 0; /* Execution count of critical edges.  */
  edge_iterator ei;
  bool critical_edge_split = false;

  /* The execution count of the loads to be added to make the
     load fully redundant.  */
  gcov_type not_ok_count = 0;
  basic_block pred_bb;

  pat = PATTERN (insn);
  dest = SET_DEST (pat);

  /* Check that the loaded register is not used, set, or killed from the
     beginning of the block.  */
  if (reg_changed_after_insn_p (dest, 0)
      || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
    return;

  /* Check potential for replacing load with copy for predecessors.  */
  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      rtx next_pred_bb_end;

      avail_insn = NULL_RTX;
      avail_reg = NULL_RTX;
      pred_bb = pred->src;
      next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
      for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
	   a_occr = get_bb_avail_insn (pred_bb, a_occr->next))
	{
	  /* Check if the loaded register is not used.  */
	  avail_insn = a_occr->insn;
	  avail_reg = get_avail_load_store_reg (avail_insn);
	  gcc_assert (avail_reg);

	  /* Make sure we can generate a move from register avail_reg to
	     dest.  */
	  extract_insn (gen_move_insn (copy_rtx (dest),
				       copy_rtx (avail_reg)));
	  if (! constrain_operands (1)
	      || reg_killed_on_edge (avail_reg, pred)
	      || reg_used_on_edge (dest, pred))
	    {
	      avail_insn = NULL;
	      continue;
	    }
	  if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
	    /* AVAIL_INSN remains non-null.  */
	    break;
	  else
	    avail_insn = NULL;
	}

      if (EDGE_CRITICAL_P (pred))
	critical_count += pred->count;

      if (avail_insn != NULL_RTX)
	{
	  npred_ok++;
	  ok_count += pred->count;
	  if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
						    copy_rtx (avail_reg)))))
	    {
	      /* Check if there is going to be a split.  */
	      if (EDGE_CRITICAL_P (pred))
		critical_edge_split = true;
	    }
	  else /* It's a dead move, no need to generate it.  */
	    continue;
	  occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						  sizeof (struct unoccr));
	  occr->insn = avail_insn;
	  occr->pred = pred;
	  occr->next = avail_occrs;
	  avail_occrs = occr;
	  if (! rollback_unoccr)
	    rollback_unoccr = occr;
	}
      else
	{
	  /* Adding a load on a critical edge will cause a split.  */
	  if (EDGE_CRITICAL_P (pred))
	    critical_edge_split = true;
	  not_ok_count += pred->count;
	  unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						    sizeof (struct unoccr));
	  unoccr->insn = NULL_RTX;
	  unoccr->pred = pred;
	  unoccr->next = unavail_occrs;
	  unavail_occrs = unoccr;
	  if (! rollback_unoccr)
	    rollback_unoccr = unoccr;
	}
    }

  if (/* No load can be replaced by copy.  */
      npred_ok == 0
      /* Prevent exploding the code.  */
      || (optimize_bb_for_size_p (bb) && npred_ok > 1)
      /* If we don't have profile information we cannot tell if splitting
	 a critical edge is profitable or not so don't do it.  */
      || ((! profile_info || ! flag_branch_probabilities
	   || targetm.cannot_modify_jumps_p ())
	  && critical_edge_split))
    goto cleanup;

  /* Check if it's worth applying the partial redundancy elimination.  */
  if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
    goto cleanup;
  if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
    goto cleanup;

  /* Generate moves to the loaded register from where
     the memory is available.  */
  for (occr = avail_occrs; occr; occr = occr->next)
    {
      avail_insn = occr->insn;
      pred = occr->pred;
      /* Set avail_reg to be the register having the value of the
	 memory.  */
      avail_reg = get_avail_load_store_reg (avail_insn);
      gcc_assert (avail_reg);

      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
					  copy_rtx (avail_reg)),
			   pred);
      stats.moves_inserted++;

      if (dump_file)
	fprintf (dump_file,
		 "generating move from %d to %d on edge from %d to %d\n",
		 REGNO (avail_reg),
		 REGNO (dest),
		 pred->src->index,
		 pred->dest->index);
    }

  /* Regenerate loads where the memory is unavailable.  */
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
    {
      pred = unoccr->pred;
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
      stats.copies_inserted++;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "generating on edge from %d to %d a copy of load: ",
		   pred->src->index,
		   pred->dest->index);
	  print_rtl (dump_file, PATTERN (insn));
	  fprintf (dump_file, "\n");
	}
    }

  /* Delete the insn if it is not available in this block and mark it
     for deletion if it is available.  If insn is available it may help
     discover additional redundancies, so mark it for later deletion.  */
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next));

  if (!a_occr)
    {
      stats.insns_deleted++;

      if (dump_file)
	{
	  fprintf (dump_file, "deleting insn:\n");
	  print_rtl_single (dump_file, insn);
	  fprintf (dump_file, "\n");
	}
      delete_insn (insn);
    }
  else
    a_occr->deleted_p = 1;

cleanup:
  if (rollback_unoccr)
    obstack_free (&unoccr_obstack, rollback_unoccr);
}
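
/* To illustrate the two profitability tests above with made-up numbers:
   assuming GCSE_AFTER_RELOAD_PARTIAL_FRACTION is 3, predecessors where
   the value is already available executing ok_count = 900 times, and
   predecessors needing a new load executing not_ok_count = 200 times,
   the first test passes because 900 >= 3 * 200 = 600: the redundant
   path must dominate the cost of the loads we add.  The second test
   similarly requires ok_count to outweigh
   GCSE_AFTER_RELOAD_CRITICAL_FRACTION times the execution count of the
   critical edges that would have to be split.  */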

/* Perform the redundancy elimination as described before.  */

static void
eliminate_partially_redundant_loads (void)
{
  rtx insn;
  basic_block bb;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return;

  FOR_BB_BETWEEN (bb,
		  ENTRY_BLOCK_PTR->next_bb->next_bb,
		  EXIT_BLOCK_PTR,
		  next_bb)
    {
      /* Don't try anything on basic blocks with strange predecessors.  */
      if (! bb_has_well_behaved_predecessors (bb))
	continue;

      /* Do not try anything on cold basic blocks.  */
      if (optimize_bb_for_size_p (bb))
	continue;

      /* Reset the table of things changed since the start of the current
	 basic block.  */
      reset_opr_set_tables ();

      /* Look at all insns in the current basic block and see if there are
	 any loads in it that we can record.  */
      FOR_BB_INSNS (bb, insn)
	{
	  /* Is it a load - of the form (set (reg) (mem))?  */
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SET
	      && REG_P (SET_DEST (PATTERN (insn)))
	      && MEM_P (SET_SRC (PATTERN (insn))))
	    {
	      rtx pat = PATTERN (insn);
	      rtx src = SET_SRC (pat);
	      struct expr *expr;

	      if (!MEM_VOLATILE_P (src)
		  && GET_MODE (src) != BLKmode
		  && general_operand (src, GET_MODE (src))
		  /* Are the operands unchanged since the start of the
		     block?  */
		  && oprs_unchanged_p (src, insn, false)
		  && !(flag_non_call_exceptions && may_trap_p (src))
		  && !side_effects_p (src)
		  /* Is the expression recorded?  */
		  && (expr = lookup_expr_in_table (src)) != NULL)
		{
		  /* We now have a load (insn) and an available memory at
		     its BB start (expr).  Try to remove the load if it is
		     redundant.  */
		  eliminate_partially_redundant_load (bb, insn, expr);
		}
	    }

	  /* Keep track of everything modified by this insn, so that we
	     know what has been modified since the start of the current
	     basic block.  */
	  if (INSN_P (insn))
	    record_opr_changes (insn);
	}
    }

  commit_edge_insertions ();
}

/* Go over the expression hash table and delete insns that were
   marked for later deletion.  */

/* This helper is called via htab_traverse.  */
static int
delete_redundant_insns_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  struct expr *expr = (struct expr *) *slot;
  struct occr *occr;

  for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
    {
      if (occr->deleted_p && dbg_cnt (gcse2_delete))
	{
	  delete_insn (occr->insn);
	  stats.insns_deleted++;

	  if (dump_file)
	    {
	      fprintf (dump_file, "deleting insn:\n");
	      print_rtl_single (dump_file, occr->insn);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return 1;
}

static void
delete_redundant_insns (void)
{
  htab_traverse (expr_table, delete_redundant_insns_1, NULL);
  if (dump_file)
    fprintf (dump_file, "\n");
}

/* Main entry point of the GCSE after reload pass - clean up some
   redundant loads due to spilling.  */

static void
gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
{
  memset (&stats, 0, sizeof (stats));

  /* Allocate memory for this pass.
     Also computes and initializes the insns' CUIDs.  */
  alloc_mem ();

  /* We need alias analysis.  */
  init_alias_analysis ();

  compute_hash_table ();

  if (dump_file)
    dump_hash_table (dump_file);

  if (htab_elements (expr_table) > 0)
    {
      eliminate_partially_redundant_loads ();
      delete_redundant_insns ();

      if (dump_file)
	{
	  fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
	  fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
	  fprintf (dump_file, "moves inserted:  %d\n", stats.moves_inserted);
	  fprintf (dump_file, "insns deleted:   %d\n", stats.insns_deleted);
	  fprintf (dump_file, "\n\n");
	}
    }

  /* We are finished with alias analysis.  */
  end_alias_analysis ();

  free_mem ();
}


static bool
gate_handle_gcse2 (void)
{
  return (optimize > 0 && flag_gcse_after_reload
	  && optimize_function_for_speed_p (cfun));
}


static unsigned int
rest_of_handle_gcse2 (void)
{
  gcse_after_reload_main (get_insns ());
  rebuild_jump_labels (get_insns ());
  return 0;
}

struct rtl_opt_pass pass_gcse2 =
{
 {
  RTL_PASS,
  "gcse2",                              /* name */
  gate_handle_gcse2,                    /* gate */
  rest_of_handle_gcse2,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_GCSE_AFTER_RELOAD,                 /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing
  | TODO_verify_flow | TODO_ggc_collect /* todo_flags_finish */
 }
};