0
|
1 /* Dead store elimination
|
|
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
|
|
3 Free Software Foundation, Inc.
|
|
4
|
|
5 This file is part of GCC.
|
|
6
|
|
7 GCC is free software; you can redistribute it and/or modify
|
|
8 it under the terms of the GNU General Public License as published by
|
|
9 the Free Software Foundation; either version 3, or (at your option)
|
|
10 any later version.
|
|
11
|
|
12 GCC is distributed in the hope that it will be useful,
|
|
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
15 GNU General Public License for more details.
|
|
16
|
|
17 You should have received a copy of the GNU General Public License
|
|
18 along with GCC; see the file COPYING3. If not see
|
|
19 <http://www.gnu.org/licenses/>. */
|
|
20
|
|
21 #include "config.h"
|
|
22 #include "system.h"
|
|
23 #include "coretypes.h"
|
|
24 #include "tm.h"
|
|
25 #include "ggc.h"
|
|
26 #include "tree.h"
|
|
27 #include "rtl.h"
|
|
28 #include "tm_p.h"
|
|
29 #include "basic-block.h"
|
|
30 #include "timevar.h"
|
|
31 #include "diagnostic.h"
|
|
32 #include "tree-flow.h"
|
|
33 #include "tree-pass.h"
|
|
34 #include "tree-dump.h"
|
|
35 #include "domwalk.h"
|
|
36 #include "flags.h"
|
|
37 #include "langhooks.h"
|
|
38
|
|
39 /* This file implements dead store elimination.
|
|
40
|
|
41 A dead store is a store into a memory location which will later be
|
|
42 overwritten by another store without any intervening loads. In this
|
|
43 case the earlier store can be deleted.
|
|
44
|
|
45 In our SSA + virtual operand world we use immediate uses of virtual
|
|
46 operands to detect dead stores. If a store's virtual definition
|
|
47 is used precisely once by a later store to the same location which
|
|
48 post dominates the first store, then the first store is dead.
|
|
49
|
|
50 The single use of the store's virtual definition ensures that
|
|
51 there are no intervening aliased loads and the requirement that
|
|
52 the second load post dominate the first ensures that if the earlier
|
|
53 store executes, then the later stores will execute before the function
|
|
54 exits.
|
|
55
|
|
56 It may help to think of this as first moving the earlier store to
|
|
57 the point immediately before the later store. Again, the single
|
|
58 use of the virtual definition and the post-dominance relationship
|
|
59 ensure that such movement would be safe. Clearly if there are
|
|
60 back to back stores, then the second is redundant.
|
|
61
|
|
62 Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
|
|
63 may also help in understanding this code since it discusses the
|
|
64 relationship between dead store and redundant load elimination. In
|
|
65 fact, they are the same transformation applied to different views of
|
|
66 the CFG. */
|
|
67
|
|
68
|
|
/* Global state shared by the whole post-dominator walk.  */
struct dse_global_data
{
  /* This is the global bitmap for store statements.

     Each statement has a unique ID.  When we encounter a store statement
     that we want to record, set the bit corresponding to the statement's
     unique ID in this bitmap.  */
  bitmap stores;
};
|
|
78
|
|
/* We allocate a bitmap-per-block for stores which are encountered
   during the scan of that block.  This allows us to restore the
   global bitmap of stores when we finish processing a block.  */
struct dse_block_local_data
{
  /* Stores recorded while walking this block; unwound from the global
     bitmap by dse_finalize_block.  NULL until lazily allocated by
     record_voperand_set.  */
  bitmap stores;
};
|
|
86
|
|
/* Basic blocks of the potentially dead store and the following
   store, for memory_address_same.  */
struct address_walk_data
{
  /* store1_bb holds the candidate dead store; store2_bb holds the
     later store believed to make it redundant.  */
  basic_block store1_bb, store2_bb;
};
|
|
93
|
|
/* Forward declarations of local functions.  */
static bool gate_dse (void);
static unsigned int tree_ssa_dse (void);
static void dse_initialize_block_local_data (struct dom_walk_data *,
					     basic_block,
					     bool);
static void dse_optimize_stmt (struct dom_walk_data *,
			       basic_block,
			       gimple_stmt_iterator);
static void dse_record_phis (struct dom_walk_data *, basic_block);
static void dse_finalize_block (struct dom_walk_data *, basic_block);
static void record_voperand_set (bitmap, bitmap *, unsigned int);
|
|
105
|
|
106 /* Returns uid of statement STMT. */
|
|
107
|
|
108 static unsigned
|
|
109 get_stmt_uid (gimple stmt)
|
|
110 {
|
|
111 if (gimple_code (stmt) == GIMPLE_PHI)
|
|
112 return SSA_NAME_VERSION (gimple_phi_result (stmt))
|
|
113 + gimple_stmt_max_uid (cfun);
|
|
114
|
|
115 return gimple_uid (stmt);
|
|
116 }
|
|
117
|
|
118 /* Set bit UID in bitmaps GLOBAL and *LOCAL, creating *LOCAL as needed. */
|
|
119
|
|
120 static void
|
|
121 record_voperand_set (bitmap global, bitmap *local, unsigned int uid)
|
|
122 {
|
|
123 /* Lazily allocate the bitmap. Note that we do not get a notification
|
|
124 when the block local data structures die, so we allocate the local
|
|
125 bitmap backed by the GC system. */
|
|
126 if (*local == NULL)
|
|
127 *local = BITMAP_GGC_ALLOC ();
|
|
128
|
|
129 /* Set the bit in the local and global bitmaps. */
|
|
130 bitmap_set_bit (*local, uid);
|
|
131 bitmap_set_bit (global, uid);
|
|
132 }
|
|
133
|
|
134 /* Initialize block local data structures. */
|
|
135
|
|
136 static void
|
|
137 dse_initialize_block_local_data (struct dom_walk_data *walk_data,
|
|
138 basic_block bb ATTRIBUTE_UNUSED,
|
|
139 bool recycled)
|
|
140 {
|
|
141 struct dse_block_local_data *bd
|
|
142 = (struct dse_block_local_data *)
|
|
143 VEC_last (void_p, walk_data->block_data_stack);
|
|
144
|
|
145 /* If we are given a recycled block local data structure, ensure any
|
|
146 bitmap associated with the block is cleared. */
|
|
147 if (recycled)
|
|
148 {
|
|
149 if (bd->stores)
|
|
150 bitmap_clear (bd->stores);
|
|
151 }
|
|
152 }
|
|
153
|
|
/* Helper function for memory_address_same via walk_tree.  Returns
   non-NULL if it finds an SSA_NAME which is part of the address,
   such that the definition of the SSA_NAME post-dominates the store
   we want to delete but not the store that we believe makes it
   redundant.  This indicates that the address may change between
   the two stores.  */

static tree
memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
		      void *data)
{
  struct address_walk_data *walk_data = (struct address_walk_data *) data;
  tree expr = *expr_p;
  gimple def_stmt;
  basic_block def_bb;

  /* Only SSA_NAMEs occurring in the address are interesting; skip
     everything else.  */
  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* If we've found a default definition, then there's no problem.  Both
     stores will post-dominate it.  And def_bb will be NULL.  */
  if (SSA_NAME_IS_DEFAULT_DEF (expr))
    return NULL_TREE;

  def_stmt = SSA_NAME_DEF_STMT (expr);
  def_bb = gimple_bb (def_stmt);

  /* DEF_STMT must dominate both stores.  So if it is in the same
     basic block as one, it does not post-dominate that store.  */
  if (walk_data->store1_bb != def_bb
      && dominated_by_p (CDI_POST_DOMINATORS, walk_data->store1_bb, def_bb))
    {
      /* Here the definition post-dominates store1; if it does not also
	 post-dominate store2 (or shares store2's block), the address
	 may change between the stores.  */
      if (walk_data->store2_bb == def_bb
	  || !dominated_by_p (CDI_POST_DOMINATORS, walk_data->store2_bb,
			      def_bb))
	/* Return non-NULL to stop the walk.  */
	return *expr_p;
    }

  return NULL_TREE;
}
|
|
195
|
|
196 /* Return TRUE if the destination memory address in STORE1 and STORE2
|
|
197 might be modified after STORE1, before control reaches STORE2. */
|
|
198
|
|
199 static bool
|
|
200 memory_address_same (gimple store1, gimple store2)
|
|
201 {
|
|
202 struct address_walk_data walk_data;
|
|
203
|
|
204 walk_data.store1_bb = gimple_bb (store1);
|
|
205 walk_data.store2_bb = gimple_bb (store2);
|
|
206
|
|
207 return (walk_tree (gimple_assign_lhs_ptr (store1), memory_ssa_name_same,
|
|
208 &walk_data, NULL)
|
|
209 == NULL);
|
|
210 }
|
|
211
|
|
/* Return true if there is a stmt that kills the lhs of STMT and is in the
   virtual def-use chain of STMT without a use in between the kill and STMT.
   Returns false if no such stmt is found.
   *FIRST_USE_P is set to the first use of the single virtual def of
   STMT.  *USE_P is set to the vop killed by *USE_STMT.  */

static bool
get_kill_of_stmt_lhs (gimple stmt,
		      use_operand_p * first_use_p,
		      use_operand_p * use_p, gimple * use_stmt)
{
  tree lhs;

  gcc_assert (is_gimple_assign (stmt));

  lhs = gimple_assign_lhs (stmt);

  /* We now walk the chain of single uses of the single VDEFs.
     We succeeded finding a kill if the lhs of the use stmt is
     equal to the original lhs.  We can keep walking to the next
     use if there are no possible uses of the original lhs in
     the stmt.  */
  do
    {
      tree use_lhs;
      def_operand_p def_p;

      /* The stmt must have a single VDEF.  */
      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_VDEF);
      if (def_p == NULL_DEF_OPERAND_P)
	return false;

      /* Get the single immediate use of the def.  Note this overwrites
	 STMT, advancing the walk down the virtual def-use chain.  */
      if (!single_imm_use (DEF_FROM_PTR (def_p), first_use_p, &stmt))
	return false;
      /* Redirect FIRST_USE_P at the caller's USE_P slot: only the very
	 first iteration writes the caller's *FIRST_USE_P; every later
	 single_imm_use call above overwrites *USE_P instead, so on
	 success *USE_P holds the vop killed by *USE_STMT.  */
      first_use_p = use_p;

      /* If there are possible hidden uses, give up.  */
      if (!gimple_assign_single_p (stmt)
	  || (TREE_CODE (gimple_assign_rhs1 (stmt)) != SSA_NAME
	      && !is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
	return false;

      /* If the use stmts lhs matches the original lhs we have
	 found the kill, otherwise continue walking.  */
      use_lhs = gimple_assign_lhs (stmt);
      if (operand_equal_p (use_lhs, lhs, 0))
	{
	  *use_stmt = stmt;
	  return true;
	}
    }
  while (1);
}
|
|
266
|
|
/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT, check that each VDEF has one
   use, and that one use is another VDEF clobbering the first one.

   On success *USE_STMT is the single statement using all of STMT's
   virtual definitions, and *FIRST_USE_P/*USE_P record the use operand
   found.  On failure STMT is recorded as a store in DSE_GD/BD so later
   stores can still be matched against it.

   Return TRUE if the above conditions are met, otherwise FALSE.  */

static bool
dse_possible_dead_store_p (gimple stmt,
			   use_operand_p *first_use_p,
			   use_operand_p *use_p,
			   gimple *use_stmt,
			   struct dse_global_data *dse_gd,
			   struct dse_block_local_data *bd)
{
  ssa_op_iter op_iter;
  bool fail = false;
  def_operand_p var1;
  vuse_vec_p vv;
  tree defvar = NULL_TREE;
  tree prev_defvar = NULL_TREE;
  gimple temp;

  /* We want to verify that each virtual definition in STMT has
     precisely one use and that all the virtual definitions are
     used by the same single statement.  When complete, we
     want USE_STMT to refer to the one statement which uses
     all of the virtual definitions from STMT.  */
  *use_stmt = NULL;
  FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
    {
      defvar = DEF_FROM_PTR (var1);

      /* If this virtual def does not have precisely one use, then
	 we will not be able to eliminate STMT.  */
      if (!has_single_use (defvar))
	{
	  fail = true;
	  break;
	}

      /* Get the one and only immediate use of DEFVAR.  */
      single_imm_use (defvar, use_p, &temp);
      gcc_assert (*use_p != NULL_USE_OPERAND_P);
      *first_use_p = *use_p;

      /* ??? If we hit a GIMPLE_PHI we could skip to the PHI_RESULT uses.
	 Don't bother to do that for now.  */
      if (gimple_code (temp) == GIMPLE_PHI)
	{
	  fail = true;
	  break;
	}

      /* In the case of memory partitions, we may get:

	   # MPT.764_162 = VDEF <MPT.764_161(D)>
	   x = {};
	   # MPT.764_167 = VDEF <MPT.764_162>
	   y = {};

	 So we must make sure we're talking about the same LHS.
      */
      if (is_gimple_assign (temp))
	{
	  tree base1 = get_base_address (gimple_assign_lhs (stmt));
	  tree base2 = get_base_address (gimple_assign_lhs (temp));

	  /* Strip indirections so stores through pointers compare by
	     the underlying pointer rather than the INDIRECT_REF tree.  */
	  while (base1 && INDIRECT_REF_P (base1))
	    base1 = TREE_OPERAND (base1, 0);
	  while (base2 && INDIRECT_REF_P (base2))
	    base2 = TREE_OPERAND (base2, 0);

	  if (base1 != base2)
	    {
	      fail = true;
	      break;
	    }
	}

      /* If the immediate use of DEF_VAR is not the same as the
	 previously find immediate uses, then we will not be able
	 to eliminate STMT.  */
      if (*use_stmt == NULL)
	{
	  *use_stmt = temp;
	  prev_defvar = defvar;
	}
      else if (temp != *use_stmt)
	{
	  fail = true;
	  break;
	}
    }

  if (fail)
    {
      /* STMT is not a candidate, but later stores may still be matched
	 against it, so note it in the store bitmaps.  */
      record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
      return false;
    }

  return true;
}
|
|
369
|
|
370
|
|
/* Attempt to eliminate dead stores in the statement referenced by BSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  */

static void
dse_optimize_stmt (struct dom_walk_data *walk_data,
		   basic_block bb ATTRIBUTE_UNUSED,
		   gimple_stmt_iterator gsi)
{
  struct dse_block_local_data *bd
    = (struct dse_block_local_data *)
	VEC_last (void_p, walk_data->block_data_stack);
  struct dse_global_data *dse_gd
    = (struct dse_global_data *) walk_data->global_data;
  gimple stmt = gsi_stmt (gsi);

  /* If this statement has no virtual defs, then there is nothing
     to do.  */
  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
    return;

  /* We know we have virtual definitions.  If this is a GIMPLE_ASSIGN
     that's not also a function call, then record it into our table.  */
  if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
    return;

  /* Never touch statements with volatile operands.  */
  if (gimple_has_volatile_ops (stmt))
    return;

  if (is_gimple_assign (stmt))
    {
      use_operand_p first_use_p = NULL_USE_OPERAND_P;
      use_operand_p use_p = NULL;
      gimple use_stmt;

      /* Check the single-use chain conditions; on failure the store has
	 already been recorded and there is nothing more to do.  */
      if (!dse_possible_dead_store_p (stmt, &first_use_p, &use_p, &use_stmt,
				      dse_gd, bd))
	return;

      /* If we have precisely one immediate use at this point, then we may
	 have found redundant store.  Make sure that the stores are to
	 the same memory location.  This includes checking that any
	 SSA-form variables in the address will have the same values.  */
      if (use_p != NULL_USE_OPERAND_P
	  && bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
	  && !operand_equal_p (gimple_assign_lhs (stmt),
			       gimple_assign_lhs (use_stmt), 0)
	  && memory_address_same (stmt, use_stmt))
	{
	  /* If we have precisely one immediate use at this point, but
	     the stores are not to the same memory location then walk the
	     virtual def-use chain to get the stmt which stores to that same
	     memory location.  */
	  if (!get_kill_of_stmt_lhs (stmt, &first_use_p, &use_p, &use_stmt))
	    {
	      record_voperand_set (dse_gd->stores, &bd->stores,
				   gimple_uid (stmt));
	      return;
	    }
	}

      /* If we have precisely one immediate use at this point and the
	 stores are to the same memory location or there is a chain of
	 virtual uses from stmt and the stmt which stores to that same
	 memory location, then we may have found redundant store.  */
      if (use_p != NULL_USE_OPERAND_P
	  && bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
	  && operand_equal_p (gimple_assign_lhs (stmt),
			      gimple_assign_lhs (use_stmt), 0)
	  && memory_address_same (stmt, use_stmt))
	{
	  ssa_op_iter op_iter;
	  def_operand_p var1;
	  vuse_vec_p vv;
	  tree stmt_lhs;

	  /* If use_stmt is or might be a nop assignment, e.g. for
	     struct { ... } S a, b, *p; ...
	     b = a; b = b;
	     or
	     b = a; b = *p; where p might be &b,
	     or
	     *p = a; *p = b; where p might be &b,
	     or
	     *p = *u; *p = *v; where p might be v, then USE_STMT
	     acts as a use as well as definition, so store in STMT
	     is not dead.  */
	  if (gimple_loaded_syms (use_stmt)
	      && bitmap_intersect_p (gimple_loaded_syms (use_stmt),
				     gimple_stored_syms (use_stmt)))
	    {
	      record_voperand_set (dse_gd->stores, &bd->stores,
				   gimple_uid (stmt));
	      return;
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  Deleted dead store '");
	      print_gimple_stmt (dump_file, gsi_stmt (gsi), dump_flags, 0);
	      fprintf (dump_file, "'\n");
	    }

	  /* Then we need to fix the operand of the consuming stmt.
	     Each VDEF of STMT is rewritten in USE_STMT to refer to the
	     corresponding VUSE of STMT, splicing STMT out of the
	     virtual def-use chains.  */
	  stmt_lhs = USE_FROM_PTR (first_use_p);
	  FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
	    {
	      tree usevar;
	      gimple temp;

	      single_imm_use (DEF_FROM_PTR (var1), &use_p, &temp);
	      gcc_assert (VUSE_VECT_NUM_ELEM (*vv) == 1);
	      usevar = VUSE_ELEMENT_VAR (*vv, 0);
	      SET_USE (use_p, usevar);

	      /* Make sure we propagate the ABNORMAL bit setting.  */
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (stmt_lhs))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (usevar) = 1;
	    }

	  /* Remove the dead store.  */
	  gsi_remove (&gsi, true);

	  /* And release any SSA_NAMEs set in this statement back to the
	     SSA_NAME manager.  */
	  release_defs (stmt);
	}

      record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
    }
}
|
|
509
|
|
510 /* Record that we have seen the PHIs at the start of BB which correspond
|
|
511 to virtual operands. */
|
|
512 static void
|
|
513 dse_record_phis (struct dom_walk_data *walk_data, basic_block bb)
|
|
514 {
|
|
515 struct dse_block_local_data *bd
|
|
516 = (struct dse_block_local_data *)
|
|
517 VEC_last (void_p, walk_data->block_data_stack);
|
|
518 struct dse_global_data *dse_gd
|
|
519 = (struct dse_global_data *) walk_data->global_data;
|
|
520 gimple phi;
|
|
521 gimple_stmt_iterator gsi;
|
|
522
|
|
523 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
|
524 {
|
|
525 phi = gsi_stmt (gsi);
|
|
526 if (!is_gimple_reg (gimple_phi_result (phi)))
|
|
527 record_voperand_set (dse_gd->stores, &bd->stores, get_stmt_uid (phi));
|
|
528 }
|
|
529 }
|
|
530
|
|
531 static void
|
|
532 dse_finalize_block (struct dom_walk_data *walk_data,
|
|
533 basic_block bb ATTRIBUTE_UNUSED)
|
|
534 {
|
|
535 struct dse_block_local_data *bd
|
|
536 = (struct dse_block_local_data *)
|
|
537 VEC_last (void_p, walk_data->block_data_stack);
|
|
538 struct dse_global_data *dse_gd
|
|
539 = (struct dse_global_data *) walk_data->global_data;
|
|
540 bitmap stores = dse_gd->stores;
|
|
541 unsigned int i;
|
|
542 bitmap_iterator bi;
|
|
543
|
|
544 /* Unwind the stores noted in this basic block. */
|
|
545 if (bd->stores)
|
|
546 EXECUTE_IF_SET_IN_BITMAP (bd->stores, 0, i, bi)
|
|
547 {
|
|
548 bitmap_clear_bit (stores, i);
|
|
549 }
|
|
550 }
|
|
551
|
|
552 /* Main entry point. */
|
|
553
|
|
554 static unsigned int
|
|
555 tree_ssa_dse (void)
|
|
556 {
|
|
557 struct dom_walk_data walk_data;
|
|
558 struct dse_global_data dse_gd;
|
|
559
|
|
560 renumber_gimple_stmt_uids ();
|
|
561
|
|
562 /* We might consider making this a property of each pass so that it
|
|
563 can be [re]computed on an as-needed basis. Particularly since
|
|
564 this pass could be seen as an extension of DCE which needs post
|
|
565 dominators. */
|
|
566 calculate_dominance_info (CDI_POST_DOMINATORS);
|
|
567
|
|
568 /* Dead store elimination is fundamentally a walk of the post-dominator
|
|
569 tree and a backwards walk of statements within each block. */
|
|
570 walk_data.walk_stmts_backward = true;
|
|
571 walk_data.dom_direction = CDI_POST_DOMINATORS;
|
|
572 walk_data.initialize_block_local_data = dse_initialize_block_local_data;
|
|
573 walk_data.before_dom_children_before_stmts = NULL;
|
|
574 walk_data.before_dom_children_walk_stmts = dse_optimize_stmt;
|
|
575 walk_data.before_dom_children_after_stmts = dse_record_phis;
|
|
576 walk_data.after_dom_children_before_stmts = NULL;
|
|
577 walk_data.after_dom_children_walk_stmts = NULL;
|
|
578 walk_data.after_dom_children_after_stmts = dse_finalize_block;
|
|
579 walk_data.interesting_blocks = NULL;
|
|
580
|
|
581 walk_data.block_local_data_size = sizeof (struct dse_block_local_data);
|
|
582
|
|
583 /* This is the main hash table for the dead store elimination pass. */
|
|
584 dse_gd.stores = BITMAP_ALLOC (NULL);
|
|
585 walk_data.global_data = &dse_gd;
|
|
586
|
|
587 /* Initialize the dominator walker. */
|
|
588 init_walk_dominator_tree (&walk_data);
|
|
589
|
|
590 /* Recursively walk the dominator tree. */
|
|
591 walk_dominator_tree (&walk_data, EXIT_BLOCK_PTR);
|
|
592
|
|
593 /* Finalize the dominator walker. */
|
|
594 fini_walk_dominator_tree (&walk_data);
|
|
595
|
|
596 /* Release the main bitmap. */
|
|
597 BITMAP_FREE (dse_gd.stores);
|
|
598
|
|
599 /* For now, just wipe the post-dominator information. */
|
|
600 free_dominance_info (CDI_POST_DOMINATORS);
|
|
601 return 0;
|
|
602 }
|
|
603
|
|
/* Gate function: run the SSA dead store elimination pass only when
   enabled by -ftree-dse (flag_tree_dse).  */

static bool
gate_dse (void)
{
  return flag_tree_dse != 0;
}
|
|
609
|
|
/* Pass descriptor for the SSA dead store elimination pass.  */

struct gimple_opt_pass pass_dse =
{
 {
  GIMPLE_PASS,
  "dse",			/* name */
  gate_dse,			/* gate */
  tree_ssa_dse,			/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_DSE,			/* tv_id */
  PROP_cfg
    | PROP_ssa
    | PROP_alias,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func
    | TODO_ggc_collect
    | TODO_verify_ssa		/* todo_flags_finish */
 }
};
|
|
632
|
|
/* A very simple dead store pass eliminating write only local variables.
   The pass does not require alias information and thus can be run before
   inlining to quickly eliminate artifacts of some common C++ constructs.

   Returns a mask of TODO_* flags for the pass manager describing any
   cleanup work the removals made necessary.  */

static unsigned int
execute_simple_dse (void)
{
  gimple_stmt_iterator gsi;
  basic_block bb;
  bitmap variables_loaded = BITMAP_ALLOC (NULL);
  unsigned int todo = 0;

  /* Collect into VARIABLES LOADED all variables that are read in function
     body.  */
  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))

      if (gimple_loaded_syms (gsi_stmt (gsi)))
	bitmap_ior_into (variables_loaded,
			 gimple_loaded_syms (gsi_stmt (gsi)));

  /* Look for statements writing into the write only variables.
     And try to remove them.  */

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
      {
	gimple stmt = gsi_stmt (gsi);
	tree op;
	bool removed = false;
	ssa_op_iter iter;
	tree size;

	/* Zero-sized aggregate stores can be deleted outright.  */
	if (is_gimple_assign (stmt)
	    && AGGREGATE_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt)))
	    && (size = lang_hooks.expr_size (gimple_assign_lhs (stmt)))
	    && integer_zerop (size))
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "  Deleted zero-sized store '");
		print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		fprintf (dump_file, "'\n");
	      }
	    removed = true;
	    gsi_remove (&gsi, true);
	    todo |= TODO_cleanup_cfg;
	  }
	/* Otherwise consider assignments and calls with a LHS whose
	   stored symbols are never loaded anywhere in the function.  */
	else if (gimple_stored_syms (stmt)
		 && !bitmap_empty_p (gimple_stored_syms (stmt))
		 && (is_gimple_assign (stmt)
		     || (is_gimple_call (stmt)
			 && gimple_call_lhs (stmt)))
		 && !bitmap_intersect_p (gimple_stored_syms (stmt),
					 variables_loaded))
	  {
	    unsigned int i;
	    bitmap_iterator bi;
	    bool dead = true;

	    /* See if STMT only stores to write-only variables and
	       verify that there are no volatile operands.  tree-ssa-operands
	       sets has_volatile_ops flag for all statements involving
	       reads and writes when aliases are not built to prevent passes
	       from removing them as dead.  The flag thus has no use for us
	       and we need to look into all operands.  */

	    EXECUTE_IF_SET_IN_BITMAP (gimple_stored_syms (stmt), 0, i, bi)
	      {
		tree var = referenced_var_lookup (i);
		if (TREE_ADDRESSABLE (var)
		    || is_global_var (var)
		    || TREE_THIS_VOLATILE (var))
		  dead = false;
	      }

	    if (dead && gimple_loaded_syms (stmt))
	      EXECUTE_IF_SET_IN_BITMAP (gimple_loaded_syms (stmt), 0, i, bi)
		if (TREE_THIS_VOLATILE (referenced_var_lookup (i)))
		  dead = false;

	    if (dead)
	      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS)
		if (TREE_THIS_VOLATILE (op))
		  dead = false;

	    /* Look for possible occurrence var = indirect_ref (...) where
	       indirect_ref itself is volatile.  */

	    if (dead && is_gimple_assign (stmt)
		&& TREE_THIS_VOLATILE (gimple_assign_rhs1 (stmt)))
	      dead = false;

	    if (dead)
	      {
		/* When LHS of var = call (); is dead, simplify it into
		   call (); saving one operand.  */
		if (is_gimple_call (stmt)
		    && gimple_has_side_effects (stmt))
		  {
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Deleted LHS of call: ");
			print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
			fprintf (dump_file, "\n");
		      }
		    push_stmt_changes (gsi_stmt_ptr (&gsi));
		    gimple_call_set_lhs (stmt, NULL);
		    pop_stmt_changes (gsi_stmt_ptr (&gsi));
		  }
		else
		  {
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "  Deleted dead store '");
			print_gimple_stmt (dump_file, stmt, 0, dump_flags);
			fprintf (dump_file, "'\n");
		      }
		    removed = true;
		    gsi_remove (&gsi, true);
		    todo |= TODO_cleanup_cfg;
		  }
		todo |= TODO_remove_unused_locals | TODO_ggc_collect;
	      }
	  }
	/* Only advance the iterator when the current statement was not
	   removed; gsi_remove already moved it forward.  */
	if (!removed)
	  gsi_next (&gsi);
      }
  BITMAP_FREE (variables_loaded);
  return todo;
}
|
|
764
|
|
/* Pass descriptor for the simple (pre-alias) dead store pass.  */

struct gimple_opt_pass pass_simple_dse =
{
 {
  GIMPLE_PASS,
  "sdse",			/* name */
  NULL,				/* gate */
  execute_simple_dse,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  0,				/* tv_id */
  PROP_ssa,			/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func		/* todo_flags_finish */
 }
};
|