0
|
1 /* Data flow functions for trees.
|
|
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
|
|
3 Free Software Foundation, Inc.
|
|
4 Contributed by Diego Novillo <dnovillo@redhat.com>
|
|
5
|
|
6 This file is part of GCC.
|
|
7
|
|
8 GCC is free software; you can redistribute it and/or modify
|
|
9 it under the terms of the GNU General Public License as published by
|
|
10 the Free Software Foundation; either version 3, or (at your option)
|
|
11 any later version.
|
|
12
|
|
13 GCC is distributed in the hope that it will be useful,
|
|
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
16 GNU General Public License for more details.
|
|
17
|
|
18 You should have received a copy of the GNU General Public License
|
|
19 along with GCC; see the file COPYING3. If not see
|
|
20 <http://www.gnu.org/licenses/>. */
|
|
21
|
|
22 #include "config.h"
|
|
23 #include "system.h"
|
|
24 #include "coretypes.h"
|
|
25 #include "tm.h"
|
|
26 #include "hashtab.h"
|
|
27 #include "pointer-set.h"
|
|
28 #include "tree.h"
|
|
29 #include "rtl.h"
|
|
30 #include "tm_p.h"
|
|
31 #include "hard-reg-set.h"
|
|
32 #include "basic-block.h"
|
|
33 #include "output.h"
|
|
34 #include "timevar.h"
|
|
35 #include "expr.h"
|
|
36 #include "ggc.h"
|
|
37 #include "langhooks.h"
|
|
38 #include "flags.h"
|
|
39 #include "function.h"
|
|
40 #include "diagnostic.h"
|
|
41 #include "tree-dump.h"
|
|
42 #include "gimple.h"
|
|
43 #include "tree-flow.h"
|
|
44 #include "tree-inline.h"
|
|
45 #include "tree-pass.h"
|
|
46 #include "convert.h"
|
|
47 #include "params.h"
|
|
48 #include "cgraph.h"
|
|
49
|
|
50 /* Build and maintain data flow information for trees. */
|
|
51
|
|
/* Counters used to display DFA and SSA statistics.  Filled in by
   collect_dfa_stats and printed by dump_dfa_stats.  */
struct dfa_stats_d
{
  long num_var_anns;		/* Variables carrying a var_ann.  */
  long num_defs;		/* Real DEF operands seen.  */
  long num_uses;		/* Real USE operands seen.  */
  long num_phis;		/* PHI nodes in the function.  */
  long num_phi_args;		/* Total PHI arguments across all PHIs.  */
  size_t max_num_phi_args;	/* Largest argument count of any one PHI.  */
  long num_vdefs;		/* Virtual definition operands.  */
  long num_vuses;		/* Virtual use operands.  */
};
|
|
64
|
|
65
|
|
66 /* Local functions. */
|
|
67 static void collect_dfa_stats (struct dfa_stats_d *);
|
|
68 static tree find_vars_r (tree *, int *, void *);
|
|
69
|
|
70
|
|
71 /*---------------------------------------------------------------------------
|
|
72 Dataflow analysis (DFA) routines
|
|
73 ---------------------------------------------------------------------------*/
|
|
74 /* Find all the variables referenced in the function. This function
|
|
75 builds the global arrays REFERENCED_VARS and CALL_CLOBBERED_VARS.
|
|
76
|
|
77 Note that this function does not look for statement operands, it simply
|
|
78 determines what variables are referenced in the program and detects
|
|
79 various attributes for each variable used by alias analysis and the
|
|
80 optimizer. */
|
|
81
|
|
82 static unsigned int
|
|
83 find_referenced_vars (void)
|
|
84 {
|
|
85 basic_block bb;
|
|
86 gimple_stmt_iterator si;
|
|
87
|
|
88 FOR_EACH_BB (bb)
|
|
89 {
|
|
90 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
|
|
91 {
|
|
92 size_t i;
|
|
93 gimple stmt = gsi_stmt (si);
|
|
94 for (i = 0; i < gimple_num_ops (stmt); i++)
|
|
95 walk_tree (gimple_op_ptr (stmt, i), find_vars_r, NULL, NULL);
|
|
96 }
|
|
97
|
|
98 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
|
|
99 {
|
|
100 gimple phi = gsi_stmt (si);
|
|
101 size_t i, len = gimple_phi_num_args (phi);
|
|
102
|
|
103 walk_tree (gimple_phi_result_ptr (phi), find_vars_r, NULL, NULL);
|
|
104
|
|
105 for (i = 0; i < len; i++)
|
|
106 {
|
|
107 tree arg = gimple_phi_arg_def (phi, i);
|
|
108 walk_tree (&arg, find_vars_r, NULL, NULL);
|
|
109 }
|
|
110 }
|
|
111 }
|
|
112
|
|
113 return 0;
|
|
114 }
|
|
115
|
|
/* Pass descriptor for referenced-variable discovery.  Runs
   find_referenced_vars once GIMPLE EH lowering and the CFG are available,
   and advertises PROP_referenced_vars to later passes.  */
struct gimple_opt_pass pass_referenced_vars =
{
 {
  GIMPLE_PASS,
  NULL,					/* name */
  NULL,					/* gate */
  find_referenced_vars,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_FIND_REFERENCED_VARS,		/* tv_id */
  PROP_gimple_leh | PROP_cfg,		/* properties_required */
  PROP_referenced_vars,			/* properties_provided */
  0,					/* properties_destroyed */
  TODO_dump_func,			/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
|
|
134
|
|
135
|
|
136 /*---------------------------------------------------------------------------
|
|
137 Manage annotations
|
|
138 ---------------------------------------------------------------------------*/
|
|
139 /* Create a new annotation for a _DECL node T. */
|
|
140
|
|
141 var_ann_t
|
|
142 create_var_ann (tree t)
|
|
143 {
|
|
144 var_ann_t ann;
|
|
145
|
|
146 gcc_assert (t);
|
|
147 gcc_assert (DECL_P (t));
|
|
148 gcc_assert (!t->base.ann || t->base.ann->common.type == VAR_ANN);
|
|
149
|
|
150 ann = GGC_CNEW (struct var_ann_d);
|
|
151 ann->common.type = VAR_ANN;
|
|
152 t->base.ann = (tree_ann_t) ann;
|
|
153
|
|
154 return ann;
|
|
155 }
|
|
156
|
|
157 /* Create a new annotation for a FUNCTION_DECL node T. */
|
|
158
|
|
159 function_ann_t
|
|
160 create_function_ann (tree t)
|
|
161 {
|
|
162 function_ann_t ann;
|
|
163
|
|
164 gcc_assert (t);
|
|
165 gcc_assert (TREE_CODE (t) == FUNCTION_DECL);
|
|
166 gcc_assert (!t->base.ann || t->base.ann->common.type == FUNCTION_ANN);
|
|
167
|
|
168 ann = (function_ann_t) ggc_alloc (sizeof (*ann));
|
|
169 memset ((void *) ann, 0, sizeof (*ann));
|
|
170
|
|
171 ann->common.type = FUNCTION_ANN;
|
|
172
|
|
173 t->base.ann = (tree_ann_t) ann;
|
|
174
|
|
175 return ann;
|
|
176 }
|
|
177
|
|
178 /* Renumber all of the gimple stmt uids. */
|
|
179
|
|
180 void
|
|
181 renumber_gimple_stmt_uids (void)
|
|
182 {
|
|
183 basic_block bb;
|
|
184
|
|
185 set_gimple_stmt_max_uid (cfun, 0);
|
|
186 FOR_ALL_BB (bb)
|
|
187 {
|
|
188 gimple_stmt_iterator bsi;
|
|
189 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
|
|
190 {
|
|
191 gimple stmt = gsi_stmt (bsi);
|
|
192 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
|
|
193 }
|
|
194 }
|
|
195 }
|
|
196
|
|
197 /* Create a new annotation for a tree T. */
|
|
198
|
|
199 tree_ann_common_t
|
|
200 create_tree_common_ann (tree t)
|
|
201 {
|
|
202 tree_ann_common_t ann;
|
|
203
|
|
204 gcc_assert (t);
|
|
205 gcc_assert (!t->base.ann || t->base.ann->common.type == TREE_ANN_COMMON);
|
|
206
|
|
207 ann = GGC_CNEW (struct tree_ann_common_d);
|
|
208
|
|
209 ann->type = TREE_ANN_COMMON;
|
|
210 ann->rn = -1;
|
|
211 t->base.ann = (tree_ann_t) ann;
|
|
212
|
|
213 return ann;
|
|
214 }
|
|
215
|
|
216 /* Build a temporary. Make sure and register it to be renamed. */
|
|
217
|
|
218 tree
|
|
219 make_rename_temp (tree type, const char *prefix)
|
|
220 {
|
|
221 tree t = create_tmp_var (type, prefix);
|
|
222
|
|
223 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
|
|
224 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
|
|
225 DECL_GIMPLE_REG_P (t) = 1;
|
|
226
|
|
227 if (gimple_referenced_vars (cfun))
|
|
228 {
|
|
229 add_referenced_var (t);
|
|
230 mark_sym_for_renaming (t);
|
|
231 }
|
|
232
|
|
233 return t;
|
|
234 }
|
|
235
|
|
236
|
|
237
|
|
238 /*---------------------------------------------------------------------------
|
|
239 Debugging functions
|
|
240 ---------------------------------------------------------------------------*/
|
|
241 /* Dump the list of all the referenced variables in the current function to
|
|
242 FILE. */
|
|
243
|
|
244 void
|
|
245 dump_referenced_vars (FILE *file)
|
|
246 {
|
|
247 tree var;
|
|
248 referenced_var_iterator rvi;
|
|
249
|
|
250 fprintf (file, "\nReferenced variables in %s: %u\n\n",
|
|
251 get_name (current_function_decl), (unsigned) num_referenced_vars);
|
|
252
|
|
253 FOR_EACH_REFERENCED_VAR (var, rvi)
|
|
254 {
|
|
255 fprintf (file, "Variable: ");
|
|
256 dump_variable (file, var);
|
|
257 fprintf (file, "\n");
|
|
258 }
|
|
259 }
|
|
260
|
|
261
|
|
/* Dump the list of all the referenced variables to stderr.
   Convenience wrapper around dump_referenced_vars for use from a
   debugger.  */

void
debug_referenced_vars (void)
{
  dump_referenced_vars (stderr);
}
|
|
269
|
|
270
|
|
271 /* Dump variable VAR and its may-aliases to FILE. */
|
|
272
|
|
273 void
|
|
274 dump_variable (FILE *file, tree var)
|
|
275 {
|
|
276 var_ann_t ann;
|
|
277
|
|
278 if (TREE_CODE (var) == SSA_NAME)
|
|
279 {
|
|
280 if (POINTER_TYPE_P (TREE_TYPE (var)))
|
|
281 dump_points_to_info_for (file, var);
|
|
282 var = SSA_NAME_VAR (var);
|
|
283 }
|
|
284
|
|
285 if (var == NULL_TREE)
|
|
286 {
|
|
287 fprintf (file, "<nil>");
|
|
288 return;
|
|
289 }
|
|
290
|
|
291 print_generic_expr (file, var, dump_flags);
|
|
292
|
|
293 ann = var_ann (var);
|
|
294
|
|
295 fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
|
|
296
|
|
297 fprintf (file, ", ");
|
|
298 print_generic_expr (file, TREE_TYPE (var), dump_flags);
|
|
299
|
|
300 if (ann && ann->symbol_mem_tag)
|
|
301 {
|
|
302 fprintf (file, ", symbol memory tag: ");
|
|
303 print_generic_expr (file, ann->symbol_mem_tag, dump_flags);
|
|
304 }
|
|
305
|
|
306 if (TREE_ADDRESSABLE (var))
|
|
307 fprintf (file, ", is addressable");
|
|
308
|
|
309 if (is_global_var (var))
|
|
310 fprintf (file, ", is global");
|
|
311
|
|
312 if (TREE_THIS_VOLATILE (var))
|
|
313 fprintf (file, ", is volatile");
|
|
314
|
|
315 dump_mem_sym_stats_for_var (file, var);
|
|
316
|
|
317 if (is_call_clobbered (var))
|
|
318 {
|
|
319 const char *s = "";
|
|
320 var_ann_t va = var_ann (var);
|
|
321 unsigned int escape_mask = va->escape_mask;
|
|
322
|
|
323 fprintf (file, ", call clobbered");
|
|
324 fprintf (file, " (");
|
|
325 if (escape_mask & ESCAPE_STORED_IN_GLOBAL)
|
|
326 { fprintf (file, "%sstored in global", s); s = ", "; }
|
|
327 if (escape_mask & ESCAPE_TO_ASM)
|
|
328 { fprintf (file, "%sgoes through ASM", s); s = ", "; }
|
|
329 if (escape_mask & ESCAPE_TO_CALL)
|
|
330 { fprintf (file, "%spassed to call", s); s = ", "; }
|
|
331 if (escape_mask & ESCAPE_BAD_CAST)
|
|
332 { fprintf (file, "%sbad cast", s); s = ", "; }
|
|
333 if (escape_mask & ESCAPE_TO_RETURN)
|
|
334 { fprintf (file, "%sreturned from func", s); s = ", "; }
|
|
335 if (escape_mask & ESCAPE_TO_PURE_CONST)
|
|
336 { fprintf (file, "%spassed to pure/const", s); s = ", "; }
|
|
337 if (escape_mask & ESCAPE_IS_GLOBAL)
|
|
338 { fprintf (file, "%sis global var", s); s = ", "; }
|
|
339 if (escape_mask & ESCAPE_IS_PARM)
|
|
340 { fprintf (file, "%sis incoming pointer", s); s = ", "; }
|
|
341 if (escape_mask & ESCAPE_UNKNOWN)
|
|
342 { fprintf (file, "%sunknown escape", s); s = ", "; }
|
|
343 fprintf (file, ")");
|
|
344 }
|
|
345
|
|
346 if (ann->noalias_state == NO_ALIAS)
|
|
347 fprintf (file, ", NO_ALIAS (does not alias other NO_ALIAS symbols)");
|
|
348 else if (ann->noalias_state == NO_ALIAS_GLOBAL)
|
|
349 fprintf (file, ", NO_ALIAS_GLOBAL (does not alias other NO_ALIAS symbols"
|
|
350 " and global vars)");
|
|
351 else if (ann->noalias_state == NO_ALIAS_ANYTHING)
|
|
352 fprintf (file, ", NO_ALIAS_ANYTHING (does not alias any other symbols)");
|
|
353
|
|
354 if (gimple_default_def (cfun, var))
|
|
355 {
|
|
356 fprintf (file, ", default def: ");
|
|
357 print_generic_expr (file, gimple_default_def (cfun, var), dump_flags);
|
|
358 }
|
|
359
|
|
360 if (MTAG_P (var) && may_aliases (var))
|
|
361 {
|
|
362 fprintf (file, ", may aliases: ");
|
|
363 dump_may_aliases_for (file, var);
|
|
364 }
|
|
365
|
|
366 if (!is_gimple_reg (var))
|
|
367 {
|
|
368 if (memory_partition (var))
|
|
369 {
|
|
370 fprintf (file, ", belongs to partition: ");
|
|
371 print_generic_expr (file, memory_partition (var), dump_flags);
|
|
372 }
|
|
373
|
|
374 if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
|
|
375 {
|
|
376 fprintf (file, ", partition symbols: ");
|
|
377 dump_decl_set (file, MPT_SYMBOLS (var));
|
|
378 }
|
|
379 }
|
|
380
|
|
381 fprintf (file, "\n");
|
|
382 }
|
|
383
|
|
384
|
|
/* Dump variable VAR and its may-aliases to stderr.  Convenience
   wrapper around dump_variable for use from a debugger.  */

void
debug_variable (tree var)
{
  dump_variable (stderr, var);
}
|
|
392
|
|
393
|
|
/* Dump various DFA statistics to FILE.  Collects the counters with
   collect_dfa_stats and prints a table of instance counts and
   estimated memory usage (SCALE/LABEL format a byte count into a
   human-readable unit).  */

void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Each section: estimate bytes, accumulate into TOTAL, print a row.  */
  size = num_referenced_vars * sizeof (tree);
  total += size;
  fprintf (file, fmt_str_1, "Referenced variables", (unsigned long)num_referenced_vars,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_var_anns * sizeof (struct var_ann_d);
  total += size;
  fprintf (file, fmt_str_1, "Variables annotated", dfa_stats.num_var_anns,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_phis * sizeof (struct gimple_statement_phi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  /* Avoid a division by zero when the function has no PHI nodes.  */
  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (long) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}
|
|
470
|
|
471
|
|
/* Dump DFA statistics on stderr.  Convenience wrapper around
   dump_dfa_stats for use from a debugger.  */

void
debug_dfa_stats (void)
{
  dump_dfa_stats (stderr);
}
|
|
479
|
|
480
|
|
481 /* Collect DFA statistics and store them in the structure pointed to by
|
|
482 DFA_STATS_P. */
|
|
483
|
|
484 static void
|
|
485 collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
|
|
486 {
|
|
487 basic_block bb;
|
|
488 referenced_var_iterator vi;
|
|
489 tree var;
|
|
490
|
|
491 gcc_assert (dfa_stats_p);
|
|
492
|
|
493 memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));
|
|
494
|
|
495 /* Count all the variable annotations. */
|
|
496 FOR_EACH_REFERENCED_VAR (var, vi)
|
|
497 if (var_ann (var))
|
|
498 dfa_stats_p->num_var_anns++;
|
|
499
|
|
500 /* Walk all the statements in the function counting references. */
|
|
501 FOR_EACH_BB (bb)
|
|
502 {
|
|
503 gimple_stmt_iterator si;
|
|
504
|
|
505 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
|
|
506 {
|
|
507 gimple phi = gsi_stmt (si);
|
|
508 dfa_stats_p->num_phis++;
|
|
509 dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
|
|
510 if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
|
|
511 dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
|
|
512 }
|
|
513
|
|
514 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
|
|
515 {
|
|
516 gimple stmt = gsi_stmt (si);
|
|
517 dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
|
|
518 dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
|
|
519 dfa_stats_p->num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
|
|
520 dfa_stats_p->num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
|
|
521 }
|
|
522 }
|
|
523 }
|
|
524
|
|
525
|
|
526 /*---------------------------------------------------------------------------
|
|
527 Miscellaneous helpers
|
|
528 ---------------------------------------------------------------------------*/
|
|
529 /* Callback for walk_tree. Used to collect variables referenced in
|
|
530 the function. */
|
|
531
|
|
532 static tree
|
|
533 find_vars_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
|
|
534 {
|
|
535 /* If we are reading the lto info back in, we need to rescan the
|
|
536 referenced vars. */
|
|
537 if (TREE_CODE (*tp) == SSA_NAME)
|
|
538 add_referenced_var (SSA_NAME_VAR (*tp));
|
|
539
|
|
540 /* If T is a regular variable that the optimizers are interested
|
|
541 in, add it to the list of variables. */
|
|
542 else if (SSA_VAR_P (*tp))
|
|
543 add_referenced_var (*tp);
|
|
544
|
|
545 /* Type, _DECL and constant nodes have no interesting children.
|
|
546 Ignore them. */
|
|
547 else if (IS_TYPE_OR_DECL_P (*tp) || CONSTANT_CLASS_P (*tp))
|
|
548 *walk_subtrees = 0;
|
|
549
|
|
550 return NULL_TREE;
|
|
551 }
|
|
552
|
|
553 /* Lookup UID in the referenced_vars hashtable and return the associated
|
|
554 variable. */
|
|
555
|
|
556 tree
|
|
557 referenced_var_lookup (unsigned int uid)
|
|
558 {
|
|
559 tree h;
|
|
560 struct tree_decl_minimal in;
|
|
561 in.uid = uid;
|
|
562 h = (tree) htab_find_with_hash (gimple_referenced_vars (cfun), &in, uid);
|
|
563 gcc_assert (h || uid == 0);
|
|
564 return h;
|
|
565 }
|
|
566
|
|
567 /* Check if TO is in the referenced_vars hash table and insert it if not.
|
|
568 Return true if it required insertion. */
|
|
569
|
|
570 bool
|
|
571 referenced_var_check_and_insert (tree to)
|
|
572 {
|
|
573 tree h, *loc;
|
|
574 struct tree_decl_minimal in;
|
|
575 unsigned int uid = DECL_UID (to);
|
|
576
|
|
577 in.uid = uid;
|
|
578 h = (tree) htab_find_with_hash (gimple_referenced_vars (cfun), &in, uid);
|
|
579 if (h)
|
|
580 {
|
|
581 /* DECL_UID has already been entered in the table. Verify that it is
|
|
582 the same entry as TO. See PR 27793. */
|
|
583 gcc_assert (h == to);
|
|
584 return false;
|
|
585 }
|
|
586
|
|
587 loc = (tree *) htab_find_slot_with_hash (gimple_referenced_vars (cfun),
|
|
588 &in, uid, INSERT);
|
|
589 *loc = to;
|
|
590 return true;
|
|
591 }
|
|
592
|
|
/* Lookup VAR UID in the default_defs hashtable and return the associated
   variable (the default-definition SSA_NAME), or NULL if none is
   recorded in function FN.  */

tree
gimple_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (SSA_VAR_P (var));
  /* Build a stack-local SSA_NAME look-alike whose SSA_NAME_VAR carries
     VAR's DECL_UID; the table hashes and compares entries by that UID
     only, so this dummy is enough for lookup.  */
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return (tree) htab_find_with_hash (DEFAULT_DEFS (fn), &in, DECL_UID (var));
}
|
|
606
|
|
/* Insert the pair VAR's UID, DEF into the default_defs hashtable.
   A NULL DEF removes VAR's existing entry (which must exist).  */

void
set_default_def (tree var, tree def)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  void **loc;

  gcc_assert (SSA_VAR_P (var));
  /* Build a stack-local SSA_NAME look-alike keyed on VAR's DECL_UID;
     the table hashes and compares entries by that UID only.  */
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  if (!def)
    {
      /* Removal request: the entry must already be present.  */
      loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
                                      DECL_UID (var), INSERT);
      gcc_assert (*loc);
      htab_remove_elt (DEFAULT_DEFS (cfun), *loc);
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
                                  DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*(tree *) loc) = false;
  *(tree *) loc = def;

  /* Mark DEF as the default definition for VAR.  */
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}
|
|
639
|
|
640 /* Add VAR to the list of referenced variables if it isn't already there. */
|
|
641
|
|
642 bool
|
|
643 add_referenced_var (tree var)
|
|
644 {
|
|
645 var_ann_t v_ann;
|
|
646
|
|
647 v_ann = get_var_ann (var);
|
|
648 gcc_assert (DECL_P (var));
|
|
649
|
|
650 /* Insert VAR into the referenced_vars has table if it isn't present. */
|
|
651 if (referenced_var_check_and_insert (var))
|
|
652 {
|
|
653 /* This is the first time we found this variable, annotate it with
|
|
654 attributes that are intrinsic to the variable. */
|
|
655
|
|
656 /* Tag's don't have DECL_INITIAL. */
|
|
657 if (MTAG_P (var))
|
|
658 return true;
|
|
659
|
|
660 /* Scan DECL_INITIAL for pointer variables as they may contain
|
|
661 address arithmetic referencing the address of other
|
|
662 variables.
|
|
663 Even non-constant initializers need to be walked, because
|
|
664 IPA passes might prove that their are invariant later on. */
|
|
665 if (DECL_INITIAL (var)
|
|
666 /* Initializers of external variables are not useful to the
|
|
667 optimizers. */
|
|
668 && !DECL_EXTERNAL (var))
|
|
669 walk_tree (&DECL_INITIAL (var), find_vars_r, NULL, 0);
|
|
670
|
|
671 return true;
|
|
672 }
|
|
673
|
|
674 return false;
|
|
675 }
|
|
676
|
|
/* Remove VAR from the list of referenced variables, releasing or
   scrubbing its annotation as appropriate.  */

void
remove_referenced_var (tree var)
{
  var_ann_t v_ann;
  struct tree_decl_minimal in;
  void **loc;
  unsigned int uid = DECL_UID (var);

  /* Drop the call-clobber/call-used information recorded for VAR.  */
  clear_call_clobbered (var);
  bitmap_clear_bit (gimple_call_used_vars (cfun), uid);
  if ((v_ann = var_ann (var)))
    {
      /* Preserve var_anns of globals, but clear their alias info.  */
      if (MTAG_P (var)
	  || (!TREE_STATIC (var) && !DECL_EXTERNAL (var)))
	{
	  ggc_free (v_ann);
	  var->base.ann = NULL;
	}
      else
	{
	  v_ann->mpt = NULL_TREE;
	  v_ann->symbol_mem_tag = NULL_TREE;
	}
    }
  gcc_assert (DECL_P (var));
  in.uid = uid;
  /* NOTE(review): this assumes VAR is actually present in the table;
     with NO_INSERT, LOC would be NULL otherwise and htab_clear_slot
     would crash — confirm callers only pass referenced vars.  */
  loc = htab_find_slot_with_hash (gimple_referenced_vars (cfun), &in, uid,
				  NO_INSERT);
  htab_clear_slot (gimple_referenced_vars (cfun), loc);
}
|
|
710
|
|
711
|
|
712 /* Return the virtual variable associated to the non-scalar variable VAR. */
|
|
713
|
|
714 tree
|
|
715 get_virtual_var (tree var)
|
|
716 {
|
|
717 STRIP_NOPS (var);
|
|
718
|
|
719 if (TREE_CODE (var) == SSA_NAME)
|
|
720 var = SSA_NAME_VAR (var);
|
|
721
|
|
722 while (TREE_CODE (var) == REALPART_EXPR || TREE_CODE (var) == IMAGPART_EXPR
|
|
723 || handled_component_p (var))
|
|
724 var = TREE_OPERAND (var, 0);
|
|
725
|
|
726 /* Treating GIMPLE registers as virtual variables makes no sense.
|
|
727 Also complain if we couldn't extract a _DECL out of the original
|
|
728 expression. */
|
|
729 gcc_assert (SSA_VAR_P (var));
|
|
730 gcc_assert (!is_gimple_reg (var));
|
|
731
|
|
732 return var;
|
|
733 }
|
|
734
|
|
735 /* Mark all the naked symbols in STMT for SSA renaming.
|
|
736
|
|
737 NOTE: This function should only be used for brand new statements.
|
|
738 If the caller is modifying an existing statement, it should use the
|
|
739 combination push_stmt_changes/pop_stmt_changes. */
|
|
740
|
|
741 void
|
|
742 mark_symbols_for_renaming (gimple stmt)
|
|
743 {
|
|
744 tree op;
|
|
745 ssa_op_iter iter;
|
|
746
|
|
747 update_stmt (stmt);
|
|
748
|
|
749 /* Mark all the operands for renaming. */
|
|
750 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS)
|
|
751 if (DECL_P (op))
|
|
752 mark_sym_for_renaming (op);
|
|
753 }
|
|
754
|
|
755
|
|
756 /* Find all variables within the gimplified statement that were not
|
|
757 previously visible to the function and add them to the referenced
|
|
758 variables list. */
|
|
759
|
|
760 static tree
|
|
761 find_new_referenced_vars_1 (tree *tp, int *walk_subtrees,
|
|
762 void *data ATTRIBUTE_UNUSED)
|
|
763 {
|
|
764 tree t = *tp;
|
|
765
|
|
766 if (TREE_CODE (t) == VAR_DECL && !var_ann (t))
|
|
767 {
|
|
768 add_referenced_var (t);
|
|
769 mark_sym_for_renaming (t);
|
|
770 }
|
|
771
|
|
772 if (IS_TYPE_OR_DECL_P (t))
|
|
773 *walk_subtrees = 0;
|
|
774
|
|
775 return NULL;
|
|
776 }
|
|
777
|
|
778
|
|
779 /* Find any new referenced variables in STMT. */
|
|
780
|
|
781 void
|
|
782 find_new_referenced_vars (gimple stmt)
|
|
783 {
|
|
784 walk_gimple_op (stmt, find_new_referenced_vars_1, NULL);
|
|
785 }
|
|
786
|
|
787
|
|
/* If EXP is a handled component reference for a structure, return the
   base variable.  The access range is delimited by bit positions *POFFSET and
   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
   and *PMAX_SIZE are equal, the access is non-variable.  */

tree
get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
			 HOST_WIDE_INT *psize,
			 HOST_WIDE_INT *pmax_size)
{
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT maxsize = -1;
  tree size_tree = NULL_TREE;
  HOST_WIDE_INT bit_offset = 0;
  bool seen_variable_array_ref = false;

  gcc_assert (!SSA_VAR_P (exp));

  /* First get the final access size from just the outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else
    {
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      /* A size that does not fit a host integer stays "unknown" (-1).  */
      if (! host_integerp (size_tree, 1))
	bitsize = -1;
      else
	bitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  /* Operand 2 is the bit position of the field.  */
	  bit_offset += tree_low_cst (TREE_OPERAND (exp, 2), 0);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset && TREE_CODE (this_offset) == INTEGER_CST)
	      {
		HOST_WIDE_INT hthis_offset = tree_low_cst (this_offset, 0);

		hthis_offset *= BITS_PER_UNIT;
		bit_offset += hthis_offset;
		bit_offset += tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (maxsize != -1 && csize && host_integerp (csize, 1))
		  maxsize = TREE_INT_CST_LOW (csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* If the resulting bit-offset is constant, track it.  */
	    if (host_integerp (index, 0)
		&& host_integerp (low_bound, 0)
		&& host_integerp (unit_size, 1))
	      {
		HOST_WIDE_INT hindex = tree_low_cst (index, 0);

		hindex -= tree_low_cst (low_bound, 0);
		hindex *= tree_low_cst (unit_size, 1);
		hindex *= BITS_PER_UNIT;
		bit_offset += hindex;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (maxsize != -1 && asize && host_integerp (asize, 1))
		  maxsize = TREE_INT_CST_LOW (asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;
	      }
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  /* The imaginary part lives immediately after the real part.  */
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  /* ??? We probably should give up here and bail out.  */
	  break;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* We need to deal with variable arrays ending structures such as
       struct { int length; int a[1]; } x;           x.a[d]
       struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
       struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
     where we do not know maxsize for variable index accesses to
     the array.  The simplest way to conservatively deal with this
     is to punt in the case that offset + maxsize reaches the
     base type boundary.  */
  if (seen_variable_array_ref
      && maxsize != -1
      && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
      && bit_offset + maxsize
	   == (signed)TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))))
    maxsize = -1;

  /* ??? Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  *poffset = bit_offset;
  *psize = bitsize;
  *pmax_size = maxsize;

  return exp;
}
|
|
954
|
|
955 /* Returns true if STMT references an SSA_NAME that has
|
|
956 SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */
|
|
957
|
|
958 bool
|
|
959 stmt_references_abnormal_ssa_name (gimple stmt)
|
|
960 {
|
|
961 ssa_op_iter oi;
|
|
962 use_operand_p use_p;
|
|
963
|
|
964 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
|
|
965 {
|
|
966 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
|
|
967 return true;
|
|
968 }
|
|
969
|
|
970 return false;
|
|
971 }
|
|
972
|
|
/* Return true, if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  tree base1, base2;
  /* Offsets and sizes of the accesses, in bits, as computed by
     get_ref_base_and_extent.  -1 for a size means "unknown".  */
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT size1 = -1, size2 = -1;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool strict_aliasing_applies;

  /* Both arguments must be one of the memory reference forms this
     predicate understands: a bare variable/SSA name, a component
     reference, an indirect reference, or a TARGET_MEM_REF.  */
  gcc_assert ((SSA_VAR_P (ref1)
	       || handled_component_p (ref1)
	       || INDIRECT_REF_P (ref1)
	       || TREE_CODE (ref1) == TARGET_MEM_REF)
	      && (SSA_VAR_P (ref2)
		  || handled_component_p (ref2)
		  || INDIRECT_REF_P (ref2)
		  || TREE_CODE (ref2) == TARGET_MEM_REF));

  /* Defer to TBAA if possible.  */
  if (flag_strict_aliasing
      && !alias_sets_conflict_p (get_alias_set (ref1), get_alias_set (ref2)))
    return false;

  /* Decompose the references into their base objects and the access.
     For plain bases the offset/size defaults above remain in effect.  */
  base1 = ref1;
  if (handled_component_p (ref1))
    base1 = get_ref_base_and_extent (ref1, &offset1, &size1, &max_size1);
  base2 = ref2;
  if (handled_component_p (ref2))
    base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &max_size2);

  /* If both references are based on different variables, they cannot alias.
     If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (SSA_VAR_P (base1)
      && SSA_VAR_P (base2))
    {
      if (!operand_equal_p (base1, base2, 0))
	return false;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If one base is a ref-all pointer weird things are allowed.  */
  strict_aliasing_applies = (flag_strict_aliasing
			     && (!INDIRECT_REF_P (base1)
				 || get_alias_set (base1) != 0)
			     && (!INDIRECT_REF_P (base2)
				 || get_alias_set (base2) != 0));

  /* If strict aliasing applies the only way to access a scalar variable
     is through a pointer dereference or through a union (gcc extension).  */
  if (strict_aliasing_applies
      && ((SSA_VAR_P (ref2)
	   && !AGGREGATE_TYPE_P (TREE_TYPE (ref2))
	   && !INDIRECT_REF_P (ref1)
	   && TREE_CODE (TREE_TYPE (base1)) != UNION_TYPE)
	  || (SSA_VAR_P (ref1)
	      && !AGGREGATE_TYPE_P (TREE_TYPE (ref1))
	      && !INDIRECT_REF_P (ref2)
	      && TREE_CODE (TREE_TYPE (base2)) != UNION_TYPE)))
    return false;

  /* If both references are through the same type, or if strict aliasing
     doesn't apply they are through two same pointers, they do not alias
     if the accesses do not overlap.  */
  if ((strict_aliasing_applies
       && (TYPE_MAIN_VARIANT (TREE_TYPE (base1))
	   == TYPE_MAIN_VARIANT (TREE_TYPE (base2))))
      || (TREE_CODE (base1) == INDIRECT_REF
	  && TREE_CODE (base2) == INDIRECT_REF
	  && operand_equal_p (TREE_OPERAND (base1, 0),
			      TREE_OPERAND (base2, 0), 0)))
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* If both are component references through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  if (strict_aliasing_applies
      && (TREE_CODE (base1) == INDIRECT_REF
	  || TREE_CODE (base2) == INDIRECT_REF)
      && handled_component_p (ref1)
      && handled_component_p (ref2))
    {
      tree *refp;
      /* Now search for the type of base1 in the access path of ref2.  This
	 would be a common base for doing offset based disambiguation on.  */
      refp = &ref2;
      while (handled_component_p (*refp)
	     /* Note that the following is only conservative if there are
		never copies of types appearing as sub-structures.  */
	     && (TYPE_MAIN_VARIANT (TREE_TYPE (*refp))
		 != TYPE_MAIN_VARIANT (TREE_TYPE (base1))))
	refp = &TREE_OPERAND (*refp, 0);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (*refp))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (base1)))
	{
	  HOST_WIDE_INT offadj, sztmp, msztmp;
	  /* Rebase ref2's offset so both offsets are relative to the
	     common (sub-)object before testing overlap.  */
	  get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
	  offset2 -= offadj;
	  return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
	}
      /* The other way around.  */
      refp = &ref1;
      while (handled_component_p (*refp)
	     && (TYPE_MAIN_VARIANT (TREE_TYPE (*refp))
		 != TYPE_MAIN_VARIANT (TREE_TYPE (base2))))
	refp = &TREE_OPERAND (*refp, 0);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (*refp))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (base2)))
	{
	  HOST_WIDE_INT offadj, sztmp, msztmp;
	  get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
	  offset1 -= offadj;
	  return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
	}
      /* If we can be sure to catch all equivalent types in the search
	 for the common base then we could return false here.  In that
	 case we would be able to disambiguate q->i and p->k.  */
    }

  /* No disambiguation succeeded; conservatively assume the references
     may alias.  */
  return true;
}
|
|
1100
|
|
1101 /* Given a stmt STMT that references memory, return the single stmt
|
|
1102 that is reached by following the VUSE -> VDEF link. Returns
|
|
1103 NULL_TREE, if there is no single stmt that defines all VUSEs of
|
|
1104 STMT.
|
|
1105 Note that for a stmt with a single virtual operand this may return
|
|
1106 a PHI node as well. Note that if all VUSEs are default definitions
|
|
1107 this function will return an empty statement. */
|
|
1108
|
|
1109 gimple
|
|
1110 get_single_def_stmt (gimple stmt)
|
|
1111 {
|
|
1112 gimple def_stmt = NULL;
|
|
1113 tree use;
|
|
1114 ssa_op_iter iter;
|
|
1115
|
|
1116 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_VIRTUAL_USES)
|
|
1117 {
|
|
1118 gimple tmp = SSA_NAME_DEF_STMT (use);
|
|
1119
|
|
1120 /* ??? This is too simplistic for multiple virtual operands
|
|
1121 reaching different PHI nodes of the same basic blocks or for
|
|
1122 reaching all default definitions. */
|
|
1123 if (def_stmt
|
|
1124 && def_stmt != tmp
|
|
1125 && !(gimple_nop_p (def_stmt)
|
|
1126 && gimple_nop_p (tmp)))
|
|
1127 return NULL;
|
|
1128
|
|
1129 def_stmt = tmp;
|
|
1130 }
|
|
1131
|
|
1132 return def_stmt;
|
|
1133 }
|
|
1134
|
|
1135 /* Given a PHI node of virtual operands, tries to eliminate cyclic
|
|
1136 reached definitions if they do not alias REF and returns the
|
|
1137 defining statement of the single virtual operand that flows in
|
|
1138 from a non-backedge. Returns NULL_TREE if such statement within
|
|
1139 the above conditions cannot be found. */
|
|
1140
|
|
1141 gimple
|
|
1142 get_single_def_stmt_from_phi (tree ref, gimple phi)
|
|
1143 {
|
|
1144 tree def_arg = NULL_TREE;
|
|
1145 unsigned i;
|
|
1146
|
|
1147 /* Find the single PHI argument that is not flowing in from a
|
|
1148 back edge and verify that the loop-carried definitions do
|
|
1149 not alias the reference we look for. */
|
|
1150 for (i = 0; i < gimple_phi_num_args (phi); ++i)
|
|
1151 {
|
|
1152 tree arg = PHI_ARG_DEF (phi, i);
|
|
1153 gimple def_stmt;
|
|
1154
|
|
1155 if (!(gimple_phi_arg_edge (phi, i)->flags & EDGE_DFS_BACK))
|
|
1156 {
|
|
1157 /* Multiple non-back edges? Do not try to handle this. */
|
|
1158 if (def_arg)
|
|
1159 return NULL;
|
|
1160 def_arg = arg;
|
|
1161 continue;
|
|
1162 }
|
|
1163
|
|
1164 /* Follow the definitions back to the original PHI node. Bail
|
|
1165 out once a definition is found that may alias REF. */
|
|
1166 def_stmt = SSA_NAME_DEF_STMT (arg);
|
|
1167 do
|
|
1168 {
|
|
1169 if (!is_gimple_assign (def_stmt)
|
|
1170 || refs_may_alias_p (ref, gimple_assign_lhs (def_stmt)))
|
|
1171 return NULL;
|
|
1172 /* ??? This will only work, reaching the PHI node again if
|
|
1173 there is a single virtual operand on def_stmt. */
|
|
1174 def_stmt = get_single_def_stmt (def_stmt);
|
|
1175 if (!def_stmt)
|
|
1176 return NULL;
|
|
1177 }
|
|
1178 while (def_stmt != phi);
|
|
1179 }
|
|
1180
|
|
1181 return SSA_NAME_DEF_STMT (def_arg);
|
|
1182 }
|
|
1183
|
|
1184 /* Return the single reference statement defining all virtual uses
|
|
1185 on STMT or NULL_TREE, if there are multiple defining statements.
|
|
1186 Take into account only definitions that alias REF if following
|
|
1187 back-edges when looking through a loop PHI node. */
|
|
1188
|
|
1189 gimple
|
|
1190 get_single_def_stmt_with_phi (tree ref, gimple stmt)
|
|
1191 {
|
|
1192 switch (NUM_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES))
|
|
1193 {
|
|
1194 case 0:
|
|
1195 gcc_unreachable ();
|
|
1196
|
|
1197 case 1:
|
|
1198 {
|
|
1199 gimple def_stmt = SSA_NAME_DEF_STMT (SINGLE_SSA_TREE_OPERAND
|
|
1200 (stmt, SSA_OP_VIRTUAL_USES));
|
|
1201 /* We can handle lookups over PHI nodes only for a single
|
|
1202 virtual operand. */
|
|
1203 if (gimple_code (def_stmt) == GIMPLE_PHI)
|
|
1204 return get_single_def_stmt_from_phi (ref, def_stmt);
|
|
1205 return def_stmt;
|
|
1206 }
|
|
1207
|
|
1208 default:
|
|
1209 return get_single_def_stmt (stmt);
|
|
1210 }
|
|
1211 }
|