/* Miscellaneous SSA utility functions.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "langhooks.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-inline.h"
#include "varray.h"
#include "timevar.h"
#include "hashtab.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"

/* Pointer map of variable mappings, keyed by edge.  */
static struct pointer_map_t *edge_var_maps;

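/* These maps record, for each redirected edge, the (PHI result, PHI
   argument) pairs that were removed from the edge's old destination
   block: ssa_redirect_edge records them and flush_pending_stmts
   replays them as PHI arguments at the new destination.  */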

/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def)
{
  void **slot;
  edge_var_map_vector old_head, head;
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = pointer_map_create ();

  slot = pointer_map_insert (edge_var_maps, e);
  old_head = head = (edge_var_map_vector) *slot;
  if (!head)
    {
      head = VEC_alloc (edge_var_map, heap, 5);
      *slot = head;
    }
  new_node.def = def;
  new_node.result = result;

  VEC_safe_push (edge_var_map, heap, head, &new_node);
  if (old_head != head)
    {
      /* The push did some reallocation.  Update the pointer map.  */
      *slot = head;
    }
}


/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  void **slot;
  edge_var_map_vector head;

  if (!edge_var_maps)
    return;

  slot = pointer_map_contains (edge_var_maps, e);

  if (slot)
    {
      head = (edge_var_map_vector) *slot;
      VEC_free (edge_var_map, heap, head);
      *slot = NULL;
    }
}


/* Duplicate the redirected var mappings from OLDE to NEWE.

   Since we can't remove a mapping, let's just duplicate it.  This assumes a
   pointer_map can have multiple edges mapping to the same var_map (many to
   one mapping), since we don't remove the previous mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  void **new_slot, **old_slot;
  edge_var_map_vector head;

  if (!edge_var_maps)
    return;

  new_slot = pointer_map_insert (edge_var_maps, newe);
  old_slot = pointer_map_contains (edge_var_maps, olde);
  if (!old_slot)
    return;
  head = (edge_var_map_vector) *old_slot;

  if (head)
    *new_slot = VEC_copy (edge_var_map, heap, head);
  else
    *new_slot = VEC_alloc (edge_var_map, heap, 5);
}


/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

edge_var_map_vector
redirect_edge_var_map_vector (edge e)
{
  void **slot;

  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  slot = pointer_map_contains (edge_var_maps, e);
  if (!slot)
    return NULL;

  return (edge_var_map_vector) *slot;
}

/* Used by redirect_edge_var_map_destroy to free all memory.  */

static bool
free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
                    void **value,
                    void *data ATTRIBUTE_UNUSED)
{
  edge_var_map_vector head = (edge_var_map_vector) *value;
  VEC_free (edge_var_map, heap, head);
  return true;
}

/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_destroy (void)
{
  if (edge_var_maps)
    {
      pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
      pointer_map_destroy (edge_var_maps);
      edge_var_maps = NULL;
    }
}


/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gimple_stmt_iterator gsi;
  gimple phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def;

      phi = gsi_stmt (gsi);
      def = gimple_phi_arg_def (phi, e->dest_idx);

      if (def == NULL_TREE)
        continue;

      redirect_edge_var_map_add (e, gimple_phi_result (phi), def);
    }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}


/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  gimple phi;
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator gsi;

  v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && VEC_iterate (edge_var_map, v, i, vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi_stmt (gsi);
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e);
    }

  redirect_edge_var_map_clear (e);
}

/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (is_virtual && is_gimple_reg (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (!is_virtual && !is_gimple_reg (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}


/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
            gimple stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
             definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}


/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   Dominator information (CDI_DOMINATORS) must be available; it is used
   to check that definitions dominate their uses.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
            gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
           && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
             def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
           && names_defined_in_bb != NULL
           && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking that the
     previous element refers to the same SSA name.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
        listvar = use_p->prev->loc.ssa_name;
      else
        listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
          error ("wrong immediate use list");
          err = true;
        }
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}


/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  */

static bool
verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
        {
          error ("PHI argument is missing for edge %d->%d",
                 e->src->index,
                 e->dest->index);
          err = true;
          goto error;
        }

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
        {
          error ("PHI argument is not SSA_NAME, or invariant");
          err = true;
        }

      if (TREE_CODE (op) == SSA_NAME)
        {
          err = verify_ssa_name (op, !is_gimple_reg (gimple_phi_result (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }

      if (e->dest != bb)
        {
          error ("wrong edge %d->%d for PHI argument",
                 e->src->index, e->dest->index);
          err = true;
        }

      if (err)
        {
          fprintf (stderr, "PHI argument\n");
          print_generic_stmt (stderr, op, TDF_VOPS);
          goto error;
        }
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }


  return err;
}

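/* Verify the consistency of flow-insensitive alias information: every
   variable in a memory tag's alias set must be a memory partition tag
   or an aliasable (addressable) symbol.  */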

static void
verify_flow_insensitive_alias_info (void)
{
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      unsigned int j;
      bitmap aliases;
      tree alias;
      bitmap_iterator bi;

      if (!MTAG_P (var) || !MTAG_ALIASES (var))
        continue;

      aliases = MTAG_ALIASES (var);

      EXECUTE_IF_SET_IN_BITMAP (aliases, 0, j, bi)
        {
          alias = referenced_var (j);

          if (TREE_CODE (alias) != MEMORY_PARTITION_TAG
              && !may_be_aliased (alias))
            {
              error ("non-addressable variable inside an alias set");
              debug_variable (alias);
              goto err;
            }
        }
    }

  return;

err:
  debug_variable (var);
  internal_error ("verify_flow_insensitive_alias_info failed");
}

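/* Verify the consistency of flow-sensitive alias information:
   dereferenced pointers must have a name or symbol memory tag, name
   tags must come with non-empty points-to sets, and escaping pointers
   must have call-clobbered name tags.  */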

static void
verify_flow_sensitive_alias_info (void)
{
  size_t i;
  tree ptr;

  for (i = 1; i < num_ssa_names; i++)
    {
      tree var;
      var_ann_t ann;
      struct ptr_info_def *pi;


      ptr = ssa_name (i);
      if (!ptr)
        continue;

      /* We only care for pointers that are actually referenced in the
         program.  */
      if (!POINTER_TYPE_P (TREE_TYPE (ptr)) || !TREE_VISITED (ptr))
        continue;

      /* RESULT_DECL is special.  If it's a GIMPLE register, then it
         is written to only once, in the return statement.
         Otherwise, aggregate RESULT_DECLs may be written to more than
         once in virtual operands.  */
      var = SSA_NAME_VAR (ptr);
      if (TREE_CODE (var) == RESULT_DECL
          && is_gimple_reg (ptr))
        continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi == NULL)
        continue;

      ann = var_ann (var);
      if (pi->memory_tag_needed && !pi->name_mem_tag && !ann->symbol_mem_tag)
        {
          error ("dereferenced pointers should have a name or a symbol tag");
          goto err;
        }

      if (pi->name_mem_tag
          && (pi->pt_vars == NULL || bitmap_empty_p (pi->pt_vars)))
        {
          error ("pointers with a memory tag should have points-to sets");
          goto err;
        }

      if (pi->value_escapes_p
          && pi->escape_mask & ~ESCAPE_TO_RETURN
          && pi->name_mem_tag)
        {
          tree t = memory_partition (pi->name_mem_tag);
          if (t == NULL_TREE)
            t = pi->name_mem_tag;

          if (!is_call_clobbered (t))
            {
              error ("pointer escapes but its name tag is not call-clobbered");
              goto err;
            }
        }
    }

  return;

err:
  debug_variable (ptr);
  internal_error ("verify_flow_sensitive_alias_info failed");
}


/* Verify the consistency of call clobbering information.  */

static void
verify_call_clobbering (void)
{
  unsigned int i;
  bitmap_iterator bi;
  tree var;
  referenced_var_iterator rvi;

  /* At all times, the call_clobbered flag should agree with the
     call_clobbered_vars bitmap.  Verify both that everything in
     call_clobbered_vars is marked call_clobbered, and that everything
     marked call_clobbered is in call_clobbered_vars.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
    {
      var = referenced_var (i);

      if (memory_partition (var))
        var = memory_partition (var);

      if (!MTAG_P (var) && !var_ann (var)->call_clobbered)
        {
          error ("variable in call_clobbered_vars but not marked "
                 "call_clobbered");
          debug_variable (var);
          goto err;
        }
    }

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (is_gimple_reg (var))
        continue;

      if (memory_partition (var))
        var = memory_partition (var);

      if (!MTAG_P (var)
          && var_ann (var)->call_clobbered
          && !bitmap_bit_p (gimple_call_clobbered_vars (cfun), DECL_UID (var)))
        {
          error ("variable marked call_clobbered but not in "
                 "call_clobbered_vars bitmap");
          debug_variable (var);
          goto err;
        }
    }

  return;

err:
  internal_error ("verify_call_clobbering failed");
}


/* Verify invariants in memory partitions.  */

static void
verify_memory_partitions (void)
{
  unsigned i;
  tree mpt;
  VEC(tree,heap) *mpt_table = gimple_ssa_operands (cfun)->mpt_table;
  struct pointer_set_t *partitioned_syms = pointer_set_create ();

  for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
    {
      unsigned j;
      bitmap_iterator bj;

      if (MPT_SYMBOLS (mpt) == NULL)
        {
          error ("Memory partitions should have at least one symbol");
          debug_variable (mpt);
          goto err;
        }

      EXECUTE_IF_SET_IN_BITMAP (MPT_SYMBOLS (mpt), 0, j, bj)
        {
          tree var = referenced_var (j);
          if (pointer_set_insert (partitioned_syms, var))
            {
              error ("Partitioned symbols should belong to exactly one "
                     "partition");
              debug_variable (var);
              goto err;
            }
        }
    }

  pointer_set_destroy (partitioned_syms);

  return;

err:
  internal_error ("verify_memory_partitions failed");
}


/* Verify the consistency of aliasing information.  */

static void
verify_alias_info (void)
{
  verify_flow_sensitive_alias_info ();
  verify_call_clobbering ();
  verify_flow_insensitive_alias_info ();
  verify_memory_partitions ();
}


/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

void
verify_ssa (bool check_modified_stmt)
{
  size_t i;
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p ());

  verify_stmts ();

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          gimple stmt;
          TREE_VISITED (name) = 0;

          stmt = SSA_NAME_DEF_STMT (name);
          if (!gimple_nop_p (stmt))
            {
              basic_block bb = gimple_bb (stmt);
              verify_def (bb, definition_block,
                          name, stmt, !is_gimple_reg (name));

            }
        }
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      gimple phi;
      edge_iterator ei;
      gimple_stmt_iterator gsi;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->aux)
            {
              error ("AUX pointer initialized for edge %d->%d", e->src->index,
                     e->dest->index);
              goto err;
            }
        }

      /* Verify the arguments for every PHI node in the block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          if (verify_phi_args (phi, bb, definition_block))
            goto err;

          bitmap_set_bit (names_defined_in_bb,
                          SSA_NAME_VERSION (gimple_phi_result (phi)));
        }

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          use_operand_p use_p;

          if (check_modified_stmt && gimple_modified_p (stmt))
            {
              error ("stmt (%p) marked modified after optimization pass: ",
                     (void *)stmt);
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (is_gimple_assign (stmt)
              && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
            {
              tree lhs, base_address;

              lhs = gimple_assign_lhs (stmt);
              base_address = get_base_address (lhs);

              if (base_address
                  && gimple_aliases_computed_p (cfun)
                  && SSA_VAR_P (base_address)
                  && !gimple_has_volatile_ops (stmt)
                  && ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
                {
                  error ("statement makes a memory store, but has no VDEFS");
                  print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
                  goto err;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_VIRTUALS)
            {
              if (verify_ssa_name (op, true))
                {
                  error ("in statement");
                  print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
                  goto err;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
            {
              if (verify_ssa_name (op, false))
                {
                  error ("in statement");
                  print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
                  goto err;
                }
            }

          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
            {
              op = USE_FROM_PTR (use_p);
              if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
                              use_p, stmt, false, names_defined_in_bb))
                goto err;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
            bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
        }

      bitmap_clear (names_defined_in_bb);
    }

  /* Finally, verify alias information.  */
  if (gimple_aliases_computed_p (cfun))
    verify_alias_info ();

  free (definition_block);

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}

/* Return true if the UIDs in both int_tree_maps are equal.  */

int
int_tree_map_eq (const void *va, const void *vb)
{
  const struct int_tree_map *a = (const struct int_tree_map *) va;
  const struct int_tree_map *b = (const struct int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a int_tree_map.  */

unsigned int
int_tree_map_hash (const void *item)
{
  return ((const struct int_tree_map *)item)->uid;
}

/* Return true if the DECL_UIDs in both trees are equal.  */

int
uid_decl_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Hash a tree in a uid_decl_map.  */

unsigned int
uid_decl_map_hash (const void *item)
{
  return ((const_tree)item)->decl_minimal.uid;
}

/* Return true if the DECL_UIDs of the SSA_NAME variables in both trees
   are equal.  */

static int
uid_ssaname_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
}

/* Hash a tree in a uid_ssaname_map.  */

static unsigned int
uid_ssaname_map_hash (const void *item)
{
  return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
}


/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = GGC_CNEW (struct gimple_df);
  fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
                                                    uid_decl_map_eq, NULL);
  fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
                                                 uid_ssaname_map_eq, NULL);
  fn->gimple_df->call_clobbered_vars = BITMAP_GGC_ALLOC ();
  fn->gimple_df->call_used_vars = BITMAP_GGC_ALLOC ();
  fn->gimple_df->addressable_vars = BITMAP_GGC_ALLOC ();
  init_ssanames (fn, 0);
  init_phinodes ();
}


/* Deallocate memory associated with SSA data structures for FNDECL.  */

void
delete_tree_ssa (void)
{
  size_t i;
  basic_block bb;
  gimple_stmt_iterator gsi;
  referenced_var_iterator rvi;
  tree var;

  /* Release any ssa_names still in use.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree var = ssa_name (i);
      if (var && TREE_CODE (var) == SSA_NAME)
        {
          SSA_NAME_IMM_USE_NODE (var).prev = &(SSA_NAME_IMM_USE_NODE (var));
          SSA_NAME_IMM_USE_NODE (var).next = &(SSA_NAME_IMM_USE_NODE (var));
        }
      release_ssa_name (var);
    }

  /* FIXME.  This may not be necessary.  We will release all this
     memory en masse in free_ssa_operands.  This clearing used to be
     necessary to avoid problems with the inliner, but it may not be
     needed anymore.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);

          if (gimple_has_ops (stmt))
            {
              gimple_set_def_ops (stmt, NULL);
              gimple_set_use_ops (stmt, NULL);
              gimple_set_addresses_taken (stmt, NULL);
            }

          if (gimple_has_mem_ops (stmt))
            {
              gimple_set_vdef_ops (stmt, NULL);
              gimple_set_vuse_ops (stmt, NULL);
              BITMAP_FREE (stmt->gsmem.membase.stores);
              BITMAP_FREE (stmt->gsmem.membase.loads);
            }

          gimple_set_modified (stmt, true);
        }
      set_phi_nodes (bb, NULL);
    }

  /* Remove annotations from every referenced local variable.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (!MTAG_P (var)
          && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
        {
          var_ann (var)->mpt = NULL_TREE;
          var_ann (var)->symbol_mem_tag = NULL_TREE;
          continue;
        }
      if (var->base.ann)
        ggc_free (var->base.ann);
      var->base.ann = NULL;
    }
  htab_delete (gimple_referenced_vars (cfun));
  cfun->gimple_df->referenced_vars = NULL;

  fini_ssanames ();
  fini_phinodes ();

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active ())
    fini_ssa_operands ();

  cfun->gimple_df->global_var = NULL_TREE;

  htab_delete (cfun->gimple_df->default_defs);
  cfun->gimple_df->default_defs = NULL;
  cfun->gimple_df->call_clobbered_vars = NULL;
  cfun->gimple_df->call_used_vars = NULL;
  cfun->gimple_df->addressable_vars = NULL;
  cfun->gimple_df->modified_noreturn_calls = NULL;
  if (gimple_aliases_computed_p (cfun))
    {
      delete_alias_heapvars ();
      gcc_assert (!need_ssa_update_p ());
    }
  cfun->gimple_df->aliases_computed_p = false;
  delete_mem_ref_stats (cfun);

  cfun->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_destroy ();
}

/* Helper function for useless_type_conversion_p.  */

static bool
useless_type_conversion_p_1 (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts to restrict qualified pointers.  */
      if ((TYPE_RESTRICT (outer_type)
           != TYPE_RESTRICT (inner_type))
          && TYPE_RESTRICT (outer_type))
        return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* If we know the canonical types, compare them.  */
  if (TYPE_CANONICAL (inner_type)
      && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
    return true;

  /* Changes in machine mode are never useless conversions.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
          || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
        return false;

      /* Conversions from a non-base to a base type are not useless.
         This way we preserve the invariant to do arithmetic in
         base types only.  */
      if (TREE_TYPE (inner_type)
          && TREE_TYPE (inner_type) != inner_type
          && (TREE_TYPE (outer_type) == outer_type
              || TREE_TYPE (outer_type) == NULL_TREE))
        return false;

      /* We don't need to preserve changes in the types' minimum or
         maximum value in general, as these do not generate code
         unless the precisions differ.  */

      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
           && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
           && POINTER_TYPE_P (outer_type))
    {
      /* Don't lose casts between pointers to volatile and non-volatile
         qualified types.  Doing so would result in changing the semantics
         of later accesses.  For function types the volatile qualifier
         is used to indicate noreturn functions.  */
      if (TREE_CODE (TREE_TYPE (outer_type)) != FUNCTION_TYPE
          && TREE_CODE (TREE_TYPE (outer_type)) != METHOD_TYPE
          && TREE_CODE (TREE_TYPE (inner_type)) != FUNCTION_TYPE
          && TREE_CODE (TREE_TYPE (inner_type)) != METHOD_TYPE
          && (TYPE_VOLATILE (TREE_TYPE (outer_type))
              != TYPE_VOLATILE (TREE_TYPE (inner_type)))
          && TYPE_VOLATILE (TREE_TYPE (outer_type)))
        return false;

      /* Do not lose casts between pointers with different
         TYPE_REF_CAN_ALIAS_ALL setting or alias sets.  */
      if ((TYPE_REF_CAN_ALIAS_ALL (inner_type)
           != TYPE_REF_CAN_ALIAS_ALL (outer_type))
          || (get_alias_set (TREE_TYPE (inner_type))
              != get_alias_set (TREE_TYPE (outer_type))))
        return false;

      /* We do not care for const qualification of the pointed-to types
         as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent if their pointed
         to types are effectively the same.  We can strip qualifiers
         on pointed-to types for further comparison, which is done in
         the callee.  */
      return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
                                          TREE_TYPE (inner_type));
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
           && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
           && TREE_CODE (outer_type) == VECTOR_TYPE
           && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  /* For aggregates we may need to fall back to structural equality
     checks.  */
  else if (AGGREGATE_TYPE_P (inner_type)
           && AGGREGATE_TYPE_P (outer_type))
    {
      /* Different types of aggregates are incompatible.  */
      if (TREE_CODE (inner_type) != TREE_CODE (outer_type))
        return false;

      /* ??? This seems to be necessary even for aggregates that don't
         have TYPE_STRUCTURAL_EQUALITY_P set.  */

      /* ??? This should eventually just return false.  */
      return lang_hooks.types_compatible_p (inner_type, outer_type);
    }
  /* Also for functions and possibly other types with
     TYPE_STRUCTURAL_EQUALITY_P set.  */
  else if (TYPE_STRUCTURAL_EQUALITY_P (inner_type)
           && TYPE_STRUCTURAL_EQUALITY_P (outer_type))
    return lang_hooks.types_compatible_p (inner_type, outer_type);

  return false;
}

/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
        If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
        From a < b does not follow a > b.

     3) Types define the available set of operations applicable to values.
        A type conversion is useless if the operations for the target type
        is a subset of the operations for the source type.  For example
        casts to void* are useless, casts from void* are not (void* can't
        be dereferenced or offsetted, but copied, hence its set of operations
        is a strict subset of that of all other data pointer types).  Casts
        to const T* are useless (can't be written to), casts from const T*
        to T* are not.  */

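/* As an illustration of rule 3 above:

     int i;
     void *p = &i;            int *  -> void *  is useless
     int *q = (int *) p;      void * -> int *   is not

   so the first conversion may be stripped, while the second must be
   preserved.  */
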
bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* If the outer type is (void *), then the conversion is not
     necessary.  We have to make sure not to apply this while
     recursing, though.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type)
      && TREE_CODE (TREE_TYPE (outer_type)) == VOID_TYPE)
    return true;

  return useless_type_conversion_p_1 (outer_type, inner_type);
}

/* Return true if a conversion in either direction between TYPE1 and
   TYPE2 is not required.  Otherwise return false.  */

bool
types_compatible_p (tree type1, tree type2)
{
  return (type1 == type2
          || (useless_type_conversion_p (type1, type2)
              && useless_type_conversion_p (type2, type1)));
}

/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return useless_type_conversion_p
      (TREE_TYPE (expr),
       TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}


/* Internal helper for walk_use_def_chains.  VAR, FN and DATA are as
   described in walk_use_def_chains.

   VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
   infinite loops.  We used to have a bitmap for this to just mark
   SSA versions we had visited.  But non-sparse bitmaps are way too
   expensive, while sparse bitmaps may cause quadratic behavior.

   IS_DFS is true if the caller wants to perform a depth-first search
   when visiting PHI nodes.  A DFS will visit each PHI argument and
   call FN after each one.  Otherwise, all the arguments are
   visited first and then FN is called with each of the visited
   arguments in a separate pass.  */

static bool
walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
                       struct pointer_set_t *visited, bool is_dfs)
{
  gimple def_stmt;

  if (pointer_set_insert (visited, var))
    return false;

  def_stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_code (def_stmt) != GIMPLE_PHI)
    {
      /* If we reached the end of the use-def chain, call FN.  */
      return fn (var, def_stmt, data);
    }
  else
    {
      size_t i;

      /* When doing a breadth-first search, call FN before following the
         use-def links for each argument.  */
      if (!is_dfs)
        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
            return true;

      /* Follow use-def links out of each PHI argument.  */
      for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
        {
          tree arg = gimple_phi_arg_def (def_stmt, i);

          /* ARG may be NULL for newly introduced PHI nodes.  */
          if (arg
              && TREE_CODE (arg) == SSA_NAME
              && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
            return true;
        }

      /* When doing a depth-first search, call FN after following the
         use-def links for each argument.  */
      if (is_dfs)
        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
            return true;
    }

  return false;
}


/* Walk use-def chains starting at the SSA variable VAR.  Call
   function FN at each reaching definition found.  FN takes three
   arguments: VAR, its defining statement (DEF_STMT) and a generic
   pointer to whatever state information that FN may want to maintain
   (DATA).  FN is able to stop the walk by returning true, otherwise
   in order to continue the walk, FN should return false.

   Note, that if DEF_STMT is a PHI node, the semantics are slightly
   different.  The first argument to FN is no longer the original
   variable VAR, but the PHI argument currently being examined.  If FN
   wants to get at VAR, it should call PHI_RESULT (PHI).

   If IS_DFS is true, this function will:

     1- walk the use-def chains for all the PHI arguments, and,
     2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.

   If IS_DFS is false, the two steps above are done in reverse order
   (i.e., a breadth-first search).  */

void
walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
                     bool is_dfs)
{
  gimple def_stmt;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  def_stmt = SSA_NAME_DEF_STMT (var);

  /* We only need to recurse if the reaching definition comes from a PHI
     node.  */
  if (gimple_code (def_stmt) != GIMPLE_PHI)
    (*fn) (var, def_stmt, data);
  else
    {
      struct pointer_set_t *visited = pointer_set_create ();
      walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
      pointer_set_destroy (visited);
    }
}

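/* A minimal usage sketch (the callback name is illustrative only):

     static bool
     note_def (tree var, gimple def_stmt, void *data ATTRIBUTE_UNUSED)
     {
       print_gimple_stmt (stderr, def_stmt, 0, 0);
       return false;
     }

     walk_use_def_chains (name, note_def, NULL, false);

   Returning false from the callback continues the walk; returning
   true stops it.  */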

/* Return true if T, an SSA_NAME, has an undefined value.  */

bool
ssa_undefined_value_p (tree t)
{
  tree var = SSA_NAME_VAR (t);

  /* Parameters get their initial value from the function entry.  */
  if (TREE_CODE (var) == PARM_DECL)
    return false;

  /* Hard register variables get their initial value from the ether.  */
  if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  return gimple_nop_p (SSA_NAME_DEF_STMT (t));
}

/* Emit warnings for uninitialized variables.  This is done in two passes.

   The first pass notices real uses of SSA names with undefined values.
   Such uses are unconditionally uninitialized, and we can be certain that
   such a use is a mistake.  This pass is run before most optimizations,
   so that we catch as many as we can.

   The second pass follows PHI nodes to find uses that are potentially
   uninitialized.  In this case we can't necessarily prove that the use
   is really uninitialized.  This pass is run after most optimizations,
   so that we thread as many jumps as possible, and delete as much dead
   code as possible, in order to reduce false positives.  We also look
   again for plain uninitialized variables, since optimization may have
   changed conditionally uninitialized to unconditionally uninitialized.  */

/* Emit a warning for T, an SSA_NAME, being uninitialized.  The exact
   warning text is in GMSGID, and DATA is the statement that triggered
   the warning, or null.  */

static void
warn_uninit (tree t, const char *gmsgid, void *data)
{
  tree var = SSA_NAME_VAR (t);
  gimple context = (gimple) data;
  location_t location;
  expanded_location xloc, floc;

  if (!ssa_undefined_value_p (t))
    return;

  /* TREE_NO_WARNING either means we already warned, or the front end
     wishes to suppress the warning.  */
  if (TREE_NO_WARNING (var))
    return;

  /* Do not warn if it can be initialized outside this module.  */
  if (is_global_var (var))
    return;

  location = (context != NULL && gimple_has_location (context))
             ? gimple_location (context)
             : DECL_SOURCE_LOCATION (var);
  xloc = expand_location (location);
  floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
  if (warning_at (location, OPT_Wuninitialized, gmsgid, var))
    {
      TREE_NO_WARNING (var) = 1;

      if (xloc.file != floc.file
          || xloc.line < floc.line
          || xloc.line > LOCATION_LINE (cfun->function_end_locus))
        inform (input_location, "%J%qD was declared here", var, var);
    }
}

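/* State shared by warn_uninitialized_var across the operands of a
   single statement.  */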
struct walk_data {
  gimple stmt;
  bool always_executed;
  bool warn_possibly_uninitialized;
};

/* Called via walk_tree, look for SSA_NAMEs that have empty definitions
   and warn about them.  */

static tree
warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
  struct walk_data *data = (struct walk_data *) wi->info;
  tree t = *tp;

  /* We do not care about LHS.  */
  if (wi->is_lhs)
    return NULL_TREE;

  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Taking the address of an uninitialized variable does not
         count as using it.  */
      *walk_subtrees = 0;
      break;

    case VAR_DECL:
      {
        /* A VAR_DECL in the RHS of a gimple statement may mean that
           this variable is loaded from memory.  */
        use_operand_p vuse;
        tree op;

        /* If there is no gimple stmt, or alias information has not
           been computed, then we cannot check the VUSE ops.  */
        if (data->stmt == NULL
            || !gimple_aliases_computed_p (cfun))
          return NULL_TREE;

        /* If the load happens as part of a call do not warn about it.  */
        if (is_gimple_call (data->stmt))
          return NULL_TREE;

        vuse = SINGLE_SSA_USE_OPERAND (data->stmt, SSA_OP_VUSE);
        if (vuse == NULL_USE_OPERAND_P)
          return NULL_TREE;

        op = USE_FROM_PTR (vuse);
        if (t != SSA_NAME_VAR (op)
            || !SSA_NAME_IS_DEFAULT_DEF (op))
          return NULL_TREE;
        /* If this is a VUSE of t and it is the default definition,
           then warn about op.  */
        t = op;
        /* Fall through into SSA_NAME.  */
      }

    case SSA_NAME:
      /* We only do data flow with SSA_NAMEs, so that's all we
         can warn about.  */
      if (data->always_executed)
        warn_uninit (t, "%qD is used uninitialized in this function",
                     data->stmt);
      else if (data->warn_possibly_uninitialized)
        warn_uninit (t, "%qD may be used uninitialized in this function",
                     data->stmt);
      *walk_subtrees = 0;
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* The total store transformation performed during gimplification
         creates uninitialized variable uses.  If all is well, these will
         be optimized away, so don't warn now.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
        *walk_subtrees = 0;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (t))
        *walk_subtrees = 0;
      break;
    }

  return NULL_TREE;
}

/* Look for inputs to PHI that are SSA_NAMEs that have empty definitions
   and warn about them.  */

static void
warn_uninitialized_phi (gimple phi)
{
  size_t i, n = gimple_phi_num_args (phi);

  /* Don't look at memory tags.  */
  if (!is_gimple_reg (gimple_phi_result (phi)))
    return;

  for (i = 0; i < n; ++i)
    {
      tree op = gimple_phi_arg_def (phi, i);
      if (TREE_CODE (op) == SSA_NAME)
        warn_uninit (op, "%qD may be used uninitialized in this function",
                     NULL);
    }
}

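/* Walk every statement in the current function and warn about uses of
   uninitialized variables.  WARN_POSSIBLY_UNINITIALIZED enables the
   "may be used uninitialized" warnings for statements that are not
   always executed.  */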
static unsigned int
warn_uninitialized_vars (bool warn_possibly_uninitialized)
{
  gimple_stmt_iterator gsi;
  basic_block bb;
  struct walk_data data;

  data.warn_possibly_uninitialized = warn_possibly_uninitialized;

  calculate_dominance_info (CDI_POST_DOMINATORS);

  FOR_EACH_BB (bb)
    {
      data.always_executed = dominated_by_p (CDI_POST_DOMINATORS,
                                             single_succ (ENTRY_BLOCK_PTR), bb);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          struct walk_stmt_info wi;
          data.stmt = gsi_stmt (gsi);
          memset (&wi, 0, sizeof (wi));
          wi.info = &data;
          walk_gimple_op (gsi_stmt (gsi), warn_uninitialized_var, &wi);
        }
    }

  /* Post-dominator information cannot be reliably updated.  Free it
     after the use.  */

  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}

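/* Execute function of the early uninitialized-variable warning pass.  */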
static unsigned int
execute_early_warn_uninitialized (void)
{
  /* Currently, this pass always runs, but
     execute_late_warn_uninitialized only runs with optimization.  With
     optimization we want to warn about possible uninitialized as late
     as possible, thus don't do it here.  However, without
     optimization we need to warn here about "may be uninitialized".  */
  warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize);
  return 0;
}

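/* Execute function of the late uninitialized-variable warning pass.  */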
static unsigned int
execute_late_warn_uninitialized (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  /* Re-do the plain uninitialized variable check, as optimization may have
     straightened control flow.  Do this first so that we don't accidentally
     get a "may be" warning when we'd have seen an "is" warning later.  */
  warn_uninitialized_vars (/*warn_possibly_uninitialized=*/1);

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      warn_uninitialized_phi (gsi_stmt (gsi));

  return 0;
}

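/* Gate shared by both -Wuninitialized passes: run only when the
   warning is enabled.  */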
static bool
gate_warn_uninitialized (void)
{
  return warn_uninitialized != 0;
}

struct gimple_opt_pass pass_early_warn_uninitialized =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  gate_warn_uninitialized,              /* gate */
  execute_early_warn_uninitialized,     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_ssa,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_late_warn_uninitialized =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  gate_warn_uninitialized,              /* gate */
  execute_late_warn_uninitialized,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_ssa,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */

static unsigned int
execute_update_addresses_taken (void)
{
  tree var;
  referenced_var_iterator rvi;
  gimple_stmt_iterator gsi;
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bitmap vars_updated = BITMAP_ALLOC (NULL);
  bool update_vops = false;

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          const_gimple stmt = gsi_stmt (gsi);
          enum gimple_code code = gimple_code (stmt);
          bitmap taken = gimple_addresses_taken (stmt);

          if (taken)
            bitmap_ior_into (addresses_taken, taken);

          /* If we have a call or an assignment, see if the lhs contains
             a local decl that requires not to be a gimple register.  */
          if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
            {
              tree lhs = gimple_get_lhs (stmt);
              /* A plain decl does not need it set.  */
              if (lhs && handled_component_p (lhs))
                {
                  var = get_base_address (lhs);
                  if (DECL_P (var))
                    bitmap_set_bit (not_reg_needs, DECL_UID (var));
                }
            }
        }

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          size_t i;
          gimple phi = gsi_stmt (gsi);

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
                  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
        }
    }

  /* When possible, clear ADDRESSABLE bit or set the REGISTER bit
     and mark variable for conversion into SSA.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      /* Global variables and result decls cannot be changed.  */
      if (is_global_var (var)
          || TREE_CODE (var) == RESULT_DECL
          || bitmap_bit_p (addresses_taken, DECL_UID (var)))
        continue;

      if (TREE_ADDRESSABLE (var)
          /* Do not change TREE_ADDRESSABLE if we need to preserve var as
             a non-register.  Otherwise we are confused and forget to
             add virtual operands for it.  */
          && (!is_gimple_reg_type (TREE_TYPE (var))
              || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
        {
          TREE_ADDRESSABLE (var) = 0;
          if (is_gimple_reg (var))
            mark_sym_for_renaming (var);
          update_vops = true;
          bitmap_set_bit (vars_updated, DECL_UID (var));
          if (dump_file)
            {
              fprintf (dump_file, "No longer having address taken ");
              print_generic_expr (dump_file, var, 0);
              fprintf (dump_file, "\n");
            }
        }
      if (!DECL_GIMPLE_REG_P (var)
          && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
          && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
        {
          DECL_GIMPLE_REG_P (var) = 1;
          mark_sym_for_renaming (var);
          update_vops = true;
          bitmap_set_bit (vars_updated, DECL_UID (var));
          if (dump_file)
            {
              fprintf (dump_file, "Decl is now a gimple register ");
              print_generic_expr (dump_file, var, 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  /* Operand caches need to be recomputed for operands referencing the updated
     variables.  */
  if (update_vops)
    FOR_EACH_BB (bb)
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);

          if ((gimple_loaded_syms (stmt)
               && bitmap_intersect_p (gimple_loaded_syms (stmt), vars_updated))
              || (gimple_stored_syms (stmt)
                  && bitmap_intersect_p (gimple_stored_syms (stmt), vars_updated)))
            update_stmt (stmt);
        }
  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (vars_updated);
  return 0;
}

struct gimple_opt_pass pass_update_address_taken =
{
 {
  GIMPLE_PASS,
  "addressables",                       /* name */
  NULL,                                 /* gate */
  execute_update_addresses_taken,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_ssa,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_update_ssa                       /* todo_flags_finish */
 }
};