CbC_gcc: annotate of gcc/tree-ssa-operands.c @ changeset 22:0eb6cac880f0
changeset description: add cbc example of quicksort.

author   | kent <kent@cr.ie.u-ryukyu.ac.jp>
date     | Tue, 13 Oct 2009 17:15:58 +0900
parents  | 9de9dad105d4
children | 326d9e06c2e3

rev | line source
0 | 1 /* SSA operands management for trees. |
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 | |
3 Free Software Foundation, Inc. | |
4 | |
5 This file is part of GCC. | |
6 | |
7 GCC is free software; you can redistribute it and/or modify | |
8 it under the terms of the GNU General Public License as published by | |
9 the Free Software Foundation; either version 3, or (at your option) | |
10 any later version. | |
11 | |
12 GCC is distributed in the hope that it will be useful, | |
13 but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 GNU General Public License for more details. | |
16 | |
17 You should have received a copy of the GNU General Public License | |
18 along with GCC; see the file COPYING3. If not see | |
19 <http://www.gnu.org/licenses/>. */ | |
20 | |
21 #include "config.h" | |
22 #include "system.h" | |
23 #include "coretypes.h" | |
24 #include "tm.h" | |
25 #include "tree.h" | |
26 #include "flags.h" | |
27 #include "function.h" | |
28 #include "diagnostic.h" | |
29 #include "tree-flow.h" | |
30 #include "tree-inline.h" | |
31 #include "tree-pass.h" | |
32 #include "ggc.h" | |
33 #include "timevar.h" | |
34 #include "toplev.h" | |
35 #include "langhooks.h" | |
36 #include "ipa-reference.h" | |
1 | 37 #ifndef noCbC |
38 #include "cbc-tree.h" | |
39 #endif | |
0 | 40 |
41 /* This file contains the code required to manage the operands cache of the | |
42 SSA optimizer. For every stmt, we maintain an operand cache in the stmt | |
43 annotation. This cache contains operands that will be of interest to | |
44 optimizers and other passes wishing to manipulate the IL. | |
45 | |
46 The operand types are broken up into REAL and VIRTUAL operands. The real | |
47 operands are represented as pointers into the stmt's operand tree. Thus | |
48 any manipulation of the real operands will be reflected in the actual tree. | |
49 Virtual operands are represented solely in the cache, although the base | |
50 variable for the SSA_NAME may or may not occur in the stmt's tree. | |
51 Manipulation of the virtual operands will not be reflected in the stmt tree. | |
52 | |
53 The routines in this file are concerned with creating this operand cache | |
54 from a stmt tree. | |
55 | |
56 The operand tree is then parsed by the various get_* routines which look | |
57 through the stmt tree for the occurrence of operands which may be of | |
58 interest, and calls are made to the append_* routines whenever one is | |
59 found. There are 4 of these routines, each representing one of the | |
60 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs. | |
61 | |
62 The append_* routines check for duplication, and simply keep a list of | |
63 unique objects for each operand type in the build_* extendable vectors. | |
64 | |
65 Once the stmt tree is completely parsed, the finalize_ssa_operands() | |
66 routine is called, which proceeds to perform the finalization routine | |
67 on each of the 4 operand vectors which have been built up. | |
68 | |
69 If the stmt had a previous operand cache, the finalization routines | |
70 attempt to match up the new operands with the old ones. If it's a perfect | |
71 match, the old vector is simply reused. If it isn't a perfect match, then | |
72 a new vector is created and the new operands are placed there. For | |
73 virtual operands, if the previous cache had SSA_NAME version of a | |
74 variable, and that same variable occurs in the same operands cache, then | |
75 the new cache vector will also get the same SSA_NAME. | |
76 | |
77 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new | |
78 operand vector for VUSE, then the new vector will also be modified | |
79 such that it contains 'a_5' rather than 'a'. */ | |
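/* [Editor's illustrative sketch, not part of tree-ssa-operands.c: a minimal,
   self-contained model of the append/finalize pattern described above.
   Operands found during a scan are appended (without duplicates) to a build
   vector, and finalize() then reuses the old cache when it matches exactly.
   All names here (build_ops, append_op, finalize, ...) are made up.  */

#include <stdio.h>
#include <string.h>

#define MAX_OPS 16

static const char *build_ops[MAX_OPS];   /* stand-in for the build_* vectors */
static int n_build;

static void append_op (const char *op)
{
  int i;
  for (i = 0; i < n_build; i++)
    if (strcmp (build_ops[i], op) == 0)
      return;                             /* duplicates are dropped */
  build_ops[n_build++] = op;
}

/* Finalize: if the new build vector matches the old cache exactly,
   keep the old cache; otherwise replace it.  Returns 1 if rebuilt.  */
static int finalize (const char **cache, int *n_cache)
{
  int i;
  if (*n_cache == n_build)
    {
      for (i = 0; i < n_build; i++)
        if (strcmp (cache[i], build_ops[i]) != 0)
          break;
      if (i == n_build)
        { n_build = 0; return 0; }        /* perfect match: reuse old cache */
    }
  for (i = 0; i < n_build; i++)
    cache[i] = build_ops[i];
  *n_cache = n_build;
  n_build = 0;
  return 1;
}

int main (void)
{
  const char *cache[MAX_OPS];
  int n_cache = 0;
  append_op ("a"); append_op ("b"); append_op ("a");
  printf ("rebuilt=%d, %d operands\n", finalize (cache, &n_cache), n_cache);
  append_op ("a"); append_op ("b");
  printf ("rebuilt=%d, %d operands\n", finalize (cache, &n_cache), n_cache);
  return 0;
}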
80 | |
81 /* Helper functions from gimple.c. These are GIMPLE manipulation | |
82 routines that only the operand scanner should need. */ | |
83 void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *); | |
84 void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *); | |
85 | |
86 /* Structure storing statistics on how many call clobbers we have, and | |
87 how many were avoided. */ | |
88 | |
89 static struct | |
90 { | |
91 /* Number of call-clobbered ops we attempt to add to calls in | |
92 add_call_clobbered_mem_symbols. */ | |
93 unsigned int clobbered_vars; | |
94 | |
95 /* Number of write-clobbers (VDEFs) avoided by using | |
96 not_written information. */ | |
97 unsigned int static_write_clobbers_avoided; | |
98 | |
99 /* Number of reads (VUSEs) avoided by using not_read information. */ | |
100 unsigned int static_read_clobbers_avoided; | |
101 | |
102 /* Number of write-clobbers avoided because the variable can't escape to | |
103 this call. */ | |
104 unsigned int unescapable_clobbers_avoided; | |
105 | |
106 /* Number of read-only uses we attempt to add to calls in | |
107 add_call_read_mem_symbols. */ | |
108 unsigned int readonly_clobbers; | |
109 | |
110 /* Number of read-only uses we avoid using not_read information. */ | |
111 unsigned int static_readonly_clobbers_avoided; | |
112 } clobber_stats; | |
113 | |
114 | |
115 /* Flags to describe operand properties in helpers. */ | |
116 | |
117 /* By default, operands are loaded. */ | |
118 #define opf_use 0 | |
119 | |
120 /* Operand is the target of an assignment expression or a | |
121 call-clobbered variable. */ | |
122 #define opf_def (1 << 0) | |
123 | |
124 /* No virtual operands should be created in the expression. This is used | |
125 when traversing ADDR_EXPR nodes which have different semantics than | |
126 other expressions. Inside an ADDR_EXPR node, the only operands that we | |
127 need to consider are indices into arrays. For instance, &a.b[i] should | |
128 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a | |
129 VUSE for 'b'. */ | |
130 #define opf_no_vops (1 << 1) | |
131 | |
132 /* Operand is an implicit reference. This is used to distinguish | |
133 explicit assignments in the form of MODIFY_EXPR from | |
134 clobbering sites like function calls or ASM_EXPRs. */ | |
135 #define opf_implicit (1 << 2) | |
136 | |
137 /* Array for building all the def operands. */ | |
138 static VEC(tree,heap) *build_defs; | |
139 | |
140 /* Array for building all the use operands. */ | |
141 static VEC(tree,heap) *build_uses; | |
142 | |
143 /* Set for building all the VDEF operands. */ | |
144 static VEC(tree,heap) *build_vdefs; | |
145 | |
146 /* Set for building all the VUSE operands. */ | |
147 static VEC(tree,heap) *build_vuses; | |
148 | |
149 Bitmap obstack for our data structures that need to survive across | |
150 compilations of multiple functions. */ | |
151 static bitmap_obstack operands_bitmap_obstack; | |
152 | |
153 /* Set for building all the loaded symbols. */ | |
154 static bitmap build_loads; | |
155 | |
156 /* Set for building all the stored symbols. */ | |
157 static bitmap build_stores; | |
158 | |
159 static void get_expr_operands (gimple, tree *, int); | |
160 | |
161 /* Number of functions with initialized ssa_operands. */ | |
162 static int n_initialized = 0; | |
163 | |
164 /* Statement change buffer. Data structure used to record state | |
165 information for statements. This is used to determine what needs | |
166 to be done in order to update the SSA web after a statement is | |
167 modified by a pass. If STMT is a statement that has just been | |
168 created, or needs to be folded via fold_stmt, or anything that | |
169 changes its physical structure then the pass should: | |
170 | |
171 1- Call push_stmt_changes (&stmt) to record the current state of | |
172 STMT before any modifications are made. | |
173 | |
174 2- Make all appropriate modifications to the statement. | |
175 | |
176 3- Call pop_stmt_changes (&stmt) to find new symbols that | |
177 need to be put in SSA form, SSA name mappings for names that | |
178 have disappeared, recompute invariantness for address | |
179 expressions, cleanup EH information, etc. | |
180 | |
181 If it is possible to determine that the statement was not modified, | |
182 instead of calling pop_stmt_changes it is quicker to call | |
183 discard_stmt_changes to avoid the expensive and unnecessary operand | |
184 re-scan and change comparison. */ | |
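/* [Editor's sketch, not part of the original file: the three-step protocol
   described above as it might appear in a hypothetical optimization pass.
   push/pop/discard_stmt_changes are the entry points documented here;
   'stmt', 'new_rhs' and 'stmt_really_changed' are made-up names.

       push_stmt_changes (&stmt);                  // 1- snapshot loads/stores
       gimple_assign_set_rhs1 (stmt, new_rhs);     // 2- modify the statement
       if (stmt_really_changed)
         pop_stmt_changes (&stmt);                 // 3- rescan operands, update SSA web
       else
         discard_stmt_changes (&stmt);             //    cheaper when nothing changed
*/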
185 | |
186 struct scb_d | |
187 { | |
188 /* Pointer to the statement being modified. */ | |
189 gimple *stmt_p; | |
190 | |
191 /* If the statement references memory these are the sets of symbols | |
192 loaded and stored by the statement. */ | |
193 bitmap loads; | |
194 bitmap stores; | |
195 }; | |
196 | |
197 typedef struct scb_d *scb_t; | |
198 DEF_VEC_P(scb_t); | |
199 DEF_VEC_ALLOC_P(scb_t,heap); | |
200 | |
201 /* Stack of statement change buffers (SCB). Every call to | |
202 push_stmt_changes pushes a new buffer onto the stack. Calls to | |
203 pop_stmt_changes pop a buffer off of the stack and compute the set | |
204 of changes for the popped statement. */ | |
205 static VEC(scb_t,heap) *scb_stack; | |
206 | |
207 /* Return the DECL_UID of the base variable of T. */ | |
208 | |
209 static inline unsigned | |
210 get_name_decl (const_tree t) | |
211 { | |
212 if (TREE_CODE (t) != SSA_NAME) | |
213 return DECL_UID (t); | |
214 else | |
215 return DECL_UID (SSA_NAME_VAR (t)); | |
216 } | |
217 | |
218 | |
219 /* Comparison function for qsort used in operand_build_sort_virtual. */ | |
220 | |
221 int | |
222 operand_build_cmp (const void *p, const void *q) | |
223 { | |
224 const_tree const e1 = *((const_tree const *)p); | |
225 const_tree const e2 = *((const_tree const *)q); | |
226 const unsigned int u1 = get_name_decl (e1); | |
227 const unsigned int u2 = get_name_decl (e2); | |
228 | |
229 /* We want to sort in ascending order. They can never be equal. */ | |
230 #ifdef ENABLE_CHECKING | |
231 gcc_assert (u1 != u2); | |
232 #endif | |
233 return (u1 > u2 ? 1 : -1); | |
234 } | |
235 | |
236 | |
237 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */ | |
238 | |
239 static inline void | |
240 operand_build_sort_virtual (VEC(tree,heap) *list) | |
241 { | |
242 int num = VEC_length (tree, list); | |
243 | |
244 if (num < 2) | |
245 return; | |
246 | |
247 if (num == 2) | |
248 { | |
249 if (get_name_decl (VEC_index (tree, list, 0)) | |
250 > get_name_decl (VEC_index (tree, list, 1))) | |
251 { | |
252 /* Swap elements if in the wrong order. */ | |
253 tree tmp = VEC_index (tree, list, 0); | |
254 VEC_replace (tree, list, 0, VEC_index (tree, list, 1)); | |
255 VEC_replace (tree, list, 1, tmp); | |
256 } | |
257 return; | |
258 } | |
259 | |
260 /* There are 3 or more elements, call qsort. */ | |
261 qsort (VEC_address (tree, list), | |
262 VEC_length (tree, list), | |
263 sizeof (tree), | |
264 operand_build_cmp); | |
265 } | |
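/* [Editor's illustrative sketch, not part of the original file: a
   self-contained version of the pattern used by operand_build_sort_virtual
   above, i.e. sort an array ascending by a derived unsigned key via qsort,
   with a hand-rolled fast path for two elements.  The struct and names are
   made up for the example.  */

#include <stdio.h>
#include <stdlib.h>

struct item { unsigned uid; const char *name; };

static int item_cmp (const void *p, const void *q)
{
  const struct item *a = (const struct item *) p;
  const struct item *b = (const struct item *) q;
  /* Keys are assumed distinct, just like the DECL_UIDs above.  */
  return a->uid > b->uid ? 1 : -1;
}

static void sort_items (struct item *v, size_t n)
{
  if (n < 2)
    return;
  if (n == 2)                       /* swap-in-place fast path */
    {
      if (v[0].uid > v[1].uid)
        { struct item t = v[0]; v[0] = v[1]; v[1] = t; }
      return;
    }
  qsort (v, n, sizeof (struct item), item_cmp);
}

int main (void)
{
  struct item v[] = { {42, "c"}, {7, "a"}, {19, "b"} };
  size_t i;
  sort_items (v, 3);
  for (i = 0; i < 3; i++)
    printf ("%u %s\n", v[i].uid, v[i].name);
  return 0;
}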
266 | |
267 /* Return true if the SSA operands cache is active. */ | |
268 | |
269 bool | |
270 ssa_operands_active (void) | |
271 { | |
272 /* This function may be invoked from contexts where CFUN is NULL | |
273 (IPA passes), return false for now. FIXME: operands may be | |
274 active in each individual function, maybe this function should | |
275 take CFUN as a parameter. */ | |
276 if (cfun == NULL) | |
277 return false; | |
278 | |
279 return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active; | |
280 } | |
281 | |
282 | |
283 /* VOPs are of variable size, so the free list maps "free buckets" to the | |
284 following table: | |
285 bucket # operands | |
286 ------ ---------- | |
287 0 1 | |
288 1 2 | |
289 ... | |
290 15 16 | |
291 16 17-24 | |
292 17 25-32 | |
293 18 33-40 | |
294 ... | |
295 29 121-128 | |
296 Any VOPs larger than this are simply added to the largest bucket when they | |
297 are freed. */ | |
298 | |
299 | |
300 /* Return the number of operands used in bucket BUCKET. */ | |
301 | |
302 static inline int | |
303 vop_free_bucket_size (int bucket) | |
304 { | |
305 #ifdef ENABLE_CHECKING | |
306 gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS); | |
307 #endif | |
308 if (bucket < 16) | |
309 return bucket + 1; | |
310 return (bucket - 13) * 8; | |
311 } | |
312 | |
313 | |
314 /* For a vop of NUM operands, return the bucket NUM belongs to. If NUM is | |
315 beyond the end of the bucket table, return -1. */ | |
316 | |
317 static inline int | |
318 vop_free_bucket_index (int num) | |
319 { | |
320 gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16); | |
321 | |
322 /* Sizes 1 through 16 use buckets 0-15. */ | |
323 if (num <= 16) | |
324 return num - 1; | |
325 /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks. */ | |
326 num = 14 + (num - 1) / 8; | |
327 if (num >= NUM_VOP_FREE_BUCKETS) | |
328 return -1; | |
329 else | |
330 return num; | |
331 } | |
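/* [Editor's worked example of the mapping implemented by the two functions
   above, assuming NUM_VOP_FREE_BUCKETS == 30 as the table in the earlier
   comment implies (buckets 0..29):

       num operands   vop_free_bucket_index (num)   vop_free_bucket_size (bucket)
             1                    0                            1
            16                   15                           16
            17                   16                           24
            25                   17                           32
            33                   18                           40
           121                   29                          128
           129                   -1  (freed into the largest bucket instead)
*/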
332 | |
333 | |
334 /* Initialize the VOP free buckets. */ | |
335 | |
336 static inline void | |
337 init_vop_buckets (void) | |
338 { | |
339 int x; | |
340 | |
341 for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++) | |
342 gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL; | |
343 } | |
344 | |
345 | |
346 /* Add PTR to the appropriate VOP bucket. */ | |
347 | |
348 static inline void | |
349 add_vop_to_freelist (voptype_p ptr) | |
350 { | |
351 int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev)); | |
352 | |
353 /* Too large, use the largest bucket so it's not a complete throwaway. */ | |
354 if (bucket == -1) | |
355 bucket = NUM_VOP_FREE_BUCKETS - 1; | |
356 | |
357 ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket]; | |
358 gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr; | |
359 } | |
360 | |
361 | |
362 /* These are the sizes of the operand memory buffer which gets allocated each | |
363 time more operands space is required. The final value is the amount that is | |
364 allocated every time after that. */ | |
365 | |
366 #define OP_SIZE_INIT 0 | |
367 #define OP_SIZE_1 30 | |
368 #define OP_SIZE_2 110 | |
369 #define OP_SIZE_3 511 | |
370 | |
371 /* Initialize the operand cache routines. */ | |
372 | |
373 void | |
374 init_ssa_operands (void) | |
375 { | |
376 if (!n_initialized++) | |
377 { | |
378 build_defs = VEC_alloc (tree, heap, 5); | |
379 build_uses = VEC_alloc (tree, heap, 10); | |
380 build_vuses = VEC_alloc (tree, heap, 25); | |
381 build_vdefs = VEC_alloc (tree, heap, 25); | |
382 bitmap_obstack_initialize (&operands_bitmap_obstack); | |
383 build_loads = BITMAP_ALLOC (&operands_bitmap_obstack); | |
384 build_stores = BITMAP_ALLOC (&operands_bitmap_obstack); | |
385 scb_stack = VEC_alloc (scb_t, heap, 20); | |
386 } | |
387 | |
388 gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL); | |
389 gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL); | |
390 gimple_ssa_operands (cfun)->operand_memory_index | |
391 = gimple_ssa_operands (cfun)->ssa_operand_mem_size; | |
392 gimple_ssa_operands (cfun)->ops_active = true; | |
393 memset (&clobber_stats, 0, sizeof (clobber_stats)); | |
394 init_vop_buckets (); | |
395 gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT; | |
396 } | |
397 | |
398 | |
399 /* Dispose of anything required by the operand routines. */ | |
400 | |
401 void | |
402 fini_ssa_operands (void) | |
403 { | |
404 struct ssa_operand_memory_d *ptr; | |
405 unsigned ix; | |
406 tree mpt; | |
407 | |
408 if (!--n_initialized) | |
409 { | |
410 VEC_free (tree, heap, build_defs); | |
411 VEC_free (tree, heap, build_uses); | |
412 VEC_free (tree, heap, build_vdefs); | |
413 VEC_free (tree, heap, build_vuses); | |
414 BITMAP_FREE (build_loads); | |
415 BITMAP_FREE (build_stores); | |
416 | |
417 /* The change buffer stack had better be empty. */ | |
418 gcc_assert (VEC_length (scb_t, scb_stack) == 0); | |
419 VEC_free (scb_t, heap, scb_stack); | |
420 scb_stack = NULL; | |
421 } | |
422 | |
423 gimple_ssa_operands (cfun)->free_defs = NULL; | |
424 gimple_ssa_operands (cfun)->free_uses = NULL; | |
425 | |
426 while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL) | |
427 { | |
428 gimple_ssa_operands (cfun)->operand_memory | |
429 = gimple_ssa_operands (cfun)->operand_memory->next; | |
430 ggc_free (ptr); | |
431 } | |
432 | |
433 for (ix = 0; | |
434 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt); | |
435 ix++) | |
436 { | |
437 if (mpt) | |
438 BITMAP_FREE (MPT_SYMBOLS (mpt)); | |
439 } | |
440 | |
441 VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table); | |
442 | |
443 gimple_ssa_operands (cfun)->ops_active = false; | |
444 | |
445 if (!n_initialized) | |
446 bitmap_obstack_release (&operands_bitmap_obstack); | |
447 | |
448 if (dump_file && (dump_flags & TDF_STATS)) | |
449 { | |
450 fprintf (dump_file, "Original clobbered vars: %d\n", | |
451 clobber_stats.clobbered_vars); | |
452 fprintf (dump_file, "Static write clobbers avoided: %d\n", | |
453 clobber_stats.static_write_clobbers_avoided); | |
454 fprintf (dump_file, "Static read clobbers avoided: %d\n", | |
455 clobber_stats.static_read_clobbers_avoided); | |
456 fprintf (dump_file, "Unescapable clobbers avoided: %d\n", | |
457 clobber_stats.unescapable_clobbers_avoided); | |
458 fprintf (dump_file, "Original read-only clobbers: %d\n", | |
459 clobber_stats.readonly_clobbers); | |
460 fprintf (dump_file, "Static read-only clobbers avoided: %d\n", | |
461 clobber_stats.static_readonly_clobbers_avoided); | |
462 } | |
463 } | |
464 | |
465 | |
466 /* Return memory for operands of SIZE chunks. */ | |
467 | |
468 static inline void * | |
469 ssa_operand_alloc (unsigned size) | |
470 { | |
471 char *ptr; | |
472 | |
473 if (gimple_ssa_operands (cfun)->operand_memory_index + size | |
474 >= gimple_ssa_operands (cfun)->ssa_operand_mem_size) | |
475 { | |
476 struct ssa_operand_memory_d *ptr; | |
477 | |
478 if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT) | |
479 gimple_ssa_operands (cfun)->ssa_operand_mem_size | |
480 = OP_SIZE_1 * sizeof (struct voptype_d); | |
481 else | |
482 if (gimple_ssa_operands (cfun)->ssa_operand_mem_size | |
483 == OP_SIZE_1 * sizeof (struct voptype_d)) | |
484 gimple_ssa_operands (cfun)->ssa_operand_mem_size | |
485 = OP_SIZE_2 * sizeof (struct voptype_d); | |
486 else | |
487 gimple_ssa_operands (cfun)->ssa_operand_mem_size | |
488 = OP_SIZE_3 * sizeof (struct voptype_d); | |
489 | |
490 /* Go right to the maximum size if the request is too large. */ | |
491 if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size) | |
492 gimple_ssa_operands (cfun)->ssa_operand_mem_size | |
493 = OP_SIZE_3 * sizeof (struct voptype_d); | |
494 | |
495 /* We can reliably trigger the case that we need arbitrarily many | |
496 operands (see PR34093), so allocate a buffer just for this request. */ | |
497 if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size) | |
498 gimple_ssa_operands (cfun)->ssa_operand_mem_size = size; | |
499 | |
500 ptr = (struct ssa_operand_memory_d *) | |
501 ggc_alloc (sizeof (struct ssa_operand_memory_d) | |
502 + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1); | |
503 ptr->next = gimple_ssa_operands (cfun)->operand_memory; | |
504 gimple_ssa_operands (cfun)->operand_memory = ptr; | |
505 gimple_ssa_operands (cfun)->operand_memory_index = 0; | |
506 } | |
507 ptr = &(gimple_ssa_operands (cfun)->operand_memory | |
508 ->mem[gimple_ssa_operands (cfun)->operand_memory_index]); | |
509 gimple_ssa_operands (cfun)->operand_memory_index += size; | |
510 return ptr; | |
511 } | |
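/* [Editor's illustrative sketch, not from the original file: a self-contained
   (C99) bump allocator following the same growth policy as ssa_operand_alloc,
   but with made-up chunk sizes and malloc instead of GC allocation.  Chunks
   grow through a fixed schedule, allocation just bumps an index, and an
   oversized request gets a chunk of exactly its own size.  */

#include <stdio.h>
#include <stdlib.h>

struct chunk { struct chunk *next; size_t size; char mem[]; };

static struct chunk *chunks;       /* newest chunk first */
static size_t chunk_size;          /* current chunk capacity */
static size_t bump_index;          /* next free byte in the newest chunk */

static void *bump_alloc (size_t size)
{
  if (chunks == NULL || bump_index + size > chunks->size)
    {
      /* Growth schedule: 128 -> 1024 -> 8192 bytes, then stay at 8192.  */
      if (chunk_size == 0) chunk_size = 128;
      else if (chunk_size == 128) chunk_size = 1024;
      else chunk_size = 8192;
      if (size > chunk_size) chunk_size = size;   /* dedicated big chunk */

      struct chunk *c = malloc (sizeof *c + chunk_size);
      c->next = chunks;
      c->size = chunk_size;
      chunks = c;
      bump_index = 0;
    }
  void *p = chunks->mem + bump_index;
  bump_index += size;
  return p;
}

int main (void)
{
  int i;
  for (i = 0; i < 100; i++)
    bump_alloc (24);                     /* mostly served from a few chunks */
  char *big = bump_alloc (64 * 1024);    /* forces a dedicated chunk */
  printf ("big chunk holds %zu bytes\n", chunks->size);
  (void) big;
  return 0;
}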
512 | |
513 | |
514 /* Allocate a DEF operand. */ | |
515 | |
516 static inline struct def_optype_d * | |
517 alloc_def (void) | |
518 { | |
519 struct def_optype_d *ret; | |
520 if (gimple_ssa_operands (cfun)->free_defs) | |
521 { | |
522 ret = gimple_ssa_operands (cfun)->free_defs; | |
523 gimple_ssa_operands (cfun)->free_defs | |
524 = gimple_ssa_operands (cfun)->free_defs->next; | |
525 } | |
526 else | |
527 ret = (struct def_optype_d *) | |
528 ssa_operand_alloc (sizeof (struct def_optype_d)); | |
529 return ret; | |
530 } | |
531 | |
532 | |
533 /* Allocate a USE operand. */ | |
534 | |
535 static inline struct use_optype_d * | |
536 alloc_use (void) | |
537 { | |
538 struct use_optype_d *ret; | |
539 if (gimple_ssa_operands (cfun)->free_uses) | |
540 { | |
541 ret = gimple_ssa_operands (cfun)->free_uses; | |
542 gimple_ssa_operands (cfun)->free_uses | |
543 = gimple_ssa_operands (cfun)->free_uses->next; | |
544 } | |
545 else | |
546 ret = (struct use_optype_d *) | |
547 ssa_operand_alloc (sizeof (struct use_optype_d)); | |
548 return ret; | |
549 } | |
550 | |
551 | |
552 /* Allocate a vop with NUM elements. */ | |
553 | |
554 static inline struct voptype_d * | |
555 alloc_vop (int num) | |
556 { | |
557 struct voptype_d *ret = NULL; | |
558 int alloc_size = 0; | |
559 | |
560 int bucket = vop_free_bucket_index (num); | |
561 if (bucket != -1) | |
562 { | |
563 /* If there is a free operand, use it. */ | |
564 if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL) | |
565 { | |
566 ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket]; | |
567 gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = | |
568 gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next; | |
569 } | |
570 else | |
571 alloc_size = vop_free_bucket_size(bucket); | |
572 } | |
573 else | |
574 alloc_size = num; | |
575 | |
576 if (alloc_size > 0) | |
577 ret = (struct voptype_d *)ssa_operand_alloc ( | |
578 sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t)); | |
579 | |
580 VUSE_VECT_NUM_ELEM (ret->usev) = num; | |
581 return ret; | |
582 } | |
583 | |
584 | |
585 /* This routine makes sure that PTR is in an immediate use list, and makes | |
586 sure the stmt pointer is set to the current stmt. */ | |
587 | |
588 static inline void | |
589 set_virtual_use_link (use_operand_p ptr, gimple stmt) | |
590 { | |
591 /* fold_stmt may have changed the stmt pointers. */ | |
592 if (ptr->loc.stmt != stmt) | |
593 ptr->loc.stmt = stmt; | |
594 | |
595 /* If this use isn't in a list, add it to the correct list. */ | |
596 if (!ptr->prev) | |
597 link_imm_use (ptr, *(ptr->use)); | |
598 } | |
599 | |
600 | |
601 /* Adds OP to the list of defs after LAST. */ | |
602 | |
603 static inline def_optype_p | |
604 add_def_op (tree *op, def_optype_p last) | |
605 { | |
606 def_optype_p new_def; | |
607 | |
608 new_def = alloc_def (); | |
609 DEF_OP_PTR (new_def) = op; | |
610 last->next = new_def; | |
611 new_def->next = NULL; | |
612 return new_def; | |
613 } | |
614 | |
615 | |
616 /* Adds OP to the list of uses of statement STMT after LAST. */ | |
617 | |
618 static inline use_optype_p | |
619 add_use_op (gimple stmt, tree *op, use_optype_p last) | |
620 { | |
621 use_optype_p new_use; | |
622 | |
623 new_use = alloc_use (); | |
624 USE_OP_PTR (new_use)->use = op; | |
625 link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt); | |
626 last->next = new_use; | |
627 new_use->next = NULL; | |
628 return new_use; | |
629 } | |
630 | |
631 | |
632 /* Return a virtual op pointer with NUM elements which are all | |
633 initialized to OP and are linked into the immediate uses for STMT. | |
634 The new vop is appended after PREV. */ | |
635 | |
636 static inline voptype_p | |
637 add_vop (gimple stmt, tree op, int num, voptype_p prev) | |
638 { | |
639 voptype_p new_vop; | |
640 int x; | |
641 | |
642 new_vop = alloc_vop (num); | |
643 for (x = 0; x < num; x++) | |
644 { | |
645 VUSE_OP_PTR (new_vop, x)->prev = NULL; | |
646 SET_VUSE_OP (new_vop, x, op); | |
647 VUSE_OP_PTR (new_vop, x)->use = &new_vop->usev.uses[x].use_var; | |
648 link_imm_use_stmt (VUSE_OP_PTR (new_vop, x), | |
649 new_vop->usev.uses[x].use_var, stmt); | |
650 } | |
651 | |
652 if (prev) | |
653 prev->next = new_vop; | |
654 new_vop->next = NULL; | |
655 return new_vop; | |
656 } | |
657 | |
658 | |
659 /* Adds OP to the list of vuses of statement STMT after LAST, and moves | |
660 LAST to the new element. */ | |
661 | |
662 static inline voptype_p | |
663 add_vuse_op (gimple stmt, tree op, int num, voptype_p last) | |
664 { | |
665 voptype_p new_vop = add_vop (stmt, op, num, last); | |
666 VDEF_RESULT (new_vop) = NULL_TREE; | |
667 return new_vop; | |
668 } | |
669 | |
670 | |
671 /* Adds OP to the list of vdefs of statement STMT after LAST, and moves | |
672 LAST to the new element. */ | |
673 | |
674 static inline voptype_p | |
675 add_vdef_op (gimple stmt, tree op, int num, voptype_p last) | |
676 { | |
677 voptype_p new_vop = add_vop (stmt, op, num, last); | |
678 VDEF_RESULT (new_vop) = op; | |
679 return new_vop; | |
680 } | |
681 | |
682 | |
683 /* Takes elements from build_defs and turns them into def operands of STMT. | |
684 TODO -- Make build_defs VEC of tree *. */ | |
685 | |
686 static inline void | |
687 finalize_ssa_defs (gimple stmt) | |
688 { | |
689 unsigned new_i; | |
690 struct def_optype_d new_list; | |
691 def_optype_p old_ops, last; | |
692 unsigned int num = VEC_length (tree, build_defs); | |
693 | |
694 /* There should only be a single real definition per assignment. */ | |
695 gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1); | |
696 | |
697 new_list.next = NULL; | |
698 last = &new_list; | |
699 | |
700 old_ops = gimple_def_ops (stmt); | |
701 | |
702 new_i = 0; | |
703 | |
704 /* Check for the common case of 1 def that hasn't changed. */ | |
705 if (old_ops && old_ops->next == NULL && num == 1 | |
706 && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops)) | |
707 return; | |
708 | |
709 /* If there is anything in the old list, free it. */ | |
710 if (old_ops) | |
711 { | |
712 old_ops->next = gimple_ssa_operands (cfun)->free_defs; | |
713 gimple_ssa_operands (cfun)->free_defs = old_ops; | |
714 } | |
715 | |
716 /* If there is anything remaining in the build_defs list, simply emit it. */ | |
717 for ( ; new_i < num; new_i++) | |
718 last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last); | |
719 | |
720 /* Now set the stmt's operands. */ | |
721 gimple_set_def_ops (stmt, new_list.next); | |
722 | |
723 #ifdef ENABLE_CHECKING | |
724 { | |
725 def_optype_p ptr; | |
726 unsigned x = 0; | |
727 for (ptr = gimple_def_ops (stmt); ptr; ptr = ptr->next) | |
728 x++; | |
729 | |
730 gcc_assert (x == num); | |
731 } | |
732 #endif | |
733 } | |
734 | |
735 | |
736 /* Takes elements from build_uses and turns them into use operands of STMT. | |
737 TODO -- Make build_uses VEC of tree *. */ | |
738 | |
739 static inline void | |
740 finalize_ssa_uses (gimple stmt) | |
741 { | |
742 unsigned new_i; | |
743 struct use_optype_d new_list; | |
744 use_optype_p old_ops, ptr, last; | |
745 | |
746 new_list.next = NULL; | |
747 last = &new_list; | |
748 | |
749 old_ops = gimple_use_ops (stmt); | |
750 | |
751 /* If there is anything in the old list, free it. */ | |
752 if (old_ops) | |
753 { | |
754 for (ptr = old_ops; ptr; ptr = ptr->next) | |
755 delink_imm_use (USE_OP_PTR (ptr)); | |
756 old_ops->next = gimple_ssa_operands (cfun)->free_uses; | |
757 gimple_ssa_operands (cfun)->free_uses = old_ops; | |
758 } | |
759 | |
760 /* Now create nodes for all the new uses. */ | |
761 for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++) | |
762 last = add_use_op (stmt, | |
763 (tree *) VEC_index (tree, build_uses, new_i), | |
764 last); | |
765 | |
766 /* Now set the stmt's operands. */ | |
767 gimple_set_use_ops (stmt, new_list.next); | |
768 | |
769 #ifdef ENABLE_CHECKING | |
770 { | |
771 unsigned x = 0; | |
772 for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) | |
773 x++; | |
774 | |
775 gcc_assert (x == VEC_length (tree, build_uses)); | |
776 } | |
777 #endif | |
778 } | |
779 | |
780 | |
781 /* Takes elements from BUILD_VDEFS and turns them into vdef operands of | |
782 STMT. */ | |
783 | |
784 static inline void | |
785 finalize_ssa_vdefs (gimple stmt) | |
786 { | |
787 unsigned new_i; | |
788 struct voptype_d new_list; | |
789 voptype_p old_ops, ptr, last; | |
790 | |
791 /* Set the symbols referenced by STMT. */ | |
792 gimple_set_stored_syms (stmt, build_stores, &operands_bitmap_obstack); | |
793 | |
794 /* If aliases have not been computed, do not instantiate a virtual | |
795 operator on STMT. Initially, we only compute the SSA form on | |
796 GIMPLE registers. The virtual SSA form is only computed after | |
797 alias analysis, so virtual operators will remain unrenamed and | |
798 the verifier will complain. However, alias analysis needs to | |
799 access symbol load/store information, so we need to compute | |
800 those. */ | |
801 if (!gimple_aliases_computed_p (cfun)) | |
802 return; | |
803 | |
804 new_list.next = NULL; | |
805 last = &new_list; | |
806 | |
807 old_ops = gimple_vdef_ops (stmt); | |
808 new_i = 0; | |
809 while (old_ops && new_i < VEC_length (tree, build_vdefs)) | |
810 { | |
811 tree op = VEC_index (tree, build_vdefs, new_i); | |
812 unsigned new_uid = get_name_decl (op); | |
813 unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops)); | |
814 | |
815 /* FIXME, for now each VDEF operator should have at most one | |
816 operand in its RHS. */ | |
817 gcc_assert (VDEF_NUM (old_ops) == 1); | |
818 | |
819 if (old_uid == new_uid) | |
820 { | |
821 /* If the symbols are the same, reuse the existing operand. */ | |
822 last->next = old_ops; | |
823 last = old_ops; | |
824 old_ops = old_ops->next; | |
825 last->next = NULL; | |
826 set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt); | |
827 new_i++; | |
828 } | |
829 else if (old_uid < new_uid) | |
830 { | |
831 /* If old is less than new, old goes to the free list. */ | |
832 voptype_p next; | |
833 delink_imm_use (VDEF_OP_PTR (old_ops, 0)); | |
834 next = old_ops->next; | |
835 add_vop_to_freelist (old_ops); | |
836 old_ops = next; | |
837 } | |
838 else | |
839 { | |
840 /* This is a new operand. */ | |
841 last = add_vdef_op (stmt, op, 1, last); | |
842 new_i++; | |
843 } | |
844 } | |
845 | |
846 /* If there is anything remaining in BUILD_VDEFS, simply emit it. */ | |
847 for ( ; new_i < VEC_length (tree, build_vdefs); new_i++) | |
848 last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last); | |
849 | |
850 /* If there is anything in the old list, free it. */ | |
851 if (old_ops) | |
852 { | |
853 for (ptr = old_ops; ptr; ptr = last) | |
854 { | |
855 last = ptr->next; | |
856 delink_imm_use (VDEF_OP_PTR (ptr, 0)); | |
857 add_vop_to_freelist (ptr); | |
858 } | |
859 } | |
860 | |
861 /* Now set STMT's operands. */ | |
862 gimple_set_vdef_ops (stmt, new_list.next); | |
863 | |
864 #ifdef ENABLE_CHECKING | |
865 { | |
866 unsigned x = 0; | |
867 for (ptr = gimple_vdef_ops (stmt); ptr; ptr = ptr->next) | |
868 x++; | |
869 | |
870 gcc_assert (x == VEC_length (tree, build_vdefs)); | |
871 } | |
872 #endif | |
873 } | |
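/* [Editor's illustrative sketch, not from the original file: the sorted-merge
   pattern used by finalize_ssa_vdefs above (and finalize_ssa_vuse_ops below),
   reduced to two UID arrays that are already sorted ascending.  "reuse",
   "free" and "add" stand in for keeping the old operand node, putting it on
   the free list, and allocating a new one.  */

#include <stdio.h>

static void merge_by_uid (const unsigned *old, int n_old,
                          const unsigned *new_, int n_new)
{
  int i = 0, j = 0;
  while (i < n_old && j < n_new)
    {
      if (old[i] == new_[j])
        { printf ("reuse %u\n", old[i]); i++; j++; }
      else if (old[i] < new_[j])
        { printf ("free  %u\n", old[i]); i++; }     /* symbol disappeared */
      else
        { printf ("add   %u\n", new_[j]); j++; }    /* brand new symbol */
    }
  for (; i < n_old; i++) printf ("free  %u\n", old[i]);
  for (; j < n_new; j++) printf ("add   %u\n", new_[j]);
}

int main (void)
{
  const unsigned old_ops[] = { 3, 7, 12 };
  const unsigned new_ops[] = { 3, 9, 12, 20 };
  merge_by_uid (old_ops, 3, new_ops, 4);
  return 0;
}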
874 | |
875 | |
876 /* Takes elements from BUILD_VUSES and turns them into VUSE operands of | |
877 STMT. */ | |
878 | |
879 static inline void | |
880 finalize_ssa_vuse_ops (gimple stmt) | |
881 { | |
882 unsigned new_i, old_i; | |
883 voptype_p old_ops, last; | |
884 VEC(tree,heap) *new_ops; | |
885 | |
886 /* Set the symbols referenced by STMT. */ | |
887 gimple_set_loaded_syms (stmt, build_loads, &operands_bitmap_obstack); | |
888 | |
889 /* If aliases have not been computed, do not instantiate a virtual | |
890 operator on STMT. Initially, we only compute the SSA form on | |
891 GIMPLE registers. The virtual SSA form is only computed after | |
892 alias analysis, so virtual operators will remain unrenamed and | |
893 the verifier will complain. However, alias analysis needs to | |
894 access symbol load/store information, so we need to compute | |
895 those. */ | |
896 if (!gimple_aliases_computed_p (cfun)) | |
897 return; | |
898 | |
899 /* STMT should have at most one VUSE operator. */ | |
900 old_ops = gimple_vuse_ops (stmt); | |
901 gcc_assert (old_ops == NULL || old_ops->next == NULL); | |
902 | |
903 new_ops = NULL; | |
904 new_i = old_i = 0; | |
905 while (old_ops | |
906 && old_i < VUSE_NUM (old_ops) | |
907 && new_i < VEC_length (tree, build_vuses)) | |
908 { | |
909 tree new_op = VEC_index (tree, build_vuses, new_i); | |
910 tree old_op = VUSE_OP (old_ops, old_i); | |
911 unsigned new_uid = get_name_decl (new_op); | |
912 unsigned old_uid = get_name_decl (old_op); | |
913 | |
914 if (old_uid == new_uid) | |
915 { | |
916 /* If the symbols are the same, reuse the existing operand. */ | |
917 VEC_safe_push (tree, heap, new_ops, old_op); | |
918 new_i++; | |
919 old_i++; | |
920 } | |
921 else if (old_uid < new_uid) | |
922 { | |
923 /* If OLD_UID is less than NEW_UID, the old operand has | |
924 disappeared, skip to the next old operand. */ | |
925 old_i++; | |
926 } | |
927 else | |
928 { | |
929 /* This is a new operand. */ | |
930 VEC_safe_push (tree, heap, new_ops, new_op); | |
931 new_i++; | |
932 } | |
933 } | |
934 | |
935 /* If there is anything remaining in the build_vuses list, simply emit it. */ | |
936 for ( ; new_i < VEC_length (tree, build_vuses); new_i++) | |
937 VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i)); | |
938 | |
939 /* If there is anything in the old list, free it. */ | |
940 if (old_ops) | |
941 { | |
942 for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++) | |
943 delink_imm_use (VUSE_OP_PTR (old_ops, old_i)); | |
944 add_vop_to_freelist (old_ops); | |
945 gimple_set_vuse_ops (stmt, NULL); | |
946 } | |
947 | |
948 /* If there are any operands, instantiate a VUSE operator for STMT. */ | |
949 if (new_ops) | |
950 { | |
951 tree op; | |
952 unsigned i; | |
953 | |
954 last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL); | |
955 | |
956 for (i = 0; VEC_iterate (tree, new_ops, i, op); i++) | |
957 SET_USE (VUSE_OP_PTR (last, (int) i), op); | |
958 | |
959 gimple_set_vuse_ops (stmt, last); | |
960 VEC_free (tree, heap, new_ops); | |
961 } | |
962 | |
963 #ifdef ENABLE_CHECKING | |
964 { | |
965 unsigned x; | |
966 | |
967 if (gimple_vuse_ops (stmt)) | |
968 { | |
969 gcc_assert (gimple_vuse_ops (stmt)->next == NULL); | |
970 x = VUSE_NUM (gimple_vuse_ops (stmt)); | |
971 } | |
972 else | |
973 x = 0; | |
974 | |
975 gcc_assert (x == VEC_length (tree, build_vuses)); | |
976 } | |
977 #endif | |
978 } | |
979 | |
980 /* Return a new VUSE operand vector for STMT. */ | |
981 | |
982 static void | |
983 finalize_ssa_vuses (gimple stmt) | |
984 { | |
985 unsigned num, num_vdefs; | |
986 unsigned vuse_index; | |
987 | |
988 /* Remove superfluous VUSE operands. If the statement already has a | |
989 VDEF operator for a variable 'a', then a VUSE for 'a' is not | |
990 needed because VDEFs imply a VUSE of the variable. For instance, | |
991 suppose that variable 'a' is pointed-to by p and q: | |
992 | |
993 # VUSE <a_2> | |
994 # a_3 = VDEF <a_2> | |
995 *p = *q; | |
996 | |
997 The VUSE <a_2> is superfluous because it is implied by the | |
998 VDEF operator. */ | |
999 num = VEC_length (tree, build_vuses); | |
1000 num_vdefs = VEC_length (tree, build_vdefs); | |
1001 | |
1002 if (num > 0 && num_vdefs > 0) | |
1003 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); ) | |
1004 { | |
1005 tree vuse; | |
1006 vuse = VEC_index (tree, build_vuses, vuse_index); | |
1007 if (TREE_CODE (vuse) != SSA_NAME) | |
1008 { | |
1009 var_ann_t ann = var_ann (vuse); | |
1010 ann->in_vuse_list = 0; | |
1011 if (ann->in_vdef_list) | |
1012 { | |
1013 VEC_ordered_remove (tree, build_vuses, vuse_index); | |
1014 continue; | |
1015 } | |
1016 } | |
1017 vuse_index++; | |
1018 } | |
1019 | |
1020 finalize_ssa_vuse_ops (stmt); | |
1021 } | |
1022 | |
1023 | |
1024 /* Clear the in_list bits and empty the build array for VDEFs and | |
1025 VUSEs. */ | |
1026 | |
1027 static inline void | |
1028 cleanup_build_arrays (void) | |
1029 { | |
1030 unsigned i; | |
1031 tree t; | |
1032 | |
1033 for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++) | |
1034 if (TREE_CODE (t) != SSA_NAME) | |
1035 var_ann (t)->in_vdef_list = false; | |
1036 | |
1037 for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++) | |
1038 if (TREE_CODE (t) != SSA_NAME) | |
1039 var_ann (t)->in_vuse_list = false; | |
1040 | |
1041 VEC_truncate (tree, build_vdefs, 0); | |
1042 VEC_truncate (tree, build_vuses, 0); | |
1043 VEC_truncate (tree, build_defs, 0); | |
1044 VEC_truncate (tree, build_uses, 0); | |
1045 bitmap_clear (build_loads); | |
1046 bitmap_clear (build_stores); | |
1047 } | |
1048 | |
1049 | |
1050 /* Finalize all the build vectors, fill the new ones into INFO. */ | |
1051 | |
1052 static inline void | |
1053 finalize_ssa_stmt_operands (gimple stmt) | |
1054 { | |
1055 finalize_ssa_defs (stmt); | |
1056 finalize_ssa_uses (stmt); | |
1057 if (gimple_has_mem_ops (stmt)) | |
1058 { | |
1059 finalize_ssa_vdefs (stmt); | |
1060 finalize_ssa_vuses (stmt); | |
1061 } | |
1062 cleanup_build_arrays (); | |
1063 } | |
1064 | |
1065 | |
1066 /* Start the process of building up operands vectors in INFO. */ | |
1067 | |
1068 static inline void | |
1069 start_ssa_stmt_operands (void) | |
1070 { | |
1071 gcc_assert (VEC_length (tree, build_defs) == 0); | |
1072 gcc_assert (VEC_length (tree, build_uses) == 0); | |
1073 gcc_assert (VEC_length (tree, build_vuses) == 0); | |
1074 gcc_assert (VEC_length (tree, build_vdefs) == 0); | |
1075 gcc_assert (bitmap_empty_p (build_loads)); | |
1076 gcc_assert (bitmap_empty_p (build_stores)); | |
1077 } | |
1078 | |
1079 | |
1080 /* Add DEF_P to the list of pointers to operands. */ | |
1081 | |
1082 static inline void | |
1083 append_def (tree *def_p) | |
1084 { | |
1085 VEC_safe_push (tree, heap, build_defs, (tree) def_p); | |
1086 } | |
1087 | |
1088 | |
1089 /* Add USE_P to the list of pointers to operands. */ | |
1090 | |
1091 static inline void | |
1092 append_use (tree *use_p) | |
1093 { | |
1094 VEC_safe_push (tree, heap, build_uses, (tree) use_p); | |
1095 } | |
1096 | |
1097 | |
1098 /* Add VAR to the set of variables that require a VDEF operator. */ | |
1099 | |
1100 static inline void | |
1101 append_vdef (tree var) | |
1102 { | |
1103 tree sym; | |
1104 | |
1105 if (TREE_CODE (var) != SSA_NAME) | |
1106 { | |
1107 tree mpt; | |
1108 var_ann_t ann; | |
1109 | |
1110 /* If VAR belongs to a memory partition, use it instead of VAR. */ | |
1111 mpt = memory_partition (var); | |
1112 if (mpt) | |
1113 var = mpt; | |
1114 | |
1115 /* Don't allow duplicate entries. */ | |
1116 ann = get_var_ann (var); | |
1117 if (ann->in_vdef_list) | |
1118 return; | |
1119 | |
1120 ann->in_vdef_list = true; | |
1121 sym = var; | |
1122 } | |
1123 else | |
1124 sym = SSA_NAME_VAR (var); | |
1125 | |
1126 VEC_safe_push (tree, heap, build_vdefs, var); | |
1127 bitmap_set_bit (build_stores, DECL_UID (sym)); | |
1128 } | |
1129 | |
1130 | |
1131 /* Add VAR to the set of variables that require a VUSE operator. */ | |
1132 | |
1133 static inline void | |
1134 append_vuse (tree var) | |
1135 { | |
1136 tree sym; | |
1137 | |
1138 if (TREE_CODE (var) != SSA_NAME) | |
1139 { | |
1140 tree mpt; | |
1141 var_ann_t ann; | |
1142 | |
1143 /* If VAR belongs to a memory partition, use it instead of VAR. */ | |
1144 mpt = memory_partition (var); | |
1145 if (mpt) | |
1146 var = mpt; | |
1147 | |
1148 /* Don't allow duplicate entries. */ | |
1149 ann = get_var_ann (var); | |
1150 if (ann->in_vuse_list) | |
1151 return; | |
1152 else if (ann->in_vdef_list) | |
1153 { | |
1154 /* We don't want a vuse if we already have a vdef, but we must | |
1155 still put this in build_loads. */ | |
1156 bitmap_set_bit (build_loads, DECL_UID (var)); | |
1157 return; | |
1158 } | |
1159 | |
1160 ann->in_vuse_list = true; | |
1161 sym = var; | |
1162 } | |
1163 else | |
1164 sym = SSA_NAME_VAR (var); | |
1165 | |
1166 VEC_safe_push (tree, heap, build_vuses, var); | |
1167 bitmap_set_bit (build_loads, DECL_UID (sym)); | |
1168 } | |
1169 | |
1170 | |
1171 /* REF is a tree that contains the entire pointer dereference | |
1172 expression, if available, or NULL otherwise. ALIAS is the variable | |
1173 we are asking if REF can access. OFFSET and SIZE come from the | |
1174 memory access expression that generated this virtual operand. | |
1175 | |
1176 XXX: We should handle the NO_ALIAS attributes here. */ | |
1177 | |
1178 static bool | |
1179 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset, | |
1180 HOST_WIDE_INT size) | |
1181 { | |
1182 bool offsetgtz = offset > 0; | |
1183 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset; | |
1184 tree base = ref ? get_base_address (ref) : NULL; | |
1185 | |
1186 /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be | |
1187 using a call-clobbered memory tag. By definition, call-clobbered | |
1188 memory tags can always touch .GLOBAL_VAR. */ | |
1189 if (alias == gimple_global_var (cfun)) | |
1190 return true; | |
1191 | |
1192 /* If ref is a TARGET_MEM_REF, just return true, as we can't really | |
1193 disambiguate them right now. */ | |
1194 if (ref && TREE_CODE (ref) == TARGET_MEM_REF) | |
1195 return true; | |
1196 | |
1197 /* Without strict aliasing, it is impossible for a component access | |
1198 through a pointer to touch a random variable, unless that | |
1199 variable *is* a structure or a pointer. | |
1200 | |
1201 That is, given p->c, and some random global variable b, | |
1202 there is no legal way that p->c could be an access to b. | |
1203 | |
1204 Without strict aliasing on, we consider it legal to do something | |
1205 like: | |
1206 | |
1207 struct foos { int l; }; | |
1208 int foo; | |
1209 static struct foos *getfoo(void); | |
1210 int main (void) | |
1211 { | |
1212 struct foos *f = getfoo(); | |
1213 f->l = 1; | |
1214 foo = 2; | |
1215 if (f->l == 1) | |
1216 abort(); | |
1217 exit(0); | |
1218 } | |
1219 static struct foos *getfoo(void) | |
1220 { return (struct foos *)&foo; } | |
1221 | |
1222 (taken from 20000623-1.c) | |
1223 | |
1224 The docs also say/imply that access through union pointers | |
1225 is legal (but *not* if you take the address of the union member, | |
1226 i.e. the inverse), such that you can do | |
1227 | |
1228 typedef union { | |
1229 int d; | |
1230 } U; | |
1231 | |
1232 int rv; | |
1233 void breakme() | |
1234 { | |
1235 U *rv0; | |
1236 U *pretmp = (U*)&rv; | |
1237 rv0 = pretmp; | |
1238 rv0->d = 42; | |
1239 } | |
1240 To implement this, we just punt on accesses through union | |
1241 pointers entirely. | |
1242 | |
1243 Another case we have to allow is accessing a variable | |
1244 through an array access at offset zero. This happens from | |
1245 code generated by the fortran frontend like | |
1246 | |
1247 char[1:1] & my_char_ref; | |
1248 char my_char; | |
1249 my_char_ref_1 = (char[1:1] &) &my_char; | |
1250 D.874_2 = (*my_char_ref_1)[1]{lb: 1 sz: 1}; | |
1251 */ | |
1252 if (ref | |
1253 && flag_strict_aliasing | |
1254 && TREE_CODE (ref) != INDIRECT_REF | |
1255 && !MTAG_P (alias) | |
1256 && base | |
1257 && (TREE_CODE (base) != INDIRECT_REF | |
1258 || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE) | |
1259 && (TREE_CODE (base) != INDIRECT_REF | |
1260 || TREE_CODE (ref) != ARRAY_REF | |
1261 || offset != 0 | |
1262 || (DECL_SIZE (alias) | |
1263 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST | |
1264 && size != -1 | |
1265 && (unsigned HOST_WIDE_INT)size | |
1266 != TREE_INT_CST_LOW (DECL_SIZE (alias)))) | |
1267 && !AGGREGATE_TYPE_P (TREE_TYPE (alias)) | |
1268 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE | |
1269 && !var_ann (alias)->is_heapvar | |
1270 /* When the struct has may_alias attached to it, we must not | |
1271 return false here. */ | |
1272 && get_alias_set (base)) | |
1273 { | |
1274 #ifdef ACCESS_DEBUGGING | |
1275 fprintf (stderr, "Access to "); | |
1276 print_generic_expr (stderr, ref, 0); | |
1277 fprintf (stderr, " may not touch "); | |
1278 print_generic_expr (stderr, alias, 0); | |
1279 fprintf (stderr, " in function %s\n", get_name (current_function_decl)); | |
1280 #endif | |
1281 return false; | |
1282 } | |
1283 | |
1284 /* If the offset of the access is greater than the size of one of | |
1285 the possible aliases, it can't be touching that alias, because it | |
1286 would be past the end of the structure. */ | |
1287 else if (ref | |
1288 && flag_strict_aliasing | |
1289 && TREE_CODE (ref) != INDIRECT_REF | |
1290 && !MTAG_P (alias) | |
1291 && !var_ann (alias)->is_heapvar | |
1292 && !POINTER_TYPE_P (TREE_TYPE (alias)) | |
1293 && offsetgtz | |
1294 && DECL_SIZE (alias) | |
1295 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST | |
1296 && uoffset >= TREE_INT_CST_LOW (DECL_SIZE (alias))) | |
1297 { | |
1298 #ifdef ACCESS_DEBUGGING | |
1299 fprintf (stderr, "Access to "); | |
1300 print_generic_expr (stderr, ref, 0); | |
1301 fprintf (stderr, " may not touch "); | |
1302 print_generic_expr (stderr, alias, 0); | |
1303 fprintf (stderr, " in function %s\n", get_name (current_function_decl)); | |
1304 #endif | |
1305 return false; | |
1306 } | |
1307 | |
1308 return true; | |
1309 } | |
1310 | |
1311 /* Add VAR to the virtual operands for STMT. FLAGS is as in | |
1312 get_expr_operands. FULL_REF is a tree that contains the entire | |
1313 pointer dereference expression, if available, or NULL otherwise. | |
1314 OFFSET and SIZE come from the memory access expression that | |
1315 generated this virtual operand. IS_CALL_SITE is true if the | |
1316 affected statement is a call site. */ | |
1317 | |
1318 static void | |
1319 add_virtual_operand (tree var, gimple stmt, int flags, | |
1320 tree full_ref, HOST_WIDE_INT offset, | |
1321 HOST_WIDE_INT size, bool is_call_site) | |
1322 { | |
1323 bitmap aliases = NULL; | |
1324 tree sym; | |
1325 var_ann_t v_ann; | |
1326 | |
1327 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var); | |
1328 v_ann = var_ann (sym); | |
1329 | |
1330 /* Mark the statement as having memory operands. */ | |
1331 gimple_set_references_memory (stmt, true); | |
1332 | |
1333 /* If the variable cannot be modified and this is a VDEF change | |
1334 it into a VUSE. This happens when read-only variables are marked | |
1335 call-clobbered and/or aliased to writable variables. We only make | |
1336 this change for non-specific (implicit) stores. | |
1337 | |
1338 Note that if this is a specific store, i.e. associated with a | |
1339 MODIFY_EXPR, then we can't suppress the VDEF, lest we run | |
1340 into validation problems. | |
1341 | |
1342 This can happen when programs cast away const, leaving us with a | |
1343 store to read-only memory. If the statement is actually executed | |
1344 at runtime, then the program is ill formed. If the statement is | |
1345 not executed then all is well. At the very least, we cannot ICE. */ | |
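      /* [Editor's illustrative example of the cast-away-const case described
         above; 'ro' and 'clobber' are made-up names:

             static const int ro = 42;
             void clobber (void *p) { *(int *) p = 1; }
             ...
             clobber ((void *) &ro);     // store to read-only memory

         If 'ro' is treated as clobbered by the call, the implicit VDEF it
         would receive is downgraded to a VUSE by the test below.]  */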
1346 if ((flags & opf_implicit) && unmodifiable_var_p (var)) | |
1347 flags &= ~opf_def; | |
1348 | |
1349 /* The variable is not a GIMPLE register. Add it (or its aliases) to | |
1350 virtual operands, unless the caller has specifically requested | |
1351 not to add virtual operands (used when adding operands inside an | |
1352 ADDR_EXPR expression). */ | |
1353 if (flags & opf_no_vops) | |
1354 return; | |
1355 | |
1356 if (MTAG_P (var)) | |
1357 aliases = MTAG_ALIASES (var); | |
1358 | |
1359 if (aliases == NULL) | |
1360 { | |
1361 if (!gimple_aliases_computed_p (cfun) && (flags & opf_def)) | |
1362 gimple_set_has_volatile_ops (stmt, true); | |
1363 | |
1364 /* The variable is not aliased or it is an alias tag. */ | |
1365 if (flags & opf_def) | |
1366 append_vdef (var); | |
1367 else | |
1368 append_vuse (var); | |
1369 } | |
1370 else | |
1371 { | |
1372 bitmap_iterator bi; | |
1373 unsigned int i; | |
1374 bool none_added = true; | |
1375 | |
1376 /* The variable is aliased. Add its aliases to the virtual | |
1377 operands. */ | |
1378 gcc_assert (!bitmap_empty_p (aliases)); | |
1379 | |
1380 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi) | |
1381 { | |
1382 tree al = referenced_var (i); | |
1383 | |
1384 /* Call-clobbered tags may have non-call-clobbered | |
1385 symbols in their alias sets. Ignore them if we are | |
1386 adding VOPs for a call site. */ | |
1387 if (is_call_site && !is_call_clobbered (al)) | |
1388 continue; | |
1389 | |
1390 /* If we do not know the full reference tree or if the access is | |
1391 unspecified [0, -1], we cannot prune it. Otherwise try doing | |
1392 so using access_can_touch_variable. */ | |
1393 if (full_ref | |
1394 && !access_can_touch_variable (full_ref, al, offset, size)) | |
1395 continue; | |
1396 | |
1397 if (flags & opf_def) | |
1398 append_vdef (al); | |
1399 else | |
1400 append_vuse (al); | |
1401 none_added = false; | |
1402 } | |
1403 | |
1404 if (flags & opf_def) | |
1405 { | |
1406 /* If the variable is also an alias tag, add a virtual | |
1407 operand for it, otherwise we will miss representing | |
1408 references to the members of the variable's alias set. | |
1409 This fixes the bug in gcc.c-torture/execute/20020503-1.c. | |
1410 | |
1411 It is also necessary to add bare defs on clobbers for | |
1412 SMT's, so that bare SMT uses caused by pruning all the | |
1413 aliases will link up properly with calls. In order to | |
1414 keep the number of these bare defs we add down to the | |
1415 minimum necessary, we keep track of which SMT's were used | |
1416 alone in statement vdefs or VUSEs. */ | |
1417 if (none_added | |
1418 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG | |
1419 && is_call_site)) | |
1420 append_vdef (var); | |
1421 } | |
1422 else | |
1423 { | |
1424 /* Even if no aliases have been added, we still need to | |
1425 establish def-use and use-def chains, lest | |
1426 transformations think that this is not a memory | |
1427 reference. For an example of this scenario, see | |
1428 testsuite/g++.dg/opt/cleanup1.C. */ | |
1429 if (none_added) | |
1430 append_vuse (var); | |
1431 } | |
1432 } | |
1433 } | |
1434 | |
1435 | |
1436 /* Add *VAR_P to the appropriate operand array for statement STMT. | |
1437 FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register, | |
1438 it will be added to the statement's real operands, otherwise it is | |
1439 added to virtual operands. */ | |
1440 | |
1441 static void | |
1442 add_stmt_operand (tree *var_p, gimple stmt, int flags) | |
1443 { | |
1444 tree var, sym; | |
1445 var_ann_t v_ann; | |
1446 | |
1447 gcc_assert (SSA_VAR_P (*var_p)); | |
1448 | |
1449 var = *var_p; | |
1450 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var); | |
1451 v_ann = var_ann (sym); | |
1452 | |
1453 /* Mark statements with volatile operands. */ | |
1454 if (TREE_THIS_VOLATILE (sym)) | |
1455 gimple_set_has_volatile_ops (stmt, true); | |
1456 | |
1457 if (is_gimple_reg (sym)) | |
1458 { | |
1459 /* The variable is a GIMPLE register. Add it to real operands. */ | |
1460 if (flags & opf_def) | |
1461 append_def (var_p); | |
1462 else | |
1463 append_use (var_p); | |
1464 } | |
1465 else | |
1466 add_virtual_operand (var, stmt, flags, NULL_TREE, 0, -1, false); | |
1467 } | |
1468 | |
1469 /* Subroutine of get_indirect_ref_operands. ADDR is the address | |
1470 that is dereferenced, the meaning of the rest of the arguments | |
1471 is the same as in get_indirect_ref_operands. */ | |
1472 | |
1473 static void | |
1474 get_addr_dereference_operands (gimple stmt, tree *addr, int flags, | |
1475 tree full_ref, HOST_WIDE_INT offset, | |
1476 HOST_WIDE_INT size, bool recurse_on_base) | |
1477 { | |
1478 tree ptr = *addr; | |
1479 | |
1480 /* Mark the statement as having memory operands. */ | |
1481 gimple_set_references_memory (stmt, true); | |
1482 | |
1483 if (SSA_VAR_P (ptr)) | |
1484 { | |
1485 struct ptr_info_def *pi = NULL; | |
1486 | |
1487 /* If PTR has flow-sensitive points-to information, use it. */ | |
1488 if (TREE_CODE (ptr) == SSA_NAME | |
1489 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL | |
1490 && pi->name_mem_tag) | |
1491 { | |
1492 /* PTR has its own memory tag. Use it. */ | |
1493 add_virtual_operand (pi->name_mem_tag, stmt, flags, | |
1494 full_ref, offset, size, false); | |
1495 } | |
1496 else | |
1497 { | |
1498 /* If PTR is not an SSA_NAME or it doesn't have a name | |
1499 tag, use its symbol memory tag. */ | |
1500 var_ann_t v_ann; | |
1501 | |
1502 /* If we are emitting debugging dumps, display a warning if | |
1503 PTR is an SSA_NAME with no flow-sensitive alias | |
1504 information. That means that we may need to compute | |
1505 aliasing again or that a propagation pass forgot to | |
1506 update the alias information on the pointers. */ | |
1507 if (dump_file | |
1508 && TREE_CODE (ptr) == SSA_NAME | |
1509 && (pi == NULL | |
1510 || (pi->name_mem_tag == NULL_TREE | |
1511 && !pi->pt_anything)) | |
1512 && gimple_aliases_computed_p (cfun)) | |
1513 { | |
1514 fprintf (dump_file, | |
1515 "NOTE: no flow-sensitive alias info for "); | |
1516 print_generic_expr (dump_file, ptr, dump_flags); | |
1517 fprintf (dump_file, " in "); | |
1518 print_gimple_stmt (dump_file, stmt, 0, 0); | |
1519 } | |
1520 | |
1521 if (TREE_CODE (ptr) == SSA_NAME) | |
1522 ptr = SSA_NAME_VAR (ptr); | |
1523 v_ann = var_ann (ptr); | |
1524 | |
1525 /* If we don't know what this pointer points to then we have | |
1526 to make sure to not prune virtual operands based on offset | |
1527 and size. */ | |
1528 if (v_ann->symbol_mem_tag) | |
1529 { | |
1530 add_virtual_operand (v_ann->symbol_mem_tag, stmt, flags, | |
1531 full_ref, 0, -1, false); | |
1532 /* Make sure we add the SMT itself. */ | |
1533 if (!(flags & opf_no_vops)) | |
1534 { | |
1535 if (flags & opf_def) | |
1536 append_vdef (v_ann->symbol_mem_tag); | |
1537 else | |
1538 append_vuse (v_ann->symbol_mem_tag); | |
1539 } | |
1540 } | |
1541 | |
1542 /* Aliasing information is missing; mark statement as | |
1543 volatile so we won't optimize it out too actively. */ | |
1544 else if (!gimple_aliases_computed_p (cfun) | |
1545 && (flags & opf_def)) | |
1546 gimple_set_has_volatile_ops (stmt, true); | |
1547 } | |
1548 } | |
1549 else if (TREE_CODE (ptr) == INTEGER_CST) | |
1550 { | |
1551 /* If a constant is used as a pointer, we can't generate a real | |
1552 operand for it but we mark the statement volatile to prevent | |
1553 optimizations from messing things up. */ | |
1554 gimple_set_has_volatile_ops (stmt, true); | |
1555 return; | |
1556 } | |
1557 else | |
1558 { | |
1559 /* Ok, this isn't even is_gimple_min_invariant. Something's broken. */ | |
1560 gcc_unreachable (); | |
1561 } | |
1562 | |
1563 /* If requested, add a USE operand for the base pointer. */ | |
1564 if (recurse_on_base) | |
1565 get_expr_operands (stmt, addr, opf_use); | |
1566 } | |
1567 | |
1568 | |
1569 /* A subroutine of get_expr_operands to handle INDIRECT_REF, | |
1570 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. | |
1571 | |
1572 STMT is the statement being processed, EXPR is the INDIRECT_REF | |
1573 that got us here. | |
1574 | |
1575 FLAGS is as in get_expr_operands. | |
1576 | |
1577 FULL_REF contains the full pointer dereference expression, if we | |
1578 have it, or NULL otherwise. | |
1579 | |
1580 OFFSET and SIZE are the location of the access inside the | |
1581 dereferenced pointer, if known. | |
1582 | |
1583 RECURSE_ON_BASE should be set to true if we want to continue | |
1584 calling get_expr_operands on the base pointer, and false if | |
1585 something else will do it for us. */ | |
1586 | |
1587 static void | |
1588 get_indirect_ref_operands (gimple stmt, tree expr, int flags, tree full_ref, | |
1589 HOST_WIDE_INT offset, HOST_WIDE_INT size, | |
1590 bool recurse_on_base) | |
1591 { | |
1592 tree *pptr = &TREE_OPERAND (expr, 0); | |
1593 | |
1594 if (TREE_THIS_VOLATILE (expr)) | |
1595 gimple_set_has_volatile_ops (stmt, true); | |
1596 | |
1597 get_addr_dereference_operands (stmt, pptr, flags, full_ref, offset, size, | |
1598 recurse_on_base); | |
1599 } | |
1600 | |
1601 | |
1602 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */ | |
1603 | |
1604 static void | |
1605 get_tmr_operands (gimple stmt, tree expr, int flags) | |
1606 { | |
1607 tree tag; | |
1608 | |
1609 /* Mark the statement as having memory operands. */ | |
1610 gimple_set_references_memory (stmt, true); | |
1611 | |
1612 /* First record the real operands. */ | |
1613 get_expr_operands (stmt, &TMR_BASE (expr), opf_use); | |
1614 get_expr_operands (stmt, &TMR_INDEX (expr), opf_use); | |
1615 | |
1616 if (TMR_SYMBOL (expr)) | |
1617 gimple_add_to_addresses_taken (stmt, TMR_SYMBOL (expr)); | |
1618 | |
1619 tag = TMR_TAG (expr); | |
1620 if (!tag) | |
1621 { | |
1622 /* Something weird, so ensure that we will be careful. */ | |
1623 gimple_set_has_volatile_ops (stmt, true); | |
1624 return; | |
1625 } | |
1626 if (!MTAG_P (tag)) | |
1627 { | |
1628 get_expr_operands (stmt, &tag, flags); | |
1629 return; | |
1630 } | |
1631 | |
1632 add_virtual_operand (tag, stmt, flags, expr, 0, -1, false); | |
1633 } | |
1634 | |
1635 | |
1636 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call | |
1637 clobbered variables in the function. */ | |
1638 | |
1639 static void | |
1640 add_call_clobber_ops (gimple stmt, tree callee ATTRIBUTE_UNUSED) | |
1641 { | |
1642 unsigned u; | |
1643 bitmap_iterator bi; | |
1644 bitmap not_read_b, not_written_b; | |
1645 | |
1646 gcc_assert (!(gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))); | |
1647 | |
1648 /* If we created .GLOBAL_VAR earlier, just use it. */ | |
1649 if (gimple_global_var (cfun)) | |
1650 { | |
1651 tree var = gimple_global_var (cfun); | |
1652 add_virtual_operand (var, stmt, opf_def, NULL, 0, -1, true); | |
1653 return; | |
1654 } | |
1655 | |
1656 /* Get info for local and module level statics. There is a bit | |
1657 set for each static if the call being processed does not read | |
1658 or write that variable. */ | |
1659 not_read_b = callee ? ipa_reference_get_not_read_global (cgraph_node (callee)) : NULL; | |
1660 not_written_b = callee ? ipa_reference_get_not_written_global (cgraph_node (callee)) : NULL; | |
1661 | |
1662 /* Add a VDEF operand for every call clobbered variable. */ | |
1663 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi) | |
1664 { | |
1665 tree var = referenced_var_lookup (u); | |
1666 tree real_var = var; | |
1667 bool not_read; | |
1668 bool not_written; | |
1669 | |
1670 not_read = not_read_b | |
1671 ? bitmap_bit_p (not_read_b, DECL_UID (real_var)) | |
1672 : false; | |
1673 | |
1674 not_written = not_written_b | |
1675 ? bitmap_bit_p (not_written_b, DECL_UID (real_var)) | |
1676 : false; | |
1677 gcc_assert (!unmodifiable_var_p (var)); | |
1678 | |
1679 clobber_stats.clobbered_vars++; | |
1680 | |
1681 /* See if this variable is really clobbered by this function. */ | |
1682 | |
1683 if (not_written) | |
1684 { | |
1685 clobber_stats.static_write_clobbers_avoided++; | |
1686 if (!not_read) | |
1687 add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true); | |
1688 else | |
1689 clobber_stats.static_read_clobbers_avoided++; | |
1690 } | |
1691 else | |
1692 add_virtual_operand (var, stmt, opf_def, NULL, 0, -1, true); | |
1693 } | |
1694 } | |
1695 | |
1696 | |
1697 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the | |
1698 function. */ | |
1699 | |
1700 static void | |
1701 add_call_read_ops (gimple stmt, tree callee ATTRIBUTE_UNUSED) | |
1702 { | |
1703 unsigned u; | |
1704 bitmap_iterator bi; | |
1705 bitmap not_read_b; | |
1706 | |
1707 /* Const functions do not reference memory. */ | |
1708 if (gimple_call_flags (stmt) & ECF_CONST) | |
1709 return; | |
1710 | |
1711 not_read_b = callee ? ipa_reference_get_not_read_global (cgraph_node (callee)) : NULL; | |
1712 | |
1713 /* For pure functions we compute non-escaped uses separately. */ | |
1714 if (gimple_call_flags (stmt) & ECF_PURE) | |
1715 EXECUTE_IF_SET_IN_BITMAP (gimple_call_used_vars (cfun), 0, u, bi) | |
1716 { | |
1717 tree var = referenced_var_lookup (u); | |
1718 tree real_var = var; | |
1719 bool not_read; | |
1720 | |
1721 if (unmodifiable_var_p (var)) | |
1722 continue; | |
1723 | |
1724 not_read = not_read_b | |
1725 ? bitmap_bit_p (not_read_b, DECL_UID (real_var)) | |
1726 : false; | |
1727 | |
1728 clobber_stats.readonly_clobbers++; | |
1729 | |
1730 /* See if this variable is really used by this function. */ | |
1731 if (!not_read) | |
1732 add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true); | |
1733 else | |
1734 clobber_stats.static_readonly_clobbers_avoided++; | |
1735 } | |
1736 | |
1737 /* Add a VUSE for .GLOBAL_VAR if it has been created. See | |
1738 add_referenced_var for the heuristic used to decide whether to | |
1739 create .GLOBAL_VAR. */ | |
1740 if (gimple_global_var (cfun)) | |
1741 { | |
1742 tree var = gimple_global_var (cfun); | |
1743 add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true); | |
1744 return; | |
1745 } | |
1746 | |
1747 /* Add a VUSE for each call-clobbered variable. */ | |
1748 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi) | |
1749 { | |
1750 tree var = referenced_var (u); | |
1751 tree real_var = var; | |
1752 bool not_read; | |
1753 | |
1754 clobber_stats.readonly_clobbers++; | |
1755 | |
1756 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var)) | |
1757 : false; | |
1758 | |
1759 if (not_read) | |
1760 { | |
1761 clobber_stats.static_readonly_clobbers_avoided++; | |
1762 continue; | |
1763 } | |
1764 | |
1765 add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true); | |
1766 } | |
1767 } | |
1768 | |
1769 | |
1770 /* If STMT is a call that may clobber globals and other symbols that | |
1771 escape, add them to the VDEF/VUSE lists for it. */ | |
1772 | |
1773 static void | |
1774 maybe_add_call_clobbered_vops (gimple stmt) | |
1775 { | |
1776 int call_flags = gimple_call_flags (stmt); | |
1777 | |
1778 /* Mark the statement as having memory operands. */ | |
1779 gimple_set_references_memory (stmt, true); | |
1780 | |
1781 /* If aliases have been computed already, add VDEF or VUSE | |
1782 operands for all the symbols that have been found to be | |
1783 call-clobbered. */ | |
1784 if (gimple_aliases_computed_p (cfun) && !(call_flags & ECF_NOVOPS)) | |
1785 { | |
1786 /* A 'pure' or a 'const' function never call-clobbers anything. | |
1787 A 'noreturn' function might, but since we don't return anyway | |
1788 there is no point in recording that. */ | |
1789 if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN))) | |
1790 add_call_clobber_ops (stmt, gimple_call_fndecl (stmt)); | |
1791 else if (!(call_flags & ECF_CONST)) | |
1792 add_call_read_ops (stmt, gimple_call_fndecl (stmt)); | |
1793 } | |
1794 } | |
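/* Editorial example (not part of the GCC sources), assuming the usual
   function attributes: a call such as

       n = strlen (s);

   is "pure" (it reads memory but clobbers nothing), so it only reaches
   add_call_read_ops and receives VUSEs, whereas

       memset (p, 0, n);

   is neither pure nor const and reaches add_call_clobber_ops, which
   adds a VDEF for every call-clobbered symbol (or a single VDEF of
   .GLOBAL_VAR when that artificial variable exists).  */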
1795 | |
1796 | |
1797 /* Scan operands in the GIMPLE_ASM statement STMT. */ | |
1798 | |
1799 static void | |
1800 get_asm_expr_operands (gimple stmt) | |
1801 { | |
1802 size_t i, noutputs; | |
1803 const char **oconstraints; | |
1804 const char *constraint; | |
1805 bool allows_mem, allows_reg, is_inout; | |
1806 | |
1807 noutputs = gimple_asm_noutputs (stmt); | |
1808 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); | |
1809 | |
1810 /* Gather all output operands. */ | |
1811 for (i = 0; i < gimple_asm_noutputs (stmt); i++) | |
1812 { | |
1813 tree link = gimple_asm_output_op (stmt, i); | |
1814 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); | |
1815 oconstraints[i] = constraint; | |
1816 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, | |
1817 &allows_reg, &is_inout); | |
1818 | |
1819 /* This should have been split in gimplify_asm_expr. */ | |
1820 gcc_assert (!allows_reg || !is_inout); | |
1821 | |
1822 /* Memory operands are addressable. Note that STMT needs the | |
1823 address of this operand. */ | |
1824 if (!allows_reg && allows_mem) | |
1825 { | |
1826 tree t = get_base_address (TREE_VALUE (link)); | |
1827 if (t && DECL_P (t)) | |
1828 gimple_add_to_addresses_taken (stmt, t); | |
1829 } | |
1830 | |
1831 get_expr_operands (stmt, &TREE_VALUE (link), opf_def); | |
1832 } | |
1833 | |
1834 /* Gather all input operands. */ | |
1835 for (i = 0; i < gimple_asm_ninputs (stmt); i++) | |
1836 { | |
1837 tree link = gimple_asm_input_op (stmt, i); | |
1838 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); | |
1839 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints, | |
1840 &allows_mem, &allows_reg); | |
1841 | |
1842 /* Memory operands are addressable. Note that STMT needs the | |
1843 address of this operand. */ | |
1844 if (!allows_reg && allows_mem) | |
1845 { | |
1846 tree t = get_base_address (TREE_VALUE (link)); | |
1847 if (t && DECL_P (t)) | |
1848 gimple_add_to_addresses_taken (stmt, t); | |
1849 } | |
1850 | |
1851 get_expr_operands (stmt, &TREE_VALUE (link), 0); | |
1852 } | |
1853 | |
1854 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */ | |
1855 for (i = 0; i < gimple_asm_nclobbers (stmt); i++) | |
1856 { | |
1857 tree link = gimple_asm_clobber_op (stmt, i); | |
1858 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0) | |
1859 { | |
1860 unsigned i; | |
1861 bitmap_iterator bi; | |
1862 | |
1863 /* Mark the statement as having memory operands. */ | |
1864 gimple_set_references_memory (stmt, true); | |
1865 | |
1866 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi) | |
1867 { | |
1868 tree var = referenced_var (i); | |
1869 add_stmt_operand (&var, stmt, opf_def | opf_implicit); | |
1870 } | |
1871 | |
1872 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi) | |
1873 { | |
1874 tree var = referenced_var (i); | |
1875 add_stmt_operand (&var, stmt, opf_def | opf_implicit); | |
1876 } | |
1877 break; | |
1878 } | |
1879 } | |
1880 } | |
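/* Editorial example (not part of the GCC sources): the "memory" clobber
   handled by the last loop above is what turns an otherwise empty asm
   into a compiler-level barrier.  A statement such as

       __asm__ __volatile__ ("" : : : "memory");

   has no explicit inputs or outputs, yet it receives implicit VDEFs for
   every call-clobbered and addressable variable, so memory accesses
   cannot be moved across it.  */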
1881 | |
1882 | |
1883 /* Recursively scan the expression pointed to by EXPR_P in statement | |
1884 STMT. FLAGS is one of the OPF_* constants modifying how to | |
1885 interpret the operands found. */ | |
1886 | |
1887 static void | |
1888 get_expr_operands (gimple stmt, tree *expr_p, int flags) | |
1889 { | |
1890 enum tree_code code; | |
1891 enum tree_code_class codeclass; | |
1892 tree expr = *expr_p; | |
1893 | |
1894 if (expr == NULL) | |
1895 return; | |
1896 | |
1897 code = TREE_CODE (expr); | |
1898 codeclass = TREE_CODE_CLASS (code); | |
1899 | |
1900 switch (code) | |
1901 { | |
1902 case ADDR_EXPR: | |
1903 /* Taking the address of a variable does not represent a | |
1904 reference to it, but the fact that the statement takes its | |
1905 address will be of interest to some passes (e.g. alias | |
1906 resolution). */ | |
1907 gimple_add_to_addresses_taken (stmt, TREE_OPERAND (expr, 0)); | |
1908 | |
1909 /* If the address is invariant, there may be no interesting | |
1910 variable references inside. */ | |
1911 if (is_gimple_min_invariant (expr)) | |
1912 return; | |
1913 | |
1914 /* Otherwise, there may be variables referenced inside but there | |
1915 should be no VUSEs created, since the referenced objects are | |
1916 not really accessed. The only operands that we should find | |
1917 here are ARRAY_REF indices which will always be real operands | |
1918 (GIMPLE does not allow non-registers as array indices). */ | |
1919 flags |= opf_no_vops; | |
1920 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
1921 return; | |
1922 | |
1923 case SSA_NAME: | |
1924 case SYMBOL_MEMORY_TAG: | |
1925 case NAME_MEMORY_TAG: | |
1926 add_stmt_operand (expr_p, stmt, flags); | |
1927 return; | |
1928 | |
1929 case VAR_DECL: | |
1930 case PARM_DECL: | |
1931 case RESULT_DECL: | |
1932 add_stmt_operand (expr_p, stmt, flags); | |
1933 return; | |
1934 | |
1935 case MISALIGNED_INDIRECT_REF: | |
1936 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); | |
1937 /* fall through */ | |
1938 | |
1939 case ALIGN_INDIRECT_REF: | |
1940 case INDIRECT_REF: | |
1941 get_indirect_ref_operands (stmt, expr, flags, expr, 0, -1, true); | |
1942 return; | |
1943 | |
1944 case TARGET_MEM_REF: | |
1945 get_tmr_operands (stmt, expr, flags); | |
1946 return; | |
1947 | |
1948 case ARRAY_REF: | |
1949 case ARRAY_RANGE_REF: | |
1950 case COMPONENT_REF: | |
1951 case REALPART_EXPR: | |
1952 case IMAGPART_EXPR: | |
1953 { | |
1954 tree ref; | |
1955 HOST_WIDE_INT offset, size, maxsize; | |
1956 | |
1957 if (TREE_THIS_VOLATILE (expr)) | |
1958 gimple_set_has_volatile_ops (stmt, true); | |
1959 | |
1960 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize); | |
1961 if (TREE_CODE (ref) == INDIRECT_REF) | |
1962 { | |
1963 get_indirect_ref_operands (stmt, ref, flags, expr, offset, | |
1964 maxsize, false); | |
1965 flags |= opf_no_vops; | |
1966 } | |
1967 | |
1968 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
1969 | |
1970 if (code == COMPONENT_REF) | |
1971 { | |
1972 if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1))) | |
1973 gimple_set_has_volatile_ops (stmt, true); | |
1974 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use); | |
1975 } | |
1976 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF) | |
1977 { | |
1978 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use); | |
1979 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use); | |
1980 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use); | |
1981 } | |
1982 | |
1983 return; | |
1984 } | |
1985 | |
1986 case WITH_SIZE_EXPR: | |
1987 /* WITH_SIZE_EXPR is a pass-through reference to its first argument, | |
1988 and an rvalue reference to its second argument. */ | |
1989 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use); | |
1990 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
1991 return; | |
1992 | |
1993 case COND_EXPR: | |
1994 case VEC_COND_EXPR: | |
1995 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use); | |
1996 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use); | |
1997 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use); | |
1998 return; | |
1999 | |
2000 case CONSTRUCTOR: | |
2001 { | |
2002 /* General aggregate CONSTRUCTORs have been decomposed, but they | |
2003 are still in use as the COMPLEX_EXPR equivalent for vectors. */ | |
2004 constructor_elt *ce; | |
2005 unsigned HOST_WIDE_INT idx; | |
2006 | |
2007 for (idx = 0; | |
2008 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce); | |
2009 idx++) | |
2010 get_expr_operands (stmt, &ce->value, opf_use); | |
2011 | |
2012 return; | |
2013 } | |
2014 | |
2015 case BIT_FIELD_REF: | |
2016 if (TREE_THIS_VOLATILE (expr)) | |
2017 gimple_set_has_volatile_ops (stmt, true); | |
2018 /* FALLTHRU */ | |
2019 | |
2020 case TRUTH_NOT_EXPR: | |
2021 case VIEW_CONVERT_EXPR: | |
2022 do_unary: | |
2023 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
2024 return; | |
2025 | |
2026 case TRUTH_AND_EXPR: | |
2027 case TRUTH_OR_EXPR: | |
2028 case TRUTH_XOR_EXPR: | |
2029 case COMPOUND_EXPR: | |
2030 case OBJ_TYPE_REF: | |
2031 case ASSERT_EXPR: | |
2032 do_binary: | |
2033 { | |
2034 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
2035 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); | |
2036 return; | |
2037 } | |
2038 | |
2039 case DOT_PROD_EXPR: | |
2040 case REALIGN_LOAD_EXPR: | |
2041 { | |
2042 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
2043 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); | |
2044 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags); | |
2045 return; | |
2046 } | |
2047 | |
2048 case CHANGE_DYNAMIC_TYPE_EXPR: | |
2049 gcc_unreachable (); | |
2050 | |
2051 case FUNCTION_DECL: | |
2052 case LABEL_DECL: | |
2053 case CONST_DECL: | |
2054 case CASE_LABEL_EXPR: | |
2055 case FILTER_EXPR: | |
2056 case EXC_PTR_EXPR: | |
2057 /* Expressions that make no memory references. */ | |
2058 return; | |
2059 | |
2060 default: | |
2061 if (codeclass == tcc_unary) | |
2062 goto do_unary; | |
2063 if (codeclass == tcc_binary || codeclass == tcc_comparison) | |
2064 goto do_binary; | |
2065 if (codeclass == tcc_constant || codeclass == tcc_type) | |
2066 return; | |
2067 } | |
2068 | |
2069 /* If we get here, something has gone wrong. */ | |
2070 #ifdef ENABLE_CHECKING | |
2071 fprintf (stderr, "unhandled expression in get_expr_operands():\n"); | |
2072 debug_tree (expr); | |
2073 fputs ("\n", stderr); | |
2074 #endif | |
2075 gcc_unreachable (); | |
2076 } | |
2077 | |
2078 | |
2079 /* Parse STMT looking for operands. When finished, the various | |
2080 build_* operand vectors will have potential operands in them. */ | |
2081 | |
2082 static void | |
2083 parse_ssa_operands (gimple stmt) | |
2084 { | |
2085 enum gimple_code code = gimple_code (stmt); | |
2086 | |
2087 if (code == GIMPLE_ASM) | |
2088 get_asm_expr_operands (stmt); | |
2089 else | |
2090 { | |
2091 size_t i, start = 0; | |
2092 | |
2093 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL) | |
2094 { | |
2095 get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def); | |
2096 start = 1; | |
2097 } | |
2098 | |
2099 for (i = start; i < gimple_num_ops (stmt); i++) | |
2100 get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use); | |
2101 | |
2102 /* Add call-clobbered operands, if needed. */ | |
2103 if (code == GIMPLE_CALL) | |
2104 maybe_add_call_clobbered_vops (stmt); | |
19 | 2105 | |
2106 /* Make sure the return value is addressable in case of NRV. */ | |
2107 if (code == GIMPLE_CALL | |
2108 && gimple_call_lhs (stmt) != NULL_TREE | |
2109 && gimple_call_return_slot_opt_p (stmt) | |
2110 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt)))) | |
2111 gimple_add_to_addresses_taken (stmt, gimple_call_lhs (stmt)); | |
0 | 2112 } |
2113 } | |
2114 | |
2115 | |
2116 /* Create an operands cache for STMT. */ | |
2117 | |
2118 static void | |
2119 build_ssa_operands (gimple stmt) | |
2120 { | |
2121 /* Initially assume that the statement has no volatile operands and | |
2122 makes no memory references. */ | |
2123 gimple_set_has_volatile_ops (stmt, false); | |
2124 gimple_set_references_memory (stmt, false); | |
2125 | |
2126 /* Just clear the bitmap so we don't end up reallocating it over and over. */ | |
2127 if (gimple_addresses_taken (stmt)) | |
2128 bitmap_clear (gimple_addresses_taken (stmt)); | |
2129 | |
2130 start_ssa_stmt_operands (); | |
2131 parse_ssa_operands (stmt); | |
2132 operand_build_sort_virtual (build_vuses); | |
2133 operand_build_sort_virtual (build_vdefs); | |
2134 finalize_ssa_stmt_operands (stmt); | |
2135 | |
2136 /* For added safety, assume that statements with volatile operands | |
2137 also reference memory. */ | |
2138 if (gimple_has_volatile_ops (stmt)) | |
2139 gimple_set_references_memory (stmt, true); | |
2140 } | |
2141 | |
2142 | |
2143 /* Releases the operands of STMT back to their freelists, and clears | |
2144 the stmt operand lists. */ | |
2145 | |
2146 void | |
2147 free_stmt_operands (gimple stmt) | |
2148 { | |
2149 def_optype_p defs = gimple_def_ops (stmt), last_def; | |
2150 use_optype_p uses = gimple_use_ops (stmt), last_use; | |
2151 voptype_p vuses = gimple_vuse_ops (stmt); | |
2152 voptype_p vdefs = gimple_vdef_ops (stmt), vdef, next_vdef; | |
2153 unsigned i; | |
2154 | |
2155 if (defs) | |
2156 { | |
2157 for (last_def = defs; last_def->next; last_def = last_def->next) | |
2158 continue; | |
2159 last_def->next = gimple_ssa_operands (cfun)->free_defs; | |
2160 gimple_ssa_operands (cfun)->free_defs = defs; | |
2161 gimple_set_def_ops (stmt, NULL); | |
2162 } | |
2163 | |
2164 if (uses) | |
2165 { | |
2166 for (last_use = uses; last_use->next; last_use = last_use->next) | |
2167 delink_imm_use (USE_OP_PTR (last_use)); | |
2168 delink_imm_use (USE_OP_PTR (last_use)); | |
2169 last_use->next = gimple_ssa_operands (cfun)->free_uses; | |
2170 gimple_ssa_operands (cfun)->free_uses = uses; | |
2171 gimple_set_use_ops (stmt, NULL); | |
2172 } | |
2173 | |
2174 if (vuses) | |
2175 { | |
2176 for (i = 0; i < VUSE_NUM (vuses); i++) | |
2177 delink_imm_use (VUSE_OP_PTR (vuses, i)); | |
2178 add_vop_to_freelist (vuses); | |
2179 gimple_set_vuse_ops (stmt, NULL); | |
2180 } | |
2181 | |
2182 if (vdefs) | |
2183 { | |
2184 for (vdef = vdefs; vdef; vdef = next_vdef) | |
2185 { | |
2186 next_vdef = vdef->next; | |
2187 delink_imm_use (VDEF_OP_PTR (vdef, 0)); | |
2188 add_vop_to_freelist (vdef); | |
2189 } | |
2190 gimple_set_vdef_ops (stmt, NULL); | |
2191 } | |
2192 | |
2193 if (gimple_has_ops (stmt)) | |
2194 gimple_set_addresses_taken (stmt, NULL); | |
2195 | |
2196 if (gimple_has_mem_ops (stmt)) | |
2197 { | |
2198 gimple_set_stored_syms (stmt, NULL, &operands_bitmap_obstack); | |
2199 gimple_set_loaded_syms (stmt, NULL, &operands_bitmap_obstack); | |
2200 } | |
2201 } | |
2202 | |
2203 | |
2204 /* Get the operands of statement STMT. */ | |
2205 | |
2206 void | |
2207 update_stmt_operands (gimple stmt) | |
2208 { | |
2209 /* If update_stmt_operands is called before SSA is initialized, do | |
2210 nothing. */ | |
2211 if (!ssa_operands_active ()) | |
2212 return; | |
2213 | |
2214 timevar_push (TV_TREE_OPS); | |
2215 | |
2216 gcc_assert (gimple_modified_p (stmt)); | |
2217 build_ssa_operands (stmt); | |
2218 gimple_set_modified (stmt, false); | |
2219 | |
2220 timevar_pop (TV_TREE_OPS); | |
2221 } | |
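/* Editorial usage sketch (not part of the GCC sources): callers normally
   reach this function through update_stmt.  A caller invoking it
   directly must first mark the statement as modified, or the assertion
   above fires:

       gimple_set_modified (stmt, true);
       update_stmt_operands (stmt);

   which rebuilds the operand cache and clears the modified flag again.  */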
2222 | |
2223 | |
2224 /* Copies virtual operands from SRC to DST. */ | |
2225 | |
2226 void | |
2227 copy_virtual_operands (gimple dest, gimple src) | |
2228 { | |
2229 unsigned int i, n; | |
2230 voptype_p src_vuses, dest_vuses; | |
2231 voptype_p src_vdefs, dest_vdefs; | |
2232 struct voptype_d vuse; | |
2233 struct voptype_d vdef; | |
2234 | |
2235 if (!gimple_has_mem_ops (src)) | |
2236 return; | |
2237 | |
2238 gimple_set_vdef_ops (dest, NULL); | |
2239 gimple_set_vuse_ops (dest, NULL); | |
2240 | |
2241 gimple_set_stored_syms (dest, gimple_stored_syms (src), | |
2242 &operands_bitmap_obstack); | |
2243 gimple_set_loaded_syms (dest, gimple_loaded_syms (src), | |
2244 &operands_bitmap_obstack); | |
2245 | |
2246 /* Copy all the VUSE operators and corresponding operands. */ | |
2247 dest_vuses = &vuse; | |
2248 for (src_vuses = gimple_vuse_ops (src); | |
2249 src_vuses; | |
2250 src_vuses = src_vuses->next) | |
2251 { | |
2252 n = VUSE_NUM (src_vuses); | |
2253 dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses); | |
2254 for (i = 0; i < n; i++) | |
2255 SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i)); | |
2256 | |
2257 if (gimple_vuse_ops (dest) == NULL) | |
2258 gimple_set_vuse_ops (dest, vuse.next); | |
2259 } | |
2260 | |
2261 /* Copy all the VDEF operators and corresponding operands. */ | |
2262 dest_vdefs = &vdef; | |
2263 for (src_vdefs = gimple_vdef_ops (src); | |
2264 src_vdefs; | |
2265 src_vdefs = src_vdefs->next) | |
2266 { | |
2267 n = VUSE_NUM (src_vdefs); | |
2268 dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs); | |
2269 VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs); | |
2270 for (i = 0; i < n; i++) | |
2271 SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i)); | |
2272 | |
2273 if (gimple_vdef_ops (dest) == NULL) | |
2274 gimple_set_vdef_ops (dest, vdef.next); | |
2275 } | |
2276 } | |
2277 | |
2278 | |
2279 /* Specifically for use in DOM's expression analysis. Given a store, we | |
2280 create an artificial stmt which looks like a load from the store; this can | |
2281 be used to eliminate redundant loads. OLD_STMT is the store statement | |
2282 whose stored values the artificial load represents, and NEW_STMT is the | |
2283 new load itself. If DELINK_IMM_USES_P is true, the immediate | |
2284 uses of NEW_STMT will be de-linked. */ | |
2285 | |
2286 void | |
2287 create_ssa_artificial_load_stmt (gimple new_stmt, gimple old_stmt, | |
2288 bool delink_imm_uses_p) | |
2289 { | |
2290 tree op; | |
2291 ssa_op_iter iter; | |
2292 use_operand_p use_p; | |
2293 unsigned i; | |
2294 | |
2295 gimple_set_modified (new_stmt, false); | |
2296 | |
2297 /* Process NEW_STMT looking for operands. */ | |
2298 start_ssa_stmt_operands (); | |
2299 parse_ssa_operands (new_stmt); | |
2300 | |
2301 for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++) | |
2302 if (TREE_CODE (op) != SSA_NAME) | |
2303 var_ann (op)->in_vuse_list = false; | |
2304 | |
2305 for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++) | |
2306 if (TREE_CODE (op) != SSA_NAME) | |
2307 var_ann (op)->in_vdef_list = false; | |
2308 | |
2309 /* Remove any virtual operands that were found. */ | |
2310 VEC_truncate (tree, build_vdefs, 0); | |
2311 VEC_truncate (tree, build_vuses, 0); | |
2312 | |
2313 /* Clear the loads and stores bitmaps. */ | |
2314 bitmap_clear (build_loads); | |
2315 bitmap_clear (build_stores); | |
2316 | |
2317 /* For each VDEF on the original statement, we want to create a | |
2318 VUSE of the VDEF result operand on the new statement. */ | |
2319 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF) | |
2320 append_vuse (op); | |
2321 | |
2322 finalize_ssa_stmt_operands (new_stmt); | |
2323 | |
2324 /* All uses in this fake stmt must not be in the immediate use lists. */ | |
2325 if (delink_imm_uses_p) | |
2326 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES) | |
2327 delink_imm_use (use_p); | |
2328 } | |
2329 | |
2330 | |
2331 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made | |
2332 to check the validity of the swap operation. */ | |
2333 | |
2334 void | |
2335 swap_tree_operands (gimple stmt, tree *exp0, tree *exp1) | |
2336 { | |
2337 tree op0, op1; | |
2338 op0 = *exp0; | |
2339 op1 = *exp1; | |
2340 | |
2341 /* If the operand cache is active, attempt to preserve the relative | |
2342 positions of these two operands in their respective immediate use | |
2343 lists. */ | |
2344 if (ssa_operands_active () && op0 != op1) | |
2345 { | |
2346 use_optype_p use0, use1, ptr; | |
2347 use0 = use1 = NULL; | |
2348 | |
2349 /* Find the 2 operands in the cache, if they are there. */ | |
2350 for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) | |
2351 if (USE_OP_PTR (ptr)->use == exp0) | |
2352 { | |
2353 use0 = ptr; | |
2354 break; | |
2355 } | |
2356 | |
2357 for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) | |
2358 if (USE_OP_PTR (ptr)->use == exp1) | |
2359 { | |
2360 use1 = ptr; | |
2361 break; | |
2362 } | |
2363 | |
2364 /* If either operand lacks an entry in the cache, there isn't much we can | |
2365 do at this point. Presumably we don't need to worry about it. */ | |
2366 if (use0 && use1) | |
2367 { | |
2368 tree *tmp = USE_OP_PTR (use1)->use; | |
2369 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use; | |
2370 USE_OP_PTR (use0)->use = tmp; | |
2371 } | |
2372 } | |
2373 | |
2374 /* Now swap the data. */ | |
2375 *exp0 = op1; | |
2376 *exp1 = op0; | |
2377 } | |
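/* Editorial usage sketch (not part of the GCC sources): for a commutative
   binary assignment "x = a + b", a pass could swap the two RHS operands
   while keeping the immediate-use lists consistent with

       swap_tree_operands (stmt,
                           gimple_op_ptr (stmt, 1),
                           gimple_op_ptr (stmt, 2));

   gimple_op_ptr is the same accessor used by parse_ssa_operands above;
   operand 0 is the LHS and operands 1 and 2 are the two RHS values.  */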
2378 | |
2379 /* Add the base address of REF to SET. */ | |
2380 | |
2381 void | |
2382 add_to_addressable_set (tree ref, bitmap *set) | |
2383 { | |
2384 tree var; | |
2385 | |
2386 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF | |
2387 as the only thing we take the address of. If VAR is a structure, | |
2388 taking the address of a field means that the whole structure may | |
2389 be referenced using pointer arithmetic. See PR 21407 and the | |
2390 ensuing mailing list discussion. */ | |
2391 var = get_base_address (ref); | |
2392 if (var && SSA_VAR_P (var)) | |
2393 { | |
2394 if (*set == NULL) | |
2395 *set = BITMAP_ALLOC (&operands_bitmap_obstack); | |
2396 | |
2397 bitmap_set_bit (*set, DECL_UID (var)); | |
2398 TREE_ADDRESSABLE (var) = 1; | |
2399 } | |
2400 } | |
2401 | |
2402 | |
2403 /* Add the base address of REF to the set of addresses taken by STMT. | |
2404 REF may be a single variable whose address has been taken or any | |
2405 other valid GIMPLE memory reference (structure reference, array, | |
2406 etc). If the base address of REF is a decl that has sub-variables, | |
2407 also add all of its sub-variables. */ | |
2408 | |
2409 void | |
2410 gimple_add_to_addresses_taken (gimple stmt, tree ref) | |
2411 { | |
2412 gcc_assert (gimple_has_ops (stmt)); | |
2413 add_to_addressable_set (ref, gimple_addresses_taken_ptr (stmt)); | |
2414 } | |
2415 | |
2416 | |
2417 /* Scan the immediate_use list for VAR making sure it is linked properly. | |
2418 Return TRUE if there is a problem and emit an error message to F. */ | |
2419 | |
2420 bool | |
2421 verify_imm_links (FILE *f, tree var) | |
2422 { | |
2423 use_operand_p ptr, prev, list; | |
2424 int count; | |
2425 | |
2426 gcc_assert (TREE_CODE (var) == SSA_NAME); | |
2427 | |
2428 list = &(SSA_NAME_IMM_USE_NODE (var)); | |
2429 gcc_assert (list->use == NULL); | |
2430 | |
2431 if (list->prev == NULL) | |
2432 { | |
2433 gcc_assert (list->next == NULL); | |
2434 return false; | |
2435 } | |
2436 | |
2437 prev = list; | |
2438 count = 0; | |
2439 for (ptr = list->next; ptr != list; ) | |
2440 { | |
2441 if (prev != ptr->prev) | |
2442 goto error; | |
2443 | |
2444 if (ptr->use == NULL) | |
2445 goto error; /* 2 roots, or SAFE guard node. */ | |
2446 else if (*(ptr->use) != var) | |
2447 goto error; | |
2448 | |
2449 prev = ptr; | |
2450 ptr = ptr->next; | |
2451 | |
2452 /* Avoid infinite loops. 50,000,000 uses probably indicates a | |
2453 problem. */ | |
2454 if (count++ > 50000000) | |
2455 goto error; | |
2456 } | |
2457 | |
2458 /* Verify list in the other direction. */ | |
2459 prev = list; | |
2460 for (ptr = list->prev; ptr != list; ) | |
2461 { | |
2462 if (prev != ptr->next) | |
2463 goto error; | |
2464 prev = ptr; | |
2465 ptr = ptr->prev; | |
2466 if (count-- < 0) | |
2467 goto error; | |
2468 } | |
2469 | |
2470 if (count != 0) | |
2471 goto error; | |
2472 | |
2473 return false; | |
2474 | |
2475 error: | |
2476 if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt)) | |
2477 { | |
2478 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt); | |
2479 print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM); | |
2480 } | |
2481 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr, | |
2482 (void *)ptr->use); | |
2483 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM); | |
2484 fprintf (f, "\n"); | |
2485 return true; | |
2486 } | |
2487 | |
2488 | |
2489 /* Dump all the immediate uses of VAR to FILE. */ | |
2490 | |
2491 void | |
2492 dump_immediate_uses_for (FILE *file, tree var) | |
2493 { | |
2494 imm_use_iterator iter; | |
2495 use_operand_p use_p; | |
2496 | |
2497 gcc_assert (var && TREE_CODE (var) == SSA_NAME); | |
2498 | |
2499 print_generic_expr (file, var, TDF_SLIM); | |
2500 fprintf (file, " : -->"); | |
2501 if (has_zero_uses (var)) | |
2502 fprintf (file, " no uses.\n"); | |
2503 else | |
2504 if (has_single_use (var)) | |
2505 fprintf (file, " single use.\n"); | |
2506 else | |
2507 fprintf (file, "%d uses.\n", num_imm_uses (var)); | |
2508 | |
2509 FOR_EACH_IMM_USE_FAST (use_p, iter, var) | |
2510 { | |
2511 if (use_p->loc.stmt == NULL && use_p->use == NULL) | |
2512 fprintf (file, "***end of stmt iterator marker***\n"); | |
2513 else | |
2514 if (!is_gimple_reg (USE_FROM_PTR (use_p))) | |
2515 print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); | |
2516 else | |
2517 print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); | |
2518 } | |
2519 fprintf (file, "\n"); | |
2520 } | |
2521 | |
2522 | |
2523 /* Dump all the immediate uses to FILE. */ | |
2524 | |
2525 void | |
2526 dump_immediate_uses (FILE *file) | |
2527 { | |
2528 tree var; | |
2529 unsigned int x; | |
2530 | |
2531 fprintf (file, "Immediate_uses: \n\n"); | |
2532 for (x = 1; x < num_ssa_names; x++) | |
2533 { | |
2534 var = ssa_name (x); | |
2535 if (!var) | |
2536 continue; | |
2537 dump_immediate_uses_for (file, var); | |
2538 } | |
2539 } | |
2540 | |
2541 | |
2542 /* Dump def-use edges on stderr. */ | |
2543 | |
2544 void | |
2545 debug_immediate_uses (void) | |
2546 { | |
2547 dump_immediate_uses (stderr); | |
2548 } | |
2549 | |
2550 | |
2551 /* Dump def-use edges for VAR on stderr. */ | |
2552 | |
2553 void | |
2554 debug_immediate_uses_for (tree var) | |
2555 { | |
2556 dump_immediate_uses_for (stderr, var); | |
2557 } | |
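/* Editorial note (not part of the GCC sources): the debug_* entry points
   above are meant to be called by hand from a debugger, e.g.

       (gdb) call debug_immediate_uses ()
       (gdb) call debug_immediate_uses_for (ssa_name (12))

   where ssa_name is the accessor already used by dump_immediate_uses
   and 12 is just an illustrative SSA version number.  */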
2558 | |
2559 | |
2560 /* Create a new change buffer for the statement pointed to by STMT_P and | |
2561 push the buffer into SCB_STACK. Each change buffer | |
2562 records state information needed to determine what changed in the | |
2563 statement. Mainly, this keeps track of symbols that may need to be | |
2564 put into SSA form, SSA name replacements and other information | |
2565 needed to keep the SSA form up to date. */ | |
2566 | |
2567 void | |
2568 push_stmt_changes (gimple *stmt_p) | |
2569 { | |
2570 gimple stmt; | |
2571 scb_t buf; | |
2572 | |
2573 stmt = *stmt_p; | |
2574 | |
2575 /* It makes no sense to keep track of PHI nodes. */ | |
2576 if (gimple_code (stmt) == GIMPLE_PHI) | |
2577 return; | |
2578 | |
2579 buf = XNEW (struct scb_d); | |
2580 memset (buf, 0, sizeof *buf); | |
2581 | |
2582 buf->stmt_p = stmt_p; | |
2583 | |
2584 if (gimple_references_memory_p (stmt)) | |
2585 { | |
2586 tree op; | |
2587 ssa_op_iter i; | |
2588 | |
2589 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE) | |
2590 { | |
2591 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op; | |
2592 if (buf->loads == NULL) | |
2593 buf->loads = BITMAP_ALLOC (NULL); | |
2594 bitmap_set_bit (buf->loads, DECL_UID (sym)); | |
2595 } | |
2596 | |
2597 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF) | |
2598 { | |
2599 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op; | |
2600 if (buf->stores == NULL) | |
2601 buf->stores = BITMAP_ALLOC (NULL); | |
2602 bitmap_set_bit (buf->stores, DECL_UID (sym)); | |
2603 } | |
2604 } | |
2605 | |
2606 VEC_safe_push (scb_t, heap, scb_stack, buf); | |
2607 } | |
2608 | |
2609 | |
2610 /* Given two sets S1 and S2, mark the symbols that differ in S1 and S2 | |
2611 for renaming. The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1). */ | |
2612 | |
2613 static void | |
2614 mark_difference_for_renaming (bitmap s1, bitmap s2) | |
2615 { | |
2616 if (s1 == NULL && s2 == NULL) | |
2617 return; | |
2618 | |
2619 if (s1 && s2 == NULL) | |
2620 mark_set_for_renaming (s1); | |
2621 else if (s1 == NULL && s2) | |
2622 mark_set_for_renaming (s2); | |
2623 else if (!bitmap_equal_p (s1, s2)) | |
2624 { | |
2625 bitmap t1 = BITMAP_ALLOC (NULL); | |
2626 bitmap_xor (t1, s1, s2); | |
2627 mark_set_for_renaming (t1); | |
2628 BITMAP_FREE (t1); | |
2629 } | |
2630 } | |
2631 | |
2632 | |
2633 /* Pop the top SCB from SCB_STACK and act on the differences between | |
2634 what was recorded by push_stmt_changes and the current state of | |
2635 the statement. */ | |
2636 | |
2637 void | |
2638 pop_stmt_changes (gimple *stmt_p) | |
2639 { | |
2640 tree op; | |
2641 gimple stmt; | |
2642 ssa_op_iter iter; | |
2643 bitmap loads, stores; | |
2644 scb_t buf; | |
2645 | |
2646 stmt = *stmt_p; | |
2647 | |
2648 /* It makes no sense to keep track of PHI nodes. */ | |
2649 if (gimple_code (stmt) == GIMPLE_PHI) | |
2650 return; | |
2651 | |
2652 buf = VEC_pop (scb_t, scb_stack); | |
2653 gcc_assert (stmt_p == buf->stmt_p); | |
2654 | |
2655 /* Force an operand re-scan on the statement and mark any newly | |
2656 exposed variables. */ | |
2657 update_stmt (stmt); | |
2658 | |
2659 /* Determine whether any memory symbols need to be renamed. If the | |
2660 sets of loads and stores are different after the statement is | |
2661 modified, then the affected symbols need to be renamed. | |
2662 | |
2663 Note that it may be possible for the statement to not reference | |
2664 memory anymore, but we still need to act on the differences in | |
2665 the sets of symbols. */ | |
2666 loads = stores = NULL; | |
2667 if (gimple_references_memory_p (stmt)) | |
2668 { | |
2669 tree op; | |
2670 ssa_op_iter i; | |
2671 | |
2672 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE) | |
2673 { | |
2674 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op; | |
2675 if (loads == NULL) | |
2676 loads = BITMAP_ALLOC (NULL); | |
2677 bitmap_set_bit (loads, DECL_UID (sym)); | |
2678 } | |
2679 | |
2680 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF) | |
2681 { | |
2682 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op; | |
2683 if (stores == NULL) | |
2684 stores = BITMAP_ALLOC (NULL); | |
2685 bitmap_set_bit (stores, DECL_UID (sym)); | |
2686 } | |
2687 } | |
2688 | |
2689 /* If LOADS is different from BUF->LOADS, the affected | |
2690 symbols need to be marked for renaming. */ | |
2691 mark_difference_for_renaming (loads, buf->loads); | |
2692 | |
2693 /* Similarly for STORES and BUF->STORES. */ | |
2694 mark_difference_for_renaming (stores, buf->stores); | |
2695 | |
2696 /* Mark all the naked GIMPLE register operands for renaming. */ | |
2697 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE) | |
2698 if (DECL_P (op)) | |
2699 mark_sym_for_renaming (op); | |
2700 | |
2701 /* FIXME, need to add more finalizers here. Cleanup EH info, | |
2702 recompute invariants for address expressions, add | |
2703 SSA replacement mappings, etc. For instance, given | |
2704 testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of | |
2705 the form: | |
2706 | |
2707 # SMT.4_20 = VDEF <SMT.4_16> | |
2708 D.1576_11 = 1.0e+0; | |
2709 | |
2710 So, the VDEF will disappear, but instead of marking SMT.4 for | |
2711 renaming it would be far more efficient to establish a | |
2712 replacement mapping that would replace every reference of | |
2713 SMT.4_20 with SMT.4_16. */ | |
2714 | |
2715 /* Free memory used by the buffer. */ | |
2716 BITMAP_FREE (buf->loads); | |
2717 BITMAP_FREE (buf->stores); | |
2718 BITMAP_FREE (loads); | |
2719 BITMAP_FREE (stores); | |
2720 buf->stmt_p = NULL; | |
2721 free (buf); | |
2722 } | |
2723 | |
2724 | |
2725 /* Discard the topmost change buffer from SCB_STACK. This is useful | |
2726 when the caller realizes that it did not actually modify the | |
2727 statement. It avoids the expensive operand re-scan. */ | |
2728 | |
2729 void | |
2730 discard_stmt_changes (gimple *stmt_p) | |
2731 { | |
2732 scb_t buf; | |
2733 gimple stmt; | |
2734 | |
2735 /* It makes no sense to keep track of PHI nodes. */ | |
2736 stmt = *stmt_p; | |
2737 if (gimple_code (stmt) == GIMPLE_PHI) | |
2738 return; | |
2739 | |
2740 buf = VEC_pop (scb_t, scb_stack); | |
2741 gcc_assert (stmt_p == buf->stmt_p); | |
2742 | |
2743 /* Free memory used by the buffer. */ | |
2744 BITMAP_FREE (buf->loads); | |
2745 BITMAP_FREE (buf->stores); | |
2746 buf->stmt_p = NULL; | |
2747 free (buf); | |
2748 } |
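/* Editorial usage sketch (not part of the GCC sources): push_stmt_changes,
   pop_stmt_changes and discard_stmt_changes form a bracketing protocol
   for passes that may rewrite a statement in place.  Assuming a
   hypothetical pass-local helper rewrite_stmt_in_place, a caller would
   look like

       push_stmt_changes (&stmt);
       if (rewrite_stmt_in_place (stmt))
         pop_stmt_changes (&stmt);       (re-scan, mark symbols for renaming)
       else
         discard_stmt_changes (&stmt);   (nothing changed, skip the re-scan)
*/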