gcc/tree-ssa-sccvn.c @ 0:a06113de4d67 ("first commit")
author:   kent <kent@cr.ie.u-ryukyu.ac.jp>
date:     Fri, 17 Jul 2009 14:47:48 +0900
parents:  (none)
children: 58ad6c70ea60
/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming
   from *outside of that SCC*, so we do not need to do anything special
   to ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
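/* Editor's sketch (not part of the original comment): alternative 2
   at work on a minimal SSA cycle.  For the loop

       i_1 = PHI <0 (entry), i_2 (latch)>
       j_1 = PHI <0 (entry), j_2 (latch)>
       i_2 = i_1 + 1;
       j_2 = j_1 + 1;

   the SCC walk first value numbers the cycle optimistically: i_1 and
   j_1 get the same value number (their argument lists match), so
   "i_1 + 1" and "j_1 + 1" hash to the same optimistic table entry and
   i_2 == j_2 follows.  The cycle is re-iterated until no value number
   changes; only then are the facts treated as proven.  */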
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
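/* Editor's example: if x_3 is value numbered to y_2 and y_2 is
   defined by

       y_2 = a_1 + 1;

   then vn_get_expr_for (x_3) rebuilds the tree "a_1 + 1", caches it
   in the vn_ssa_aux entry, and hands later folding a constant-bearing
   expression instead of the opaque SSA name.  */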
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
                                   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  hashval_t result = 0;
  if (vro1->op0)
    result += iterative_hash_expr (vro1->op0, vro1->opcode);
  if (vro1->op1)
    result += iterative_hash_expr (vro1->op1, vro1->opcode);
  if (vro1->op2)
    result += iterative_hash_expr (vro1->op2, vro1->opcode);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  tree v;
  int i;
  vn_reference_op_t vro;

  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    result += iterative_hash_expr (v, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  tree v;
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
  if (vr1->hashcode != vr2->hashcode)
    return false;

  if (vr1->vuses == vr2->vuses
      && vr1->operands == vr2->operands)
    return true;

  /* Impossible for them to be equivalent if they have different
     number of vuses.  */
  if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
    return false;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  /* The memory state is more often different than the address of the
     store/load, so check it first.  */
  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    {
      if (VEC_index (tree, vr2->vuses, i) != v)
        return false;
    }

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    {
      if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
                               vro))
        return false;
    }
  return true;
}
/* Place the vuses from STMT into *result.  */

static inline void
vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
{
  ssa_op_iter iter;
  tree vuse;

  if (!stmt)
    return;

  VEC_reserve_exact (tree, gc, *result,
                     num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));

  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
    VEC_quick_push (tree, *result, vuse);
}


/* Copy the VUSE names in STMT into a vector, and return
   the vector.  */

static VEC (tree, gc) *
copy_vuses_from_stmt (gimple stmt)
{
  VEC (tree, gc) *vuses = NULL;

  vuses_to_vec (stmt, &vuses);

  return vuses;
}

/* Place the vdefs from STMT into *result.  */

static inline void
vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
{
  ssa_op_iter iter;
  tree vdef;

  if (!stmt)
    return;

  *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));

  FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
    VEC_quick_push (tree, *result, vdef);
}

/* Copy the names of vdef results in STMT into a vector, and return
   the vector.  */

static VEC (tree, gc) *
copy_vdefs_from_stmt (gimple stmt)
{
  VEC (tree, gc) *vdefs = NULL;

  vdefs_to_vec (stmt, &vdefs);

  return vdefs;
}

/* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs.  */
static VEC (tree, gc) *shared_lookup_vops;

/* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
   This function will overwrite the current SHARED_LOOKUP_VOPS
   variable.  */

VEC (tree, gc) *
shared_vuses_from_stmt (gimple stmt)
{
  VEC_truncate (tree, shared_lookup_vops, 0);
  vuses_to_vec (stmt, &shared_lookup_vops);

  return shared_lookup_vops;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      temp.op1 = TMR_INDEX (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_STEP (ref);
      temp.op1 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          /* If this is a reference to a union member, record the union
             member size as operand.  Do so only if we are doing
             expression insertion (during FRE), as PRE currently gets
             confused with this.  */
          if (may_insert
              && TREE_OPERAND (ref, 2) == NULL_TREE
              && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
              && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
              && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
            temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
          else
            {
              /* Record field as operand.  */
              temp.op0 = TREE_OPERAND (ref, 1);
              temp.op1 = TREE_OPERAND (ref, 2);
            }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          temp.op2 = TREE_OPERAND (ref, 3);
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
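/* Editor's example: for a load from "s.a[i_1]" (an ARRAY_REF wrapping
   a COMPONENT_REF wrapping a VAR_DECL) the loop above walks outermost
   to innermost and pushes, roughly,

       { opcode: ARRAY_REF,     op0: i_1 }
       { opcode: COMPONENT_REF, op0: FIELD_DECL a }
       { opcode: VAR_DECL,      op0: s }

   so two references get the same hash exactly when their decomposed
   access paths (after valueizing SSA operands) match record by
   record.  */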
/* Re-create a reference tree from the reference ops OPS.
   Returns NULL_TREE if the ops were not handled.
   This routine needs to be kept in sync with copy_reference_ops_from_ref.  */

static tree
get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree ref, *op0_p = &ref;

  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        case CALL_EXPR:
          return NULL_TREE;

        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case BIT_FIELD_REF:
          *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
                           op->op0, op->op1);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case COMPONENT_REF:
          *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
                           op->op0, op->op1);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          *op0_p = build4 (op->opcode, op->type, NULL_TREE,
                           op->op0, op->op1, op->op2);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          break;

        case ADDR_EXPR:
          if (op->op0 != NULL_TREE)
            {
              gcc_assert (is_gimple_min_invariant (op->op0));
              *op0_p = op->op0;
              break;
            }
          /* Fallthrough.  */
        case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        default:
          return NULL_TREE;
        }
    }

  return ref;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  return shared_lookup_references;
}


/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      /* TODO: Do we want to valueize op2 and op1 of
         ARRAY_REF/COMPONENT_REF for Ada?  */
    }

  return orig;
}
/* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
   their value numbers.  This is done in-place, and the vector passed
   in is returned.  */

static VEC (tree, gc) *
valueize_vuses (VEC (tree, gc) *orig)
{
  bool made_replacement = false;
  tree vuse;
  int i;

  for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
    {
      if (vuse != SSA_VAL (vuse))
        {
          made_replacement = true;
          VEC_replace (tree, orig, i, SSA_VAL (vuse));
        }
    }

  if (made_replacement && VEC_length (tree, orig) > 1)
    sort_vuses (orig);

  return orig;
}

/* Return the single reference statement defining all virtual uses
   in VUSES or NULL, if there are multiple defining statements.
   Take into account only definitions that alias REF if following
   back-edges.  */

static gimple
get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
{
  gimple def_stmt;
  tree vuse;
  unsigned int i;

  gcc_assert (VEC_length (tree, vuses) >= 1);

  def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* We can only handle lookups over PHI nodes for a single
         virtual operand.  */
      if (VEC_length (tree, vuses) == 1)
        {
          def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
          goto cont;
        }
      else
        return NULL;
    }

  /* Verify each VUSE reaches the same defining stmt.  */
  for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
    {
      gimple tmp = SSA_NAME_DEF_STMT (vuse);
      if (tmp != def_stmt)
        return NULL;
    }

  /* Now see if the definition aliases ref, and loop until it does.  */
 cont:
  while (def_stmt
         && is_gimple_assign (def_stmt)
         && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
    def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);

  return def_stmt;
}
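/* Editor's sketch of the walk (virtual operands shown schematically):

       *p = 1;      # defines memory state M_2 out of M_1
       *q = 2;      # defines M_3 out of M_2, q does not alias p
       x = *p;      # VUSE <M_3>

   A lookup of "*p" at the load starts from the store defining M_3,
   sees that it cannot alias "*p", and follows the use-def chain one
   definition back, returning the store "*p = 1" as the single
   defining statement.  */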
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  tree result;
  if (vnresult)
    *vnresult = NULL;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (operands);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && vr1.vuses
      && VEC_length (tree, vr1.vuses) >= 1)
    {
      tree ref = get_ref_from_reference_ops (operands);
      gimple def_stmt;
      if (ref
          && (def_stmt = get_def_ref_stmt_vuses (ref, vr1.vuses))
          && is_gimple_assign (def_stmt))
        {
          /* We are now at an aliasing definition for the vuses we want to
             look up.  Re-do the lookup with the vdefs for this stmt.  */
          vdefs_to_vec (def_stmt, &vuses);
          vr1.vuses = valueize_vuses (vuses);
          vr1.hashcode = vn_reference_compute_hash (&vr1);
          result = vn_reference_lookup_1 (&vr1, vnresult);
        }
    }

  return result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
                     vn_reference_t *vnresult)
{
  struct vn_reference_s vr1;
  tree result;
  gimple def_stmt;
  if (vnresult)
    *vnresult = NULL;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && vr1.vuses
      && VEC_length (tree, vr1.vuses) >= 1
      && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
      && is_gimple_assign (def_stmt))
    {
      /* We are now at an aliasing definition for the vuses we want to
         look up.  Re-do the lookup with the vdefs for this stmt.  */
      vdefs_to_vec (def_stmt, &vuses);
      vr1.vuses = valueize_vuses (vuses);
      vr1.hashcode = vn_reference_compute_hash (&vr1);
      result = vn_reference_lookup_1 (&vr1, vnresult);
    }

  return result;
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (VEC (tree, gc) *vuses,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (operands);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

inline hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
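/* Editor's note: operands are valueized and commutative operands put
   into a canonical order before hashing, so for

       x_3 = a_1 + b_2;
       y_4 = b_2 + a_1;

   both right-hand sides produce the same operand array and hash, and
   the lookup for y_4 finds the entry inserted when x_3 was visited.  */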
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
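/* Editor's example: two PHIs in the same block,

       x_3 = PHI <a_1 (e1), VN_TOP (e2)>
       y_4 = PHI <a_1 (e1), b_2 (e2)>

   compare equal here because the VN_TOP argument is skipped as "still
   unknown"; the optimistic assumption x_3 == y_4 stands until a later
   iteration of the SCC either confirms it or lowers the argument and
   separates the two.  */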
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      SSA_VAL (from) = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
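/* Editor's note on the lattice: a name starts out at VN_TOP ("any
   value"), can be lowered to a constant, to another SSA name, or, in
   the worst case, to itself, which is SCCVN's encoding of VARYING.
   For instance, while iterating a loop SCC the value of i_2 may be
   recorded first as VN_TOP and later as i_2 itself; each such change
   makes set_ssa_val_to return true, which is what drives the
   iteration to a fixpoint.  */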
/* Set the value number of every definition in STMT to the definition
   itself, i.e. varying.  Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
         && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}
1668 | |
1669 /* Visit a unary operator RHS, value number it, and return true if the | |
1670 value number of LHS has changed as a result. */ | |
1671 | |
1672 static bool | |
1673 visit_unary_op (tree lhs, gimple stmt) | |
1674 { | |
1675 bool changed = false; | |
1676 tree result = vn_nary_op_lookup_stmt (stmt, NULL); | |
1677 | |
1678 if (result) | |
1679 { | |
1680 changed = set_ssa_val_to (lhs, result); | |
1681 } | |
1682 else | |
1683 { | |
1684 changed = set_ssa_val_to (lhs, lhs); | |
1685 vn_nary_op_insert_stmt (stmt, lhs); | |
1686 } | |
1687 | |
1688 return changed; | |
1689 } | |
1690 | |
1691 /* Visit a binary operator RHS, value number it, and return true if the | |
1692 value number of LHS has changed as a result. */ | |
1693 | |
1694 static bool | |
1695 visit_binary_op (tree lhs, gimple stmt) | |
1696 { | |
1697 bool changed = false; | |
1698 tree result = vn_nary_op_lookup_stmt (stmt, NULL); | |
1699 | |
1700 if (result) | |
1701 { | |
1702 changed = set_ssa_val_to (lhs, result); | |
1703 } | |
1704 else | |
1705 { | |
1706 changed = set_ssa_val_to (lhs, lhs); | |
1707 vn_nary_op_insert_stmt (stmt, lhs); | |
1708 } | |
1709 | |
1710 return changed; | |
1711 } | |
1712 | |
1713 /* Visit a call STMT storing into LHS. Return true if the value number | |
1714 of the LHS has changed as a result. */ | |
1715 | |
1716 static bool | |
1717 visit_reference_op_call (tree lhs, gimple stmt) | |
1718 { | |
1719 bool changed = false; | |
1720 struct vn_reference_s vr1; | |
1721 tree result; | |
1722 | |
1723 vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt)); | |
1724 vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt)); | |
1725 vr1.hashcode = vn_reference_compute_hash (&vr1); | |
1726 result = vn_reference_lookup_1 (&vr1, NULL); | |
1727 if (result) | |
1728 { | |
1729 changed = set_ssa_val_to (lhs, result); | |
1730 if (TREE_CODE (result) == SSA_NAME | |
1731 && VN_INFO (result)->has_constants) | |
1732 VN_INFO (lhs)->has_constants = true; | |
1733 } | |
1734 else | |
1735 { | |
1736 void **slot; | |
1737 vn_reference_t vr2; | |
1738 changed = set_ssa_val_to (lhs, lhs); | |
1739 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool); | |
1740 vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt)); | |
1741 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt)); | |
1742 vr2->hashcode = vr1.hashcode; | |
1743 vr2->result = lhs; | |
1744 slot = htab_find_slot_with_hash (current_info->references, | |
1745 vr2, vr2->hashcode, INSERT); | |
1746 if (*slot) | |
1747 free_reference (*slot); | |
1748 *slot = vr2; | |
1749 } | |
1750 | |
1751 return changed; | |
1752 } | |
1753 | |
1754 /* Visit a load from a reference operator RHS, part of STMT, value number it, | |
1755 and return true if the value number of the LHS has changed as a result. */ | |
1756 | |
1757 static bool | |
1758 visit_reference_op_load (tree lhs, tree op, gimple stmt) | |
1759 { | |
1760 bool changed = false; | |
1761 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true, | |
1762 NULL); | |
1763 | |
1764 /* We handle type-punning through unions by value-numbering based | |
1765 on offset and size of the access. Be prepared to handle a | |
1766 type mismatch here by creating a VIEW_CONVERT_EXPR. */ | |
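/* E.g. with a hypothetical union { int i; float f; } u, a load of
   u.f that hits a previous store to u.i finds a value of the wrong
   type; the VIEW_CONVERT_EXPR built below reinterprets those bits
   in the type of the load.  */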
1767 if (result | |
1768 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op))) | |
1769 { | |
1770 /* We will be setting the value number of lhs to the value number | |
1771 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result). | |
1772 So first simplify and lookup this expression to see if it | |
1773 is already available. */ | |
1774 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result); | |
1775 if ((CONVERT_EXPR_P (val) | |
1776 || TREE_CODE (val) == VIEW_CONVERT_EXPR) | |
1777 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME) | |
1778 { | |
1779 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0))); | |
1780 if ((CONVERT_EXPR_P (tem) | |
1781 || TREE_CODE (tem) == VIEW_CONVERT_EXPR) | |
1782 && (tem = fold_unary_ignore_overflow (TREE_CODE (val), | |
1783 TREE_TYPE (val), tem))) | |
1784 val = tem; | |
1785 } | |
1786 result = val; | |
1787 if (!is_gimple_min_invariant (val) | |
1788 && TREE_CODE (val) != SSA_NAME) | |
1789 result = vn_nary_op_lookup (val, NULL); | |
1790 /* If the expression is not yet available, value-number lhs to | |
1791 a new SSA_NAME we create. */ | |
1792 if (!result && may_insert) | |
1793 { | |
1794 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL); | |
1795 /* Initialize value-number information properly. */ | |
1796 VN_INFO_GET (result)->valnum = result; | |
1797 VN_INFO (result)->value_id = get_next_value_id (); | |
1798 VN_INFO (result)->expr = val; | |
1799 VN_INFO (result)->has_constants = expr_has_constants (val); | |
1800 VN_INFO (result)->needs_insertion = true; | |
1801 /* As all "inserted" statements are singleton SCCs, insert | |
1802 into the valid table. This is strictly needed to | |
1803 avoid re-generating new value SSA_NAMEs for the same | |
1804 expression during SCC iteration over and over (the | |
1805 optimistic table gets cleared after each iteration). | |
1806 We do not need to insert into the optimistic table, as | |
1807 lookups there will fall back to the valid table. */ | |
1808 if (current_info == optimistic_info) | |
1809 { | |
1810 current_info = valid_info; | |
1811 vn_nary_op_insert (val, result); | |
1812 current_info = optimistic_info; | |
1813 } | |
1814 else | |
1815 vn_nary_op_insert (val, result); | |
1816 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1817 { | |
1818 fprintf (dump_file, "Inserting name "); | |
1819 print_generic_expr (dump_file, result, 0); | |
1820 fprintf (dump_file, " for expression "); | |
1821 print_generic_expr (dump_file, val, 0); | |
1822 fprintf (dump_file, "\n"); | |
1823 } | |
1824 } | |
1825 } | |
1826 | |
1827 if (result) | |
1828 { | |
1829 changed = set_ssa_val_to (lhs, result); | |
1830 if (TREE_CODE (result) == SSA_NAME | |
1831 && VN_INFO (result)->has_constants) | |
1832 { | |
1833 VN_INFO (lhs)->expr = VN_INFO (result)->expr; | |
1834 VN_INFO (lhs)->has_constants = true; | |
1835 } | |
1836 } | |
1837 else | |
1838 { | |
1839 changed = set_ssa_val_to (lhs, lhs); | |
1840 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt)); | |
1841 } | |
1842 | |
1843 return changed; | |
1844 } | |
1845 | |
1846 | |
1847 /* Visit a store to a reference operator LHS, part of STMT, value number it, | |
1848 and return true if the value number of the LHS has changed as a result. */ | |
1849 | |
1850 static bool | |
1851 visit_reference_op_store (tree lhs, tree op, gimple stmt) | |
1852 { | |
1853 bool changed = false; | |
1854 tree result; | |
1855 bool resultsame = false; | |
1856 | |
1857 /* First we want to look up, using the *vuses* from the store, whether | |
1858 the last store to this location with the same address stored the | |
1859 same value. | |
1860 | |
1861 The vuses represent the memory state before the store. If the | |
1862 memory state, address, and value of the store are the same as those | |
1863 of the last store to this location, then this store will produce | |
1864 the same memory state as that store. | |
1865 | |
1866 In this case the vdef versions for this store are value numbered to those | |
1867 vuse versions, since they represent the same memory state after | |
1868 this store. | |
1869 | |
1870 Otherwise, the vdefs for the store are used when inserting into | |
1871 the table, since the store generates a new memory state. */ | |
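/* Illustrative sketch (all names hypothetical):

     # m_4 = VDEF <m_1>
     *p_2 = x_3;

   If the lookup through vuse m_1 shows this location already holds
   x_3, then m_4 is value numbered to m_1, as no new memory state is
   created; otherwise m_4 represents a new state and the store is
   entered into the table below.  */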
1872 | |
1873 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false, | |
1874 NULL); | |
1875 | |
1876 if (result) | |
1877 { | |
1878 if (TREE_CODE (result) == SSA_NAME) | |
1879 result = SSA_VAL (result); | |
1880 if (TREE_CODE (op) == SSA_NAME) | |
1881 op = SSA_VAL (op); | |
1882 resultsame = expressions_equal_p (result, op); | |
1883 } | |
1884 | |
1885 if (!result || !resultsame) | |
1886 { | |
1887 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt); | |
1888 int i; | |
1889 tree vdef; | |
1890 | |
1891 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1892 { | |
1893 fprintf (dump_file, "No store match\n"); | |
1894 fprintf (dump_file, "Value numbering store "); | |
1895 print_generic_expr (dump_file, lhs, 0); | |
1896 fprintf (dump_file, " to "); | |
1897 print_generic_expr (dump_file, op, 0); | |
1898 fprintf (dump_file, "\n"); | |
1899 } | |
1900 /* Have to set value numbers before insert, since insert is | |
1901 going to valueize the references in-place. */ | |
1902 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++) | |
1903 { | |
1904 VN_INFO (vdef)->use_processed = true; | |
1905 changed |= set_ssa_val_to (vdef, vdef); | |
1906 } | |
1907 | |
1908 /* Do not insert structure copies into the tables. */ | |
1909 if (is_gimple_min_invariant (op) | |
1910 || is_gimple_reg (op)) | |
1911 vn_reference_insert (lhs, op, vdefs); | |
1912 } | |
1913 else | |
1914 { | |
1915 /* We had a match, so value number the vdefs to have the value | |
1916 number of the vuses they came from. */ | |
1917 ssa_op_iter op_iter; | |
1918 def_operand_p var; | |
1919 vuse_vec_p vv; | |
1920 | |
1921 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1922 fprintf (dump_file, "Store matched earlier value," | |
1923 "value numbering store vdefs to matching vuses.\n"); | |
1924 | |
1925 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter) | |
1926 { | |
1927 tree def = DEF_FROM_PTR (var); | |
1928 tree use; | |
1929 | |
1930 /* If the vuse is a multiuse, we can't really do much | |
1931 here, since we don't know which value number of | |
1932 which vuse to use. */ | |
1933 if (VUSE_VECT_NUM_ELEM (*vv) != 1) | |
1934 use = def; | |
1935 else | |
1936 use = VUSE_ELEMENT_VAR (*vv, 0); | |
1937 | |
1938 VN_INFO (def)->use_processed = true; | |
1939 changed |= set_ssa_val_to (def, SSA_VAL (use)); | |
1940 } | |
1941 } | |
1942 | |
1943 return changed; | |
1944 } | |
1945 | |
1946 /* Visit and value number PHI, return true if the value number | |
1947 changed. */ | |
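/* For instance (hypothetical names), given

     x_4 = PHI <x_2(2), x_3(3)>

   if x_2 and x_3 value number to the same thing, with VN_TOP
   arguments ignored, then x_4 simply gets that value; otherwise we
   fall back to looking up a congruent PHI in the same block.  */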
1948 | |
1949 static bool | |
1950 visit_phi (gimple phi) | |
1951 { | |
1952 bool changed = false; | |
1953 tree result; | |
1954 tree sameval = VN_TOP; | |
1955 bool allsame = true; | |
1956 unsigned i; | |
1957 | |
1958 /* TODO: We could check for this in init_sccvn, and replace this | |
1959 with a gcc_assert. */ | |
1960 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi))) | |
1961 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi)); | |
1962 | |
1963 /* See if all non-TOP arguments have the same value. TOP is | |
1964 equivalent to everything, so we can ignore it. */ | |
1965 for (i = 0; i < gimple_phi_num_args (phi); i++) | |
1966 { | |
1967 tree def = PHI_ARG_DEF (phi, i); | |
1968 | |
1969 if (TREE_CODE (def) == SSA_NAME) | |
1970 def = SSA_VAL (def); | |
1971 if (def == VN_TOP) | |
1972 continue; | |
1973 if (sameval == VN_TOP) | |
1974 { | |
1975 sameval = def; | |
1976 } | |
1977 else | |
1978 { | |
1979 if (!expressions_equal_p (def, sameval)) | |
1980 { | |
1981 allsame = false; | |
1982 break; | |
1983 } | |
1984 } | |
1985 } | |
1986 | |
1987 /* If all arguments value numbered to the same value, the phi node | |
1988 has that value. */ | |
1989 if (allsame) | |
1990 { | |
1991 if (is_gimple_min_invariant (sameval)) | |
1992 { | |
1993 VN_INFO (PHI_RESULT (phi))->has_constants = true; | |
1994 VN_INFO (PHI_RESULT (phi))->expr = sameval; | |
1995 } | |
1996 else | |
1997 { | |
1998 VN_INFO (PHI_RESULT (phi))->has_constants = false; | |
1999 VN_INFO (PHI_RESULT (phi))->expr = sameval; | |
2000 } | |
2001 | |
2002 if (TREE_CODE (sameval) == SSA_NAME) | |
2003 return visit_copy (PHI_RESULT (phi), sameval); | |
2004 | |
2005 return set_ssa_val_to (PHI_RESULT (phi), sameval); | |
2006 } | |
2007 | |
2008 /* Otherwise, see if it is equivalent to a phi node in this block. */ | |
2009 result = vn_phi_lookup (phi); | |
2010 if (result) | |
2011 { | |
2012 if (TREE_CODE (result) == SSA_NAME) | |
2013 changed = visit_copy (PHI_RESULT (phi), result); | |
2014 else | |
2015 changed = set_ssa_val_to (PHI_RESULT (phi), result); | |
2016 } | |
2017 else | |
2018 { | |
2019 vn_phi_insert (phi, PHI_RESULT (phi)); | |
2020 VN_INFO (PHI_RESULT (phi))->has_constants = false; | |
2021 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi); | |
2022 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi)); | |
2023 } | |
2024 | |
2025 return changed; | |
2026 } | |
2027 | |
2028 /* Return true if EXPR contains constants. */ | |
2029 | |
2030 static bool | |
2031 expr_has_constants (tree expr) | |
2032 { | |
2033 switch (TREE_CODE_CLASS (TREE_CODE (expr))) | |
2034 { | |
2035 case tcc_unary: | |
2036 return is_gimple_min_invariant (TREE_OPERAND (expr, 0)); | |
2037 | |
2038 case tcc_binary: | |
2039 return is_gimple_min_invariant (TREE_OPERAND (expr, 0)) | |
2040 || is_gimple_min_invariant (TREE_OPERAND (expr, 1)); | |
2041 /* Constants inside reference ops are rarely interesting, but | |
2042 it can take a lot of looking to find them. */ | |
2043 case tcc_reference: | |
2044 case tcc_declaration: | |
2045 return false; | |
2046 default: | |
2047 return is_gimple_min_invariant (expr); | |
2048 } | |
2049 return false; | |
2050 } | |
2051 | |
2052 /* Return true if STMT contains constants. */ | |
2053 | |
2054 static bool | |
2055 stmt_has_constants (gimple stmt) | |
2056 { | |
2057 if (gimple_code (stmt) != GIMPLE_ASSIGN) | |
2058 return false; | |
2059 | |
2060 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))) | |
2061 { | |
2062 case GIMPLE_UNARY_RHS: | |
2063 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt)); | |
2064 | |
2065 case GIMPLE_BINARY_RHS: | |
2066 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt)) | |
2067 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))); | |
2068 case GIMPLE_SINGLE_RHS: | |
2069 /* Constants inside reference ops are rarely interesting, but | |
2070 it can take a lot of looking to find them. */ | |
2071 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt)); | |
2072 default: | |
2073 gcc_unreachable (); | |
2074 } | |
2075 return false; | |
2076 } | |
2077 | |
2078 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the | |
2079 result. | |
2080 This is performed in place. */ | |
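/* E.g. if SSA_VAL (b_2) == 7 and SSA_VAL (c_3) == d_1 (hypothetical
   names), then b_2 + c_3 is rewritten to 7 + d_1; operands whose
   value is still VN_TOP are left untouched.  */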
2081 | |
2082 static tree | |
2083 valueize_expr (tree expr) | |
2084 { | |
2085 switch (TREE_CODE_CLASS (TREE_CODE (expr))) | |
2086 { | |
2087 case tcc_unary: | |
2088 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME | |
2089 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP) | |
2090 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0)); | |
2091 break; | |
2092 case tcc_binary: | |
2093 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME | |
2094 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP) | |
2095 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0)); | |
2096 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME | |
2097 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP) | |
2098 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1)); | |
2099 break; | |
2100 default: | |
2101 break; | |
2102 } | |
2103 return expr; | |
2104 } | |
2105 | |
2106 /* Simplify the binary expression on the RHS of STMT, and return the | |
2107 result if simplified. */ | |
2108 | |
2109 static tree | |
2110 simplify_binary_expression (gimple stmt) | |
2111 { | |
2112 tree result = NULL_TREE; | |
2113 tree op0 = gimple_assign_rhs1 (stmt); | |
2114 tree op1 = gimple_assign_rhs2 (stmt); | |
2115 | |
2116 /* This will not catch every single case we could combine, but will | |
2117 catch those with constants. The goal here is to simultaneously | |
2118 combine constants between expressions, but avoid infinite | |
2119 expansion of expressions during simplification. */ | |
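/* A typical case that is caught (hypothetical names):

     x_1 = a_2 + 1;
     y_3 = x_1 + 2;

   substituting the expression of x_1 lets fold_binary reduce the
   RHS of y_3 to a_2 + 3.  */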
2120 if (TREE_CODE (op0) == SSA_NAME) | |
2121 { | |
2122 if (VN_INFO (op0)->has_constants | |
2123 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison) | |
2124 op0 = valueize_expr (vn_get_expr_for (op0)); | |
2125 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0) | |
2126 op0 = SSA_VAL (op0); | |
2127 } | |
2128 | |
2129 if (TREE_CODE (op1) == SSA_NAME) | |
2130 { | |
2131 if (VN_INFO (op1)->has_constants) | |
2132 op1 = valueize_expr (vn_get_expr_for (op1)); | |
2133 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1) | |
2134 op1 = SSA_VAL (op1); | |
2135 } | |
2136 | |
2137 /* Avoid folding if nothing changed. */ | |
2138 if (op0 == gimple_assign_rhs1 (stmt) | |
2139 && op1 == gimple_assign_rhs2 (stmt)) | |
2140 return NULL_TREE; | |
2141 | |
2142 fold_defer_overflow_warnings (); | |
2143 | |
2144 result = fold_binary (gimple_assign_rhs_code (stmt), | |
2145 TREE_TYPE (gimple_get_lhs (stmt)), op0, op1); | |
2146 if (result) | |
2147 STRIP_USELESS_TYPE_CONVERSION (result); | |
2148 | |
2149 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result), | |
2150 stmt, 0); | |
2151 | |
2152 /* Make sure result is not a complex expression consisting | |
2153 of operators of operators (i.e. (a + b) + (a + c)). | |
2154 Otherwise, we will end up with unbounded expressions if | |
2155 fold does anything at all. */ | |
2156 if (result && valid_gimple_rhs_p (result)) | |
2157 return result; | |
2158 | |
2159 return NULL_TREE; | |
2160 } | |
2161 | |
2162 /* Simplify the unary expression on the RHS of STMT, and return the | |
2163 result if simplified. */ | |
2164 | |
2165 static tree | |
2166 simplify_unary_expression (gimple stmt) | |
2167 { | |
2168 tree result = NULL_TREE; | |
2169 tree orig_op0, op0 = gimple_assign_rhs1 (stmt); | |
2170 | |
2171 /* We handle some tcc_reference codes here that are all | |
2172 GIMPLE_ASSIGN_SINGLE codes. */ | |
2173 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR | |
2174 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR | |
2175 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR) | |
2176 op0 = TREE_OPERAND (op0, 0); | |
2177 | |
2178 if (TREE_CODE (op0) != SSA_NAME) | |
2179 return NULL_TREE; | |
2180 | |
2181 orig_op0 = op0; | |
2182 if (VN_INFO (op0)->has_constants) | |
2183 op0 = valueize_expr (vn_get_expr_for (op0)); | |
2184 else if (gimple_assign_cast_p (stmt) | |
2185 || gimple_assign_rhs_code (stmt) == REALPART_EXPR | |
2186 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR | |
2187 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR) | |
2188 { | |
2189 /* We want to do tree-combining on conversion-like expressions. | |
2190 Make sure we feed only SSA_NAMEs or constants to fold though. */ | |
2191 tree tem = valueize_expr (vn_get_expr_for (op0)); | |
2192 if (UNARY_CLASS_P (tem) | |
2193 || BINARY_CLASS_P (tem) | |
2194 || TREE_CODE (tem) == VIEW_CONVERT_EXPR | |
2195 || TREE_CODE (tem) == SSA_NAME | |
2196 || is_gimple_min_invariant (tem)) | |
2197 op0 = tem; | |
2198 } | |
2199 | |
2200 /* Avoid folding if nothing changed, but remember the expression. */ | |
2201 if (op0 == orig_op0) | |
2202 return NULL_TREE; | |
2203 | |
2204 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt), | |
2205 gimple_expr_type (stmt), op0); | |
2206 if (result) | |
2207 { | |
2208 STRIP_USELESS_TYPE_CONVERSION (result); | |
2209 if (valid_gimple_rhs_p (result)) | |
2210 return result; | |
2211 } | |
2212 | |
2213 return NULL_TREE; | |
2214 } | |
2215 | |
2216 /* Try to simplify the RHS of STMT using equivalences and constant folding. */ | |
2217 | |
2218 static tree | |
2219 try_to_simplify (gimple stmt) | |
2220 { | |
2221 tree tem; | |
2222 | |
2223 /* For stores we can end up simplifying an SSA_NAME rhs. Just return | |
2224 in this case; there is no point in doing extra work. */ | |
2225 if (gimple_assign_copy_p (stmt) | |
2226 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME) | |
2227 return NULL_TREE; | |
2228 | |
2229 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))) | |
2230 { | |
2231 case tcc_declaration: | |
2232 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt)); | |
2233 if (tem) | |
2234 return tem; | |
2235 break; | |
2236 | |
2237 case tcc_reference: | |
2238 /* Do not do full-blown reference lookup here, but simplify | |
2239 reads from constant aggregates. */ | |
2240 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt)); | |
2241 if (tem) | |
2242 return tem; | |
2243 | |
2244 /* Fallthrough for some codes that can operate on registers. */ | |
2245 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR | |
2246 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR | |
2247 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR)) | |
2248 break; | |
2249 /* We could do a little more with unary ops, if they expand | |
2250 into binary ops, but it's debatable whether it is worth it. */ | |
2251 case tcc_unary: | |
2252 return simplify_unary_expression (stmt); | |
2253 break; | |
2254 case tcc_comparison: | |
2255 case tcc_binary: | |
2256 return simplify_binary_expression (stmt); | |
2257 break; | |
2258 default: | |
2259 break; | |
2260 } | |
2261 | |
2262 return NULL_TREE; | |
2263 } | |
2264 | |
2265 /* Visit and value number USE, return true if the value number | |
2266 changed. */ | |
2267 | |
2268 static bool | |
2269 visit_use (tree use) | |
2270 { | |
2271 bool changed = false; | |
2272 gimple stmt = SSA_NAME_DEF_STMT (use); | |
2273 | |
2274 VN_INFO (use)->use_processed = true; | |
2275 | |
2276 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)); | |
2277 if (dump_file && (dump_flags & TDF_DETAILS) | |
2278 && !SSA_NAME_IS_DEFAULT_DEF (use)) | |
2279 { | |
2280 fprintf (dump_file, "Value numbering "); | |
2281 print_generic_expr (dump_file, use, 0); | |
2282 fprintf (dump_file, " stmt = "); | |
2283 print_gimple_stmt (dump_file, stmt, 0, 0); | |
2284 } | |
2285 | |
2286 /* Handle uninitialized uses. */ | |
2287 if (SSA_NAME_IS_DEFAULT_DEF (use)) | |
2288 changed = set_ssa_val_to (use, use); | |
2289 else | |
2290 { | |
2291 if (gimple_code (stmt) == GIMPLE_PHI) | |
2292 changed = visit_phi (stmt); | |
2293 else if (!gimple_has_lhs (stmt) | |
2294 || gimple_has_volatile_ops (stmt) | |
2295 || stmt_could_throw_p (stmt)) | |
2296 changed = defs_to_varying (stmt); | |
2297 else if (is_gimple_assign (stmt)) | |
2298 { | |
2299 tree lhs = gimple_assign_lhs (stmt); | |
2300 tree simplified; | |
2301 | |
2302 /* Shortcut for copies. Simplifying copies is pointless, | |
2303 since we copy the expression and value they represent. */ | |
2304 if (gimple_assign_copy_p (stmt) | |
2305 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME | |
2306 && TREE_CODE (lhs) == SSA_NAME) | |
2307 { | |
2308 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt)); | |
2309 goto done; | |
2310 } | |
2311 simplified = try_to_simplify (stmt); | |
2312 if (simplified) | |
2313 { | |
2314 if (dump_file && (dump_flags & TDF_DETAILS)) | |
2315 { | |
2316 fprintf (dump_file, "RHS "); | |
2317 print_gimple_expr (dump_file, stmt, 0, 0); | |
2318 fprintf (dump_file, " simplified to "); | |
2319 print_generic_expr (dump_file, simplified, 0); | |
2320 if (TREE_CODE (lhs) == SSA_NAME) | |
2321 fprintf (dump_file, " has constants %d\n", | |
2322 expr_has_constants (simplified)); | |
2323 else | |
2324 fprintf (dump_file, "\n"); | |
2325 } | |
2326 } | |
2327 /* Setting value numbers to constants will occasionally | |
2328 screw up phi congruence because constants are not | |
2329 uniquely associated with a single ssa name that can be | |
2330 looked up. */ | |
2331 if (simplified | |
2332 && is_gimple_min_invariant (simplified) | |
2333 && TREE_CODE (lhs) == SSA_NAME) | |
2334 { | |
2335 VN_INFO (lhs)->expr = simplified; | |
2336 VN_INFO (lhs)->has_constants = true; | |
2337 changed = set_ssa_val_to (lhs, simplified); | |
2338 goto done; | |
2339 } | |
2340 else if (simplified | |
2341 && TREE_CODE (simplified) == SSA_NAME | |
2342 && TREE_CODE (lhs) == SSA_NAME) | |
2343 { | |
2344 changed = visit_copy (lhs, simplified); | |
2345 goto done; | |
2346 } | |
2347 else if (simplified) | |
2348 { | |
2349 if (TREE_CODE (lhs) == SSA_NAME) | |
2350 { | |
2351 VN_INFO (lhs)->has_constants = expr_has_constants (simplified); | |
2352 /* We have to unshare the expression or else | |
2353 valueizing may change the IL stream. */ | |
2354 VN_INFO (lhs)->expr = unshare_expr (simplified); | |
2355 } | |
2356 } | |
2357 else if (stmt_has_constants (stmt) | |
2358 && TREE_CODE (lhs) == SSA_NAME) | |
2359 VN_INFO (lhs)->has_constants = true; | |
2360 else if (TREE_CODE (lhs) == SSA_NAME) | |
2361 { | |
2362 /* We reset expr and constantness here because we may | |
2363 have been value numbering optimistically, and | |
2364 iterating. They may become non-constant in this case, | |
2365 even if they were optimistically constant. */ | |
2366 | |
2367 VN_INFO (lhs)->has_constants = false; | |
2368 VN_INFO (lhs)->expr = NULL_TREE; | |
2369 } | |
2370 | |
2371 if ((TREE_CODE (lhs) == SSA_NAME | |
2372 /* We can substitute SSA_NAMEs that are live over | |
2373 abnormal edges with their constant value. */ | |
2374 && !(gimple_assign_copy_p (stmt) | |
2375 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))) | |
2376 && !(simplified | |
2377 && is_gimple_min_invariant (simplified)) | |
2378 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)) | |
2379 /* Stores or copies from SSA_NAMEs that are live over | |
2380 abnormal edges are a problem. */ | |
2381 || (gimple_assign_single_p (stmt) | |
2382 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME | |
2383 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))) | |
2384 changed = defs_to_varying (stmt); | |
2385 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs)) | |
2386 { | |
2387 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt); | |
2388 } | |
2389 else if (TREE_CODE (lhs) == SSA_NAME) | |
2390 { | |
2391 if ((gimple_assign_copy_p (stmt) | |
2392 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))) | |
2393 || (simplified | |
2394 && is_gimple_min_invariant (simplified))) | |
2395 { | |
2396 VN_INFO (lhs)->has_constants = true; | |
2397 if (simplified) | |
2398 changed = set_ssa_val_to (lhs, simplified); | |
2399 else | |
2400 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt)); | |
2401 } | |
2402 else | |
2403 { | |
2404 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))) | |
2405 { | |
2406 case GIMPLE_UNARY_RHS: | |
2407 changed = visit_unary_op (lhs, stmt); | |
2408 break; | |
2409 case GIMPLE_BINARY_RHS: | |
2410 changed = visit_binary_op (lhs, stmt); | |
2411 break; | |
2412 case GIMPLE_SINGLE_RHS: | |
2413 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))) | |
2414 { | |
2415 case tcc_reference: | |
2416 /* VOP-less references can go through unary case. */ | |
2417 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR | |
2418 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR | |
2419 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR ) | |
2420 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME) | |
2421 { | |
2422 changed = visit_unary_op (lhs, stmt); | |
2423 break; | |
2424 } | |
2425 /* Fallthrough. */ | |
2426 case tcc_declaration: | |
2427 changed = visit_reference_op_load | |
2428 (lhs, gimple_assign_rhs1 (stmt), stmt); | |
2429 break; | |
2430 case tcc_expression: | |
2431 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR) | |
2432 { | |
2433 changed = visit_unary_op (lhs, stmt); | |
2434 break; | |
2435 } | |
2436 /* Fallthrough. */ | |
2437 default: | |
2438 changed = defs_to_varying (stmt); | |
2439 } | |
2440 break; | |
2441 default: | |
2442 changed = defs_to_varying (stmt); | |
2443 break; | |
2444 } | |
2445 } | |
2446 } | |
2447 else | |
2448 changed = defs_to_varying (stmt); | |
2449 } | |
2450 else if (is_gimple_call (stmt)) | |
2451 { | |
2452 tree lhs = gimple_call_lhs (stmt); | |
2453 | |
2454 /* ??? We could try to simplify calls. */ | |
2455 | |
2456 if (stmt_has_constants (stmt) | |
2457 && TREE_CODE (lhs) == SSA_NAME) | |
2458 VN_INFO (lhs)->has_constants = true; | |
2459 else if (TREE_CODE (lhs) == SSA_NAME) | |
2460 { | |
2461 /* We reset expr and constantness here because we may | |
2462 have been value numbering optimistically, and | |
2463 iterating. They may become non-constant in this case, | |
2464 even if they were optimistically constant. */ | |
2465 VN_INFO (lhs)->has_constants = false; | |
2466 VN_INFO (lhs)->expr = NULL_TREE; | |
2467 } | |
2468 | |
2469 if (TREE_CODE (lhs) == SSA_NAME | |
2470 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)) | |
2471 changed = defs_to_varying (stmt); | |
2472 /* ??? We should handle stores from calls. */ | |
2473 else if (TREE_CODE (lhs) == SSA_NAME) | |
2474 { | |
2475 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)) | |
2476 changed = visit_reference_op_call (lhs, stmt); | |
2477 else | |
2478 changed = defs_to_varying (stmt); | |
2479 } | |
2480 else | |
2481 changed = defs_to_varying (stmt); | |
2482 } | |
2483 } | |
2484 done: | |
2485 return changed; | |
2486 } | |
2487 | |
2488 /* Compare two operands by reverse postorder index. */ | |
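/* In short: names without a defining statement or block order first,
   then names from blocks earlier in RPO; within one block, PHI
   results precede ordinary definitions, which compare by uid.  */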
2489 | |
2490 static int | |
2491 compare_ops (const void *pa, const void *pb) | |
2492 { | |
2493 const tree opa = *((const tree *)pa); | |
2494 const tree opb = *((const tree *)pb); | |
2495 gimple opstmta = SSA_NAME_DEF_STMT (opa); | |
2496 gimple opstmtb = SSA_NAME_DEF_STMT (opb); | |
2497 basic_block bba; | |
2498 basic_block bbb; | |
2499 | |
2500 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb)) | |
2501 return 0; | |
2502 else if (gimple_nop_p (opstmta)) | |
2503 return -1; | |
2504 else if (gimple_nop_p (opstmtb)) | |
2505 return 1; | |
2506 | |
2507 bba = gimple_bb (opstmta); | |
2508 bbb = gimple_bb (opstmtb); | |
2509 | |
2510 if (!bba && !bbb) | |
2511 return 0; | |
2512 else if (!bba) | |
2513 return -1; | |
2514 else if (!bbb) | |
2515 return 1; | |
2516 | |
2517 if (bba == bbb) | |
2518 { | |
2519 if (gimple_code (opstmta) == GIMPLE_PHI | |
2520 && gimple_code (opstmtb) == GIMPLE_PHI) | |
2521 return 0; | |
2522 else if (gimple_code (opstmta) == GIMPLE_PHI) | |
2523 return -1; | |
2524 else if (gimple_code (opstmtb) == GIMPLE_PHI) | |
2525 return 1; | |
2526 return gimple_uid (opstmta) - gimple_uid (opstmtb); | |
2527 } | |
2528 return rpo_numbers[bba->index] - rpo_numbers[bbb->index]; | |
2529 } | |
2530 | |
2531 /* Sort an array containing members of a strongly connected component | |
2532 SCC so that the members are ordered by RPO number. | |
2533 This means that when the sort is complete, iterating through the | |
2534 array will give you the members in RPO order. */ | |
2535 | |
2536 static void | |
2537 sort_scc (VEC (tree, heap) *scc) | |
2538 { | |
2539 qsort (VEC_address (tree, scc), | |
2540 VEC_length (tree, scc), | |
2541 sizeof (tree), | |
2542 compare_ops); | |
2543 } | |
2544 | |
2545 /* Process a strongly connected component in the SSA graph. */ | |
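/* Rough shape of the multi-member case below: iterate the whole SCC
   against the optimistic table, clearing that table between rounds,
   until no value number changes; then make one final pass with the
   valid table to record the converged results.  */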
2546 | |
2547 static void | |
2548 process_scc (VEC (tree, heap) *scc) | |
2549 { | |
2550 /* If the SCC has a single member, just visit it. */ | |
2551 | |
2552 if (VEC_length (tree, scc) == 1) | |
2553 { | |
2554 tree use = VEC_index (tree, scc, 0); | |
2555 if (!VN_INFO (use)->use_processed) | |
2556 visit_use (use); | |
2557 } | |
2558 else | |
2559 { | |
2560 tree var; | |
2561 unsigned int i; | |
2562 unsigned int iterations = 0; | |
2563 bool changed = true; | |
2564 | |
2565 /* Iterate over the SCC with the optimistic table until it stops | |
2566 changing. */ | |
2567 current_info = optimistic_info; | |
2568 while (changed) | |
2569 { | |
2570 changed = false; | |
2571 iterations++; | |
2572 /* As we are value-numbering optimistically we have to | |
2573 clear the expression tables and the simplified expressions | |
2574 in each iteration until we converge. */ | |
2575 htab_empty (optimistic_info->nary); | |
2576 htab_empty (optimistic_info->phis); | |
2577 htab_empty (optimistic_info->references); | |
2578 obstack_free (&optimistic_info->nary_obstack, NULL); | |
2579 gcc_obstack_init (&optimistic_info->nary_obstack); | |
2580 empty_alloc_pool (optimistic_info->phis_pool); | |
2581 empty_alloc_pool (optimistic_info->references_pool); | |
2582 for (i = 0; VEC_iterate (tree, scc, i, var); i++) | |
2583 VN_INFO (var)->expr = NULL_TREE; | |
2584 for (i = 0; VEC_iterate (tree, scc, i, var); i++) | |
2585 changed |= visit_use (var); | |
2586 } | |
2587 | |
2588 statistics_histogram_event (cfun, "SCC iterations", iterations); | |
2589 | |
2590 /* Finally, visit the SCC once using the valid table. */ | |
2591 current_info = valid_info; | |
2592 for (i = 0; VEC_iterate (tree, scc, i, var); i++) | |
2593 visit_use (var); | |
2594 } | |
2595 } | |
2596 | |
2597 DEF_VEC_O(ssa_op_iter); | |
2598 DEF_VEC_ALLOC_O(ssa_op_iter,heap); | |
2599 | |
2600 /* Pop the components of the found SCC for NAME off the SCC stack | |
2601 and process them. Returns true if all went well, false if | |
2602 we run into resource limits. */ | |
2603 | |
2604 static bool | |
2605 extract_and_process_scc_for_name (tree name) | |
2606 { | |
2607 VEC (tree, heap) *scc = NULL; | |
2608 tree x; | |
2609 | |
2610 /* Found an SCC, pop the components off the SCC stack and | |
2611 process them. */ | |
2612 do | |
2613 { | |
2614 x = VEC_pop (tree, sccstack); | |
2615 | |
2616 VN_INFO (x)->on_sccstack = false; | |
2617 VEC_safe_push (tree, heap, scc, x); | |
2618 } while (x != name); | |
2619 | |
2620 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */ | |
2621 if (VEC_length (tree, scc) | |
2622 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE)) | |
2623 { | |
2624 if (dump_file) | |
2625 fprintf (dump_file, "WARNING: Giving up with SCCVN due to " | |
2626 "SCC size %u exceeding %u\n", VEC_length (tree, scc), | |
2627 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE)); | |
2628 return false; | |
2629 } | |
2630 | |
2631 if (VEC_length (tree, scc) > 1) | |
2632 sort_scc (scc); | |
2633 | |
2634 if (dump_file && (dump_flags & TDF_DETAILS)) | |
2635 print_scc (dump_file, scc); | |
2636 | |
2637 process_scc (scc); | |
2638 | |
2639 VEC_free (tree, heap, scc); | |
2640 | |
2641 return true; | |
2642 } | |
2643 | |
2644 /* Depth-first search on NAME to discover and process SCCs in the SSA | |
2645 graph. | |
2646 Execution of this algorithm relies on the fact that the SCCs are | |
2647 popped off the stack in topological order. | |
2648 Returns true if successful, false if we stopped processing SCCs due | |
2649 to resource constraints. */ | |
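/* This is essentially Tarjan's SCC algorithm, implemented iteratively
   with an explicit stack of use iterators (namevec/itervec below), so
   that deep SSA graphs cannot overflow the call stack.  */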
2650 | |
2651 static bool | |
2652 DFS (tree name) | |
2653 { | |
2654 VEC(ssa_op_iter, heap) *itervec = NULL; | |
2655 VEC(tree, heap) *namevec = NULL; | |
2656 use_operand_p usep = NULL; | |
2657 gimple defstmt; | |
2658 tree use; | |
2659 ssa_op_iter iter; | |
2660 | |
2661 start_over: | |
2662 /* SCC info. */ | |
2663 VN_INFO (name)->dfsnum = next_dfs_num++; | |
2664 VN_INFO (name)->visited = true; | |
2665 VN_INFO (name)->low = VN_INFO (name)->dfsnum; | |
2666 | |
2667 VEC_safe_push (tree, heap, sccstack, name); | |
2668 VN_INFO (name)->on_sccstack = true; | |
2669 defstmt = SSA_NAME_DEF_STMT (name); | |
2670 | |
2671 /* Recursively DFS on our operands, looking for SCCs. */ | |
2672 if (!gimple_nop_p (defstmt)) | |
2673 { | |
2674 /* Push a new iterator. */ | |
2675 if (gimple_code (defstmt) == GIMPLE_PHI) | |
2676 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES); | |
2677 else | |
2678 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES); | |
2679 } | |
2680 else | |
2681 clear_and_done_ssa_iter (&iter); | |
2682 | |
2683 while (1) | |
2684 { | |
2685 /* If we are done processing uses of a name, go up the stack | |
2686 of iterators and process SCCs as we found them. */ | |
2687 if (op_iter_done (&iter)) | |
2688 { | |
2689 /* See if we found an SCC. */ | |
2690 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum) | |
2691 if (!extract_and_process_scc_for_name (name)) | |
2692 { | |
2693 VEC_free (tree, heap, namevec); | |
2694 VEC_free (ssa_op_iter, heap, itervec); | |
2695 return false; | |
2696 } | |
2697 | |
2698 /* Check if we are done. */ | |
2699 if (VEC_empty (tree, namevec)) | |
2700 { | |
2701 VEC_free (tree, heap, namevec); | |
2702 VEC_free (ssa_op_iter, heap, itervec); | |
2703 return true; | |
2704 } | |
2705 | |
2706 /* Restore the last use walker and continue walking there. */ | |
2707 use = name; | |
2708 name = VEC_pop (tree, namevec); | |
2709 memcpy (&iter, VEC_last (ssa_op_iter, itervec), | |
2710 sizeof (ssa_op_iter)); | |
2711 VEC_pop (ssa_op_iter, itervec); | |
2712 goto continue_walking; | |
2713 } | |
2714 | |
2715 use = USE_FROM_PTR (usep); | |
2716 | |
2717 /* Since we handle phi nodes, we will sometimes get | |
2718 invariants in the use expression. */ | |
2719 if (TREE_CODE (use) == SSA_NAME) | |
2720 { | |
2721 if (! (VN_INFO (use)->visited)) | |
2722 { | |
2723 /* Recurse by pushing the current use walking state on | |
2724 the stack and starting over. */ | |
2725 VEC_safe_push(ssa_op_iter, heap, itervec, &iter); | |
2726 VEC_safe_push(tree, heap, namevec, name); | |
2727 name = use; | |
2728 goto start_over; | |
2729 | |
2730 continue_walking: | |
2731 VN_INFO (name)->low = MIN (VN_INFO (name)->low, | |
2732 VN_INFO (use)->low); | |
2733 } | |
2734 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum | |
2735 && VN_INFO (use)->on_sccstack) | |
2736 { | |
2737 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum, | |
2738 VN_INFO (name)->low); | |
2739 } | |
2740 } | |
2741 | |
2742 usep = op_iter_next_use (&iter); | |
2743 } | |
2744 } | |
2745 | |
2746 /* Allocate a value number table. */ | |
2747 | |
2748 static void | |
2749 allocate_vn_table (vn_tables_t table) | |
2750 { | |
2751 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi); | |
2752 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL); | |
2753 table->references = htab_create (23, vn_reference_hash, vn_reference_eq, | |
2754 free_reference); | |
2755 | |
2756 gcc_obstack_init (&table->nary_obstack); | |
2757 table->phis_pool = create_alloc_pool ("VN phis", | |
2758 sizeof (struct vn_phi_s), | |
2759 30); | |
2760 table->references_pool = create_alloc_pool ("VN references", | |
2761 sizeof (struct vn_reference_s), | |
2762 30); | |
2763 } | |
2764 | |
2765 /* Free a value number table. */ | |
2766 | |
2767 static void | |
2768 free_vn_table (vn_tables_t table) | |
2769 { | |
2770 htab_delete (table->phis); | |
2771 htab_delete (table->nary); | |
2772 htab_delete (table->references); | |
2773 obstack_free (&table->nary_obstack, NULL); | |
2774 free_alloc_pool (table->phis_pool); | |
2775 free_alloc_pool (table->references_pool); | |
2776 } | |
2777 | |
2778 static void | |
2779 init_scc_vn (void) | |
2780 { | |
2781 size_t i; | |
2782 int j; | |
2783 int *rpo_numbers_temp; | |
2784 | |
2785 calculate_dominance_info (CDI_DOMINATORS); | |
2786 sccstack = NULL; | |
2787 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq, | |
2788 free); | |
2789 | |
2790 constant_value_ids = BITMAP_ALLOC (NULL); | |
2791 | |
2792 next_dfs_num = 1; | |
2793 next_value_id = 1; | |
2794 | |
2795 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1); | |
2796 /* VEC_alloc doesn't actually grow it to the right size; it just | |
2797 preallocates the space to do so. */ | |
2798 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1); | |
2799 gcc_obstack_init (&vn_ssa_aux_obstack); | |
2800 | |
2801 shared_lookup_phiargs = NULL; | |
2802 shared_lookup_vops = NULL; | |
2803 shared_lookup_references = NULL; | |
2804 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS); | |
2805 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS); | |
2806 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false); | |
2807 | |
2808 /* RPO_NUMBERS_TEMP is an array in RPO ordering: rpo_numbers_temp[i] = n | |
2809 means that the i'th block in RPO order has index n. We want to map | |
2810 block indices to RPO numbers, so we invert this array. */ | |
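/* E.g. if the blocks in RPO order have indices 2, 4, 3, then
   rpo_numbers_temp = {2, 4, 3} and the loop below yields
   rpo_numbers[2] = 0, rpo_numbers[4] = 1, rpo_numbers[3] = 2.  */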
2811 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++) | |
2812 rpo_numbers[rpo_numbers_temp[j]] = j; | |
2813 | |
2814 XDELETE (rpo_numbers_temp); | |
2815 | |
2816 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top"); | |
2817 | |
2818 /* Create the VN_INFO structures, and initialize value numbers to | |
2819 TOP. */ | |
2820 for (i = 0; i < num_ssa_names; i++) | |
2821 { | |
2822 tree name = ssa_name (i); | |
2823 if (name) | |
2824 { | |
2825 VN_INFO_GET (name)->valnum = VN_TOP; | |
2826 VN_INFO (name)->expr = NULL_TREE; | |
2827 VN_INFO (name)->value_id = 0; | |
2828 } | |
2829 } | |
2830 | |
2831 renumber_gimple_stmt_uids (); | |
2832 | |
2833 /* Create the valid and optimistic value numbering tables. */ | |
2834 valid_info = XCNEW (struct vn_tables_s); | |
2835 allocate_vn_table (valid_info); | |
2836 optimistic_info = XCNEW (struct vn_tables_s); | |
2837 allocate_vn_table (optimistic_info); | |
2838 } | |
2839 | |
2840 void | |
2841 free_scc_vn (void) | |
2842 { | |
2843 size_t i; | |
2844 | |
2845 htab_delete (constant_to_value_id); | |
2846 BITMAP_FREE (constant_value_ids); | |
2847 VEC_free (tree, heap, shared_lookup_phiargs); | |
2848 VEC_free (tree, gc, shared_lookup_vops); | |
2849 VEC_free (vn_reference_op_s, heap, shared_lookup_references); | |
2850 XDELETEVEC (rpo_numbers); | |
2851 | |
2852 for (i = 0; i < num_ssa_names; i++) | |
2853 { | |
2854 tree name = ssa_name (i); | |
2855 if (name | |
2856 && VN_INFO (name)->needs_insertion) | |
2857 release_ssa_name (name); | |
2858 } | |
2859 obstack_free (&vn_ssa_aux_obstack, NULL); | |
2860 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table); | |
2861 | |
2862 VEC_free (tree, heap, sccstack); | |
2863 free_vn_table (valid_info); | |
2864 XDELETE (valid_info); | |
2865 free_vn_table (optimistic_info); | |
2866 XDELETE (optimistic_info); | |
2867 } | |
2868 | |
2869 /* Set the value ids in the valid hash tables. */ | |
2870 | |
2871 static void | |
2872 set_hashtable_value_ids (void) | |
2873 { | |
2874 htab_iterator hi; | |
2875 vn_nary_op_t vno; | |
2876 vn_reference_t vr; | |
2877 vn_phi_t vp; | |
2878 | |
2879 /* Now set the value ids of the things we had put in the hash | |
2880 table. */ | |
2881 | |
2882 FOR_EACH_HTAB_ELEMENT (valid_info->nary, | |
2883 vno, vn_nary_op_t, hi) | |
2884 { | |
2885 if (vno->result) | |
2886 { | |
2887 if (TREE_CODE (vno->result) == SSA_NAME) | |
2888 vno->value_id = VN_INFO (vno->result)->value_id; | |
2889 else if (is_gimple_min_invariant (vno->result)) | |
2890 vno->value_id = get_or_alloc_constant_value_id (vno->result); | |
2891 } | |
2892 } | |
2893 | |
2894 FOR_EACH_HTAB_ELEMENT (valid_info->phis, | |
2895 vp, vn_phi_t, hi) | |
2896 { | |
2897 if (vp->result) | |
2898 { | |
2899 if (TREE_CODE (vp->result) == SSA_NAME) | |
2900 vp->value_id = VN_INFO (vp->result)->value_id; | |
2901 else if (is_gimple_min_invariant (vp->result)) | |
2902 vp->value_id = get_or_alloc_constant_value_id (vp->result); | |
2903 } | |
2904 } | |
2905 | |
2906 FOR_EACH_HTAB_ELEMENT (valid_info->references, | |
2907 vr, vn_reference_t, hi) | |
2908 { | |
2909 if (vr->result) | |
2910 { | |
2911 if (TREE_CODE (vr->result) == SSA_NAME) | |
2912 vr->value_id = VN_INFO (vr->result)->value_id; | |
2913 else if (is_gimple_min_invariant (vr->result)) | |
2914 vr->value_id = get_or_alloc_constant_value_id (vr->result); | |
2915 } | |
2916 } | |
2917 } | |
2918 | |
2919 /* Do SCCVN. Returns true if it finished, false if we bailed out | |
2920 due to resource constraints. */ | |
2921 | |
2922 bool | |
2923 run_scc_vn (bool may_insert_arg) | |
2924 { | |
2925 size_t i; | |
2926 tree param; | |
2927 bool changed = true; | |
2928 | |
2929 may_insert = may_insert_arg; | |
2930 | |
2931 init_scc_vn (); | |
2932 current_info = valid_info; | |
2933 | |
2934 for (param = DECL_ARGUMENTS (current_function_decl); | |
2935 param; | |
2936 param = TREE_CHAIN (param)) | |
2937 { | |
2938 if (gimple_default_def (cfun, param) != NULL) | |
2939 { | |
2940 tree def = gimple_default_def (cfun, param); | |
2941 SSA_VAL (def) = def; | |
2942 } | |
2943 } | |
2944 | |
2945 for (i = 1; i < num_ssa_names; ++i) | |
2946 { | |
2947 tree name = ssa_name (i); | |
2948 if (name | |
2949 && VN_INFO (name)->visited == false | |
2950 && !has_zero_uses (name)) | |
2951 if (!DFS (name)) | |
2952 { | |
2953 free_scc_vn (); | |
2954 may_insert = false; | |
2955 return false; | |
2956 } | |
2957 } | |
2958 | |
2959 /* Initialize the value ids. */ | |
2960 | |
2961 for (i = 1; i < num_ssa_names; ++i) | |
2962 { | |
2963 tree name = ssa_name (i); | |
2964 vn_ssa_aux_t info; | |
2965 if (!name) | |
2966 continue; | |
2967 info = VN_INFO (name); | |
2968 if (info->valnum == name) | |
2969 info->value_id = get_next_value_id (); | |
2970 else if (is_gimple_min_invariant (info->valnum)) | |
2971 info->value_id = get_or_alloc_constant_value_id (info->valnum); | |
2972 } | |
2973 | |
2974 /* Propagate until they stop changing. */ | |
2975 while (changed) | |
2976 { | |
2977 changed = false; | |
2978 for (i = 1; i < num_ssa_names; ++i) | |
2979 { | |
2980 tree name = ssa_name (i); | |
2981 vn_ssa_aux_t info; | |
2982 if (!name) | |
2983 continue; | |
2984 info = VN_INFO (name); | |
2985 if (TREE_CODE (info->valnum) == SSA_NAME | |
2986 && info->valnum != name | |
2987 && info->value_id != VN_INFO (info->valnum)->value_id) | |
2988 { | |
2989 changed = true; | |
2990 info->value_id = VN_INFO (info->valnum)->value_id; | |
2991 } | |
2992 } | |
2993 } | |
2994 | |
2995 set_hashtable_value_ids (); | |
2996 | |
2997 if (dump_file && (dump_flags & TDF_DETAILS)) | |
2998 { | |
2999 fprintf (dump_file, "Value numbers:\n"); | |
3000 for (i = 0; i < num_ssa_names; i++) | |
3001 { | |
3002 tree name = ssa_name (i); | |
3003 if (name | |
3004 && VN_INFO (name)->visited | |
3005 && SSA_VAL (name) != name) | |
3006 { | |
3007 print_generic_expr (dump_file, name, 0); | |
3008 fprintf (dump_file, " = "); | |
3009 print_generic_expr (dump_file, SSA_VAL (name), 0); | |
3010 fprintf (dump_file, "\n"); | |
3011 } | |
3012 } | |
3013 } | |
3014 | |
3015 may_insert = false; | |
3016 return true; | |
3017 } | |
3018 | |
3019 /* Return the maximum value id we have ever seen. */ | |
3020 | |
3021 unsigned int | |
3022 get_max_value_id (void) | |
3023 { | |
3024 return next_value_id; | |
3025 } | |
3026 | |
3027 /* Return the next unique value id. */ | |
3028 | |
3029 unsigned int | |
3030 get_next_value_id (void) | |
3031 { | |
3032 return next_value_id++; | |
3033 } | |
3034 | |
3035 | |
3036 /* Compare two expressions E1 and E2 and return true if they are equal. */ | |
3037 | |
3038 bool | |
3039 expressions_equal_p (tree e1, tree e2) | |
3040 { | |
3041 /* The obvious case. */ | |
3042 if (e1 == e2) | |
3043 return true; | |
3044 | |
3045 /* If only one of them is null, they cannot be equal. */ | |
3046 if (!e1 || !e2) | |
3047 return false; | |
3048 | |
3049 /* Recurse on elements of lists. */ | |
3050 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST) | |
3051 { | |
3052 tree lop1 = e1; | |
3053 tree lop2 = e2; | |
3054 for (lop1 = e1, lop2 = e2; | |
3055 lop1 || lop2; | |
3056 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2)) | |
3057 { | |
3058 if (!lop1 || !lop2) | |
3059 return false; | |
3060 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2))) | |
3061 return false; | |
3062 } | |
3063 return true; | |
3064 } | |
3065 | |
3066 /* Now perform the actual comparison. */ | |
3067 if (TREE_CODE (e1) == TREE_CODE (e2) | |
3068 && operand_equal_p (e1, e2, OEP_PURE_SAME)) | |
3069 return true; | |
3070 | |
3071 return false; | |
3072 } | |
3073 | |
3074 /* Sort the VUSE array so that we can do equality comparisons | |
3075 quicker on two vuse vecs. */ | |
3076 | |
3077 void | |
3078 sort_vuses (VEC (tree,gc) *vuses) | |
3079 { | |
3080 if (VEC_length (tree, vuses) > 1) | |
3081 qsort (VEC_address (tree, vuses), | |
3082 VEC_length (tree, vuses), | |
3083 sizeof (tree), | |
3084 operand_build_cmp); | |
3085 } | |
3086 | |
3087 /* Likewise, but sort a heap-allocated VUSE array, for quicker | |
3088 equality comparisons on two vuse vecs. */ | |
3089 | |
3090 void | |
3091 sort_vuses_heap (VEC (tree,heap) *vuses) | |
3092 { | |
3093 if (VEC_length (tree, vuses) > 1) | |
3094 qsort (VEC_address (tree, vuses), | |
3095 VEC_length (tree, vuses), | |
3096 sizeof (tree), | |
3097 operand_build_cmp); | |
3098 } | |
3099 | |
3100 | |
3101 /* Return true if the nary operation NARY may trap. This is a copy | |
3102 of stmt_could_throw_1_p adjusted to the SCCVN IL. */ | |
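/* E.g. an integer division whose divisor is not known to be nonzero
   may trap, as may a floating-point comparison when signaling NaNs
   are honored; in addition, each operand is checked individually
   with tree_could_trap_p below.  */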
3103 | |
3104 bool | |
3105 vn_nary_may_trap (vn_nary_op_t nary) | |
3106 { | |
3107 tree type; | |
3108 tree rhs2; | |
3109 bool honor_nans = false; | |
3110 bool honor_snans = false; | |
3111 bool fp_operation = false; | |
3112 bool honor_trapv = false; | |
3113 bool handled, ret; | |
3114 unsigned i; | |
3115 | |
3116 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison | |
3117 || TREE_CODE_CLASS (nary->opcode) == tcc_unary | |
3118 || TREE_CODE_CLASS (nary->opcode) == tcc_binary) | |
3119 { | |
3120 type = nary->type; | |
3121 fp_operation = FLOAT_TYPE_P (type); | |
3122 if (fp_operation) | |
3123 { | |
3124 honor_nans = flag_trapping_math && !flag_finite_math_only; | |
3125 honor_snans = flag_signaling_nans != 0; | |
3126 } | |
3127 else if (INTEGRAL_TYPE_P (type) | |
3128 && TYPE_OVERFLOW_TRAPS (type)) | |
3129 honor_trapv = true; | |
3130 } | |
3131 rhs2 = nary->op[1]; | |
3132 ret = operation_could_trap_helper_p (nary->opcode, fp_operation, | |
3133 honor_trapv, | |
3134 honor_nans, honor_snans, rhs2, | |
3135 &handled); | |
3136 if (handled | |
3137 && ret) | |
3138 return true; | |
3139 | |
3140 for (i = 0; i < nary->length; ++i) | |
3141 if (tree_could_trap_p (nary->op[i])) | |
3142 return true; | |
3143 | |
3144 return false; | |
3145 } |