Mercurial > hg > CbC > CbC_gcc
comparison gcc/ipa-polymorphic-call.c @ 111:04ced10e8804
gcc 7
author | kono |
---|---|
date | Fri, 27 Oct 2017 22:46:09 +0900 |
parents | |
children | 84e7813d76e9 |
comparison
equal
deleted
inserted
replaced
68:561a7518be6b | 111:04ced10e8804 |
---|---|
1 /* Analysis of polymorphic call context. | |
2 Copyright (C) 2013-2017 Free Software Foundation, Inc. | |
3 Contributed by Jan Hubicka | |
4 | |
5 This file is part of GCC. | |
6 | |
7 GCC is free software; you can redistribute it and/or modify it under | |
8 the terms of the GNU General Public License as published by the Free | |
9 Software Foundation; either version 3, or (at your option) any later | |
10 version. | |
11 | |
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 for more details. | |
16 | |
17 You should have received a copy of the GNU General Public License | |
18 along with GCC; see the file COPYING3. If not see | |
19 <http://www.gnu.org/licenses/>. */ | |
20 | |
21 #include "config.h" | |
22 #include "system.h" | |
23 #include "coretypes.h" | |
24 #include "backend.h" | |
25 #include "rtl.h" | |
26 #include "tree.h" | |
27 #include "gimple.h" | |
28 #include "tree-pass.h" | |
29 #include "tree-ssa-operands.h" | |
30 #include "streamer-hooks.h" | |
31 #include "cgraph.h" | |
32 #include "data-streamer.h" | |
33 #include "diagnostic.h" | |
34 #include "alias.h" | |
35 #include "fold-const.h" | |
36 #include "calls.h" | |
37 #include "ipa-utils.h" | |
38 #include "tree-dfa.h" | |
39 #include "gimple-pretty-print.h" | |
40 #include "tree-into-ssa.h" | |
41 #include "params.h" | |
42 | |
43 /* Return true when TYPE contains a polymorphic type and thus is interesting | |
44 for devirtualization machinery. */ | |
45 | |
46 static bool contains_type_p (tree, HOST_WIDE_INT, tree, | |
47 bool consider_placement_new = true, | |
48 bool consider_bases = true); | |
49 | |
50 bool | |
51 contains_polymorphic_type_p (const_tree type) | |
52 { | |
53 type = TYPE_MAIN_VARIANT (type); | |
54 | |
55 if (RECORD_OR_UNION_TYPE_P (type)) | |
56 { | |
57 if (TYPE_BINFO (type) | |
58 && polymorphic_type_binfo_p (TYPE_BINFO (type))) | |
59 return true; | |
60 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld)) | |
61 if (TREE_CODE (fld) == FIELD_DECL | |
62 && !DECL_ARTIFICIAL (fld) | |
63 && contains_polymorphic_type_p (TREE_TYPE (fld))) | |
64 return true; | |
65 return false; | |
66 } | |
67 if (TREE_CODE (type) == ARRAY_TYPE) | |
68 return contains_polymorphic_type_p (TREE_TYPE (type)); | |
69 return false; | |
70 } | |
71 | |
72 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE | |
73 at possition CUR_OFFSET within TYPE. | |
74 | |
75 POD can be changed to an instance of a polymorphic type by | |
76 placement new. Here we play safe and assume that any | |
77 non-polymorphic type is POD. */ | |
78 bool | |
79 possible_placement_new (tree type, tree expected_type, | |
80 HOST_WIDE_INT cur_offset) | |
81 { | |
82 if (cur_offset < 0) | |
83 return true; | |
84 return ((TREE_CODE (type) != RECORD_TYPE | |
85 || !TYPE_BINFO (type) | |
86 || cur_offset >= POINTER_SIZE | |
87 || !polymorphic_type_binfo_p (TYPE_BINFO (type))) | |
88 && (!TYPE_SIZE (type) | |
89 || !tree_fits_shwi_p (TYPE_SIZE (type)) | |
90 || (cur_offset | |
91 + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type)) | |
92 : POINTER_SIZE) | |
93 <= tree_to_uhwi (TYPE_SIZE (type))))); | |
94 } | |
95 | |
/* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
   is contained at THIS->OFFSET.  Walk the memory representation of
   THIS->OUTER_TYPE and find the outermost class type that match
   OTR_TYPE or contain OTR_TYPE as a base.  Update THIS
   to represent it.

   If OTR_TYPE is NULL, just find outermost polymorphic type with
   virtual table present at position OFFSET.

   For example when THIS represents type
   class A
     {
       int a;
       class B b;
     }
   and we look for type at offset sizeof(int), we end up with B and offset 0.
   If the same is produced by multiple inheritance, we end up with A and offset
   sizeof(int).

   If we can not find corresponding class, give up by setting
   THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to NULL.
   Return true when lookup was successful.

   When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
   valid only via allocation of new polymorphic type inside by means
   of placement new.

   When CONSIDER_BASES is false, only look for actual fields, not base types
   of TYPE.  */

bool
ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
						       bool consider_placement_new,
						       bool consider_bases)
{
  tree type = outer_type;
  HOST_WIDE_INT cur_offset = offset;
  bool speculative = false;
  bool size_unknown = false;
  unsigned HOST_WIDE_INT otr_type_size = POINTER_SIZE;

  /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set.  */
  if (!outer_type)
    {
      clear_outer_type (otr_type);
      type = otr_type;
      cur_offset = 0;
    }
  /* See if OFFSET points inside OUTER_TYPE.  If it does not, we know
     that the context is either invalid, or the instance type must be
     derived from OUTER_TYPE.

     Because the instance type may contain field whose type is of OUTER_TYPE,
     we can not derive any effective information about it.

     TODO: In the case we know all derived types, we can definitely do better
     here.  */
  else if (TYPE_SIZE (outer_type)
	   && tree_fits_shwi_p (TYPE_SIZE (outer_type))
	   && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
	   && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
    {
      bool der = maybe_derived_type; /* clear_outer_type will reset it.  */
      bool dyn = dynamic;
      clear_outer_type (otr_type);
      type = otr_type;
      cur_offset = 0;

      /* If derived type is not allowed, we know that the context is invalid.
	 For dynamic types, we really do not have information about
	 size of the memory location.  It is possible that completely
	 different type is stored after outer_type.  */
      if (!der && !dyn)
	{
	  clear_speculation ();
	  invalid = true;
	  return false;
	}
    }

  if (otr_type && TYPE_SIZE (otr_type)
      && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
    otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));

  if (!type || offset < 0)
    goto no_useful_type_info;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).

     This loop is performed twice; first time for outer_type and second time
     for speculative_outer_type.  The second run has SPECULATIVE set.  */
  while (true)
    {
      unsigned HOST_WIDE_INT pos, size;
      tree fld;

      /* If we do not know size of TYPE, we need to be more conservative
	 about accepting cases where we can not find EXPECTED_TYPE.
	 Generally the types that do matter here are of constant size.
	 Size_unknown case should be very rare.  */
      if (TYPE_SIZE (type)
	  && tree_fits_shwi_p (TYPE_SIZE (type))
	  && tree_to_shwi (TYPE_SIZE (type)) >= 0)
	size_unknown = false;
      else
	size_unknown = true;

      /* On a match, just return what we found.  */
      if ((otr_type
	   && types_odr_comparable (type, otr_type)
	   && types_same_for_odr (type, otr_type))
	  || (!otr_type
	      && TREE_CODE (type) == RECORD_TYPE
	      && TYPE_BINFO (type)
	      && polymorphic_type_binfo_p (TYPE_BINFO (type))))
	{
	  if (speculative)
	    {
	      /* If we did not match the offset, just give up on speculation.  */
	      if (cur_offset != 0
		  /* Also check if speculation did not end up being same as
		     non-speculation.  */
		  || (types_must_be_same_for_odr (speculative_outer_type,
						  outer_type)
		      && (maybe_derived_type
			  == speculative_maybe_derived_type)))
		clear_speculation ();
	      return true;
	    }
	  else
	    {
	      /* If type is known to be final, do not worry about derived
		 types.  Testing it here may help us to avoid speculation.  */
	      if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
		  && (!in_lto_p || odr_type_p (outer_type))
		  && type_with_linkage_p (outer_type)
		  && type_known_to_have_no_derivations_p (outer_type))
		maybe_derived_type = false;

	      /* Type can not contain itself on an non-zero offset.  In that case
		 just give up.  Still accept the case where size is now known.
		 Either the second copy may appear past the end of type or within
		 the non-POD buffer located inside the variably sized type
		 itself.  */
	      if (cur_offset != 0)
		goto no_useful_type_info;
	      /* If we determined type precisely or we have no clue on
		 speculation, we are done.  */
	      if (!maybe_derived_type || !speculative_outer_type
		  || !speculation_consistent_p (speculative_outer_type,
						speculative_offset,
						speculative_maybe_derived_type,
						otr_type))
		{
		  clear_speculation ();
		  return true;
		}
	      /* Otherwise look into speculation now.  */
	      else
		{
		  speculative = true;
		  type = speculative_outer_type;
		  cur_offset = speculative_offset;
		  continue;
		}
	    }
	}

      /* Walk fields and find corresponding one at OFFSET.  */
      if (TREE_CODE (type) == RECORD_TYPE)
	{
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      if (TREE_CODE (fld) != FIELD_DECL
		  || TREE_TYPE (fld) == error_mark_node)
		continue;

	      pos = int_bit_position (fld);
	      if (pos > (unsigned HOST_WIDE_INT)cur_offset)
		continue;

	      /* Do not consider vptr itself.  Not even for placement new.  */
	      if (!pos && DECL_ARTIFICIAL (fld)
		  && POINTER_TYPE_P (TREE_TYPE (fld))
		  && TYPE_BINFO (type)
		  && polymorphic_type_binfo_p (TYPE_BINFO (type)))
		continue;

	      if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
		goto no_useful_type_info;
	      size = tree_to_uhwi (DECL_SIZE (fld));

	      /* We can always skip types smaller than pointer size:
		 those can not contain a virtual table pointer.

		 Disqualifying fields that are too small to fit OTR_TYPE
		 saves work needed to walk them for no benefit.
		 Because of the way the bases are packed into a class, the
		 field's size may be smaller than type size, so it needs
		 to be done with a care.  */

	      if (pos <= (unsigned HOST_WIDE_INT)cur_offset
		  && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
				     + POINTER_SIZE
		  && (!otr_type
		      || !TYPE_SIZE (TREE_TYPE (fld))
		      || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
		      || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
			  >= cur_offset + otr_type_size))
		break;
	    }

	  if (!fld)
	    goto no_useful_type_info;

	  type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
	  cur_offset -= pos;
	  /* DECL_ARTIFICIAL represents a basetype.  */
	  if (!DECL_ARTIFICIAL (fld))
	    {
	      if (!speculative)
		{
		  outer_type = type;
		  offset = cur_offset;
		  /* As soon as we see a field containing the type,
		     we know we are not looking for derivations.  */
		  maybe_derived_type = false;
		}
	      else
		{
		  speculative_outer_type = type;
		  speculative_offset = cur_offset;
		  speculative_maybe_derived_type = false;
		}
	    }
	  else if (!consider_bases)
	    goto no_useful_type_info;
	}
      else if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));

	  /* Give up if we don't know array field size.
	     Also give up on non-polymorphic types as they are used
	     as buffers for placement new.  */
	  if (!TYPE_SIZE (subtype)
	      || !tree_fits_shwi_p (TYPE_SIZE (subtype))
	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
	      || !contains_polymorphic_type_p (subtype))
	    goto no_useful_type_info;

	  HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));

	  /* We may see buffer for placement new.  In this case the expected type
	     can be bigger than the subtype.  */
	  if (TYPE_SIZE (subtype)
	      && (cur_offset + otr_type_size
		  > tree_to_uhwi (TYPE_SIZE (subtype))))
	    goto no_useful_type_info;

	  cur_offset = new_offset;
	  type = TYPE_MAIN_VARIANT (subtype);
	  if (!speculative)
	    {
	      outer_type = type;
	      offset = cur_offset;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      speculative_outer_type = type;
	      speculative_offset = cur_offset;
	      speculative_maybe_derived_type = false;
	    }
	}
      /* Give up on anything else.  */
      else
	{
	  /* NOTE: this label is also entered by goto from the checks
	     above, not only by falling into this else branch.  */
	no_useful_type_info:
	  if (maybe_derived_type && !speculative
	      && TREE_CODE (outer_type) == RECORD_TYPE
	      && TREE_CODE (otr_type) == RECORD_TYPE
	      && TYPE_BINFO (otr_type)
	      && !offset
	      && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
	    {
	      clear_outer_type (otr_type);
	      if (!speculative_outer_type
		  || !speculation_consistent_p (speculative_outer_type,
						speculative_offset,
						speculative_maybe_derived_type,
						otr_type))
		clear_speculation ();
	      if (speculative_outer_type)
		{
		  speculative = true;
		  type = speculative_outer_type;
		  cur_offset = speculative_offset;
		}
	      else
		return true;
	    }
	  /* We found no way to embed EXPECTED_TYPE in TYPE.
	     We still permit two special cases - placement new and
	     the case of variadic types containing themselves.  */
	  if (!speculative
	      && consider_placement_new
	      && (size_unknown || !type || maybe_derived_type
		  || possible_placement_new (type, otr_type, cur_offset)))
	    {
	      /* In these weird cases we want to accept the context.
		 In non-speculative run we have no useful outer_type info
		 (TODO: we may eventually want to record upper bound on the
		 type size that can be used to prune the walk),
		 but we still want to consider speculation that may
		 give useful info.  */
	      if (!speculative)
		{
		  clear_outer_type (otr_type);
		  if (!speculative_outer_type
		      || !speculation_consistent_p (speculative_outer_type,
						    speculative_offset,
						    speculative_maybe_derived_type,
						    otr_type))
		    clear_speculation ();
		  if (speculative_outer_type)
		    {
		      speculative = true;
		      type = speculative_outer_type;
		      cur_offset = speculative_offset;
		    }
		  else
		    return true;
		}
	      else
		{
		  clear_speculation ();
		  return true;
		}
	    }
	  else
	    {
	      clear_speculation ();
	      if (speculative)
		return true;
	      clear_outer_type (otr_type);
	      invalid = true;
	      return false;
	    }
	}
    }
}
449 | |
450 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. | |
451 CONSIDER_PLACEMENT_NEW makes function to accept cases where OTR_TYPE can | |
452 be built within OUTER_TYPE by means of placement new. CONSIDER_BASES makes | |
453 function to accept cases where OTR_TYPE appears as base of OUTER_TYPE or as | |
454 base of one of fields of OUTER_TYPE. */ | |
455 | |
456 static bool | |
457 contains_type_p (tree outer_type, HOST_WIDE_INT offset, | |
458 tree otr_type, | |
459 bool consider_placement_new, | |
460 bool consider_bases) | |
461 { | |
462 ipa_polymorphic_call_context context; | |
463 | |
464 /* Check that type is within range. */ | |
465 if (offset < 0) | |
466 return false; | |
467 | |
468 /* PR ipa/71207 | |
469 As OUTER_TYPE can be a type which has a diamond virtual inheritance, | |
470 it's not necessary that INNER_TYPE will fit within OUTER_TYPE with | |
471 a given offset. It can happen that INNER_TYPE also contains a base object, | |
472 however it would point to the same instance in the OUTER_TYPE. */ | |
473 | |
474 context.offset = offset; | |
475 context.outer_type = TYPE_MAIN_VARIANT (outer_type); | |
476 context.maybe_derived_type = false; | |
477 context.dynamic = false; | |
478 return context.restrict_to_inner_class (otr_type, consider_placement_new, | |
479 consider_bases); | |
480 } | |
481 | |
482 | |
483 /* Return a FUNCTION_DECL if FN represent a constructor or destructor. | |
484 If CHECK_CLONES is true, also check for clones of ctor/dtors. */ | |
485 | |
486 tree | |
487 polymorphic_ctor_dtor_p (tree fn, bool check_clones) | |
488 { | |
489 if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE | |
490 || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn))) | |
491 { | |
492 if (!check_clones) | |
493 return NULL_TREE; | |
494 | |
495 /* Watch for clones where we constant propagated the first | |
496 argument (pointer to the instance). */ | |
497 fn = DECL_ABSTRACT_ORIGIN (fn); | |
498 if (!fn | |
499 || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE | |
500 || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn))) | |
501 return NULL_TREE; | |
502 } | |
503 | |
504 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST)) | |
505 return NULL_TREE; | |
506 | |
507 return fn; | |
508 } | |
509 | |
510 /* Return a FUNCTION_DECL if BLOCK represents a constructor or destructor. | |
511 If CHECK_CLONES is true, also check for clones of ctor/dtors. */ | |
512 | |
513 tree | |
514 inlined_polymorphic_ctor_dtor_block_p (tree block, bool check_clones) | |
515 { | |
516 tree fn = block_ultimate_origin (block); | |
517 if (fn == NULL || TREE_CODE (fn) != FUNCTION_DECL) | |
518 return NULL_TREE; | |
519 | |
520 return polymorphic_ctor_dtor_p (fn, check_clones); | |
521 } | |
522 | |
523 | |
/* We know that the instance is stored in variable or parameter
   (not dynamically allocated) and we want to disprove the fact
   that it may be in construction at invocation of CALL.

   BASE represents memory location where instance is stored.
   If BASE is NULL, it is assumed to be global memory.
   OUTER_TYPE is known type of the instance or NULL if not
   known.

   For the variable to be in construction we actually need to
   be in constructor of corresponding global variable or
   the inline stack of CALL must contain the constructor.
   Check this condition.  This check works safely only before
   IPA passes, because inline stacks may become out of date
   later.  */

bool
decl_maybe_in_construction_p (tree base, tree outer_type,
			      gimple *call, tree function)
{
  if (outer_type)
    outer_type = TYPE_MAIN_VARIANT (outer_type);
  gcc_assert (!base || DECL_P (base));

  /* After inlining the code unification optimizations may invalidate
     inline stacks.  Also we need to give up on global variables after
     IPA, because addresses of these may have been propagated to their
     constructors.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;

  /* Pure functions can not do any changes on the dynamic type;
     that require writing to memory.  */
  if ((!base || !auto_var_in_fn_p (base, function))
      && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;

  /* Walk the inline stack of CALL: an inlined ctor/dtor whose basetype
     matches (or could match) OUTER_TYPE means the object may be in
     construction at this point.  */
  bool check_clones = !base || is_global_var (base);
  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
       block = BLOCK_SUPERCONTEXT (block))
    if (tree fn = inlined_polymorphic_ctor_dtor_block_p (block, check_clones))
      {
	tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));

	if (!outer_type || !types_odr_comparable (type, outer_type))
	  {
	    /* Types are not ODR-comparable; be conservative if the
	       ctor/dtor's class is itself polymorphic.  */
	    if (TREE_CODE (type) == RECORD_TYPE
		&& TYPE_BINFO (type)
		&& polymorphic_type_binfo_p (TYPE_BINFO (type)))
	      return true;
	  }
	else if (types_same_for_odr (type, outer_type))
	  return true;
      }

  /* For global objects, FUNCTION itself being a ctor/dtor (or a clone
     of one) means the instance may currently be under construction.  */
  if (!base || (VAR_P (base) && is_global_var (base)))
    {
      if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
	  || (!DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)))
	{
	  if (!DECL_ABSTRACT_ORIGIN (function))
	    return false;
	  /* Watch for clones where we constant propagated the first
	     argument (pointer to the instance).  */
	  function = DECL_ABSTRACT_ORIGIN (function);
	  if (!function
	      || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
	      || (!DECL_CXX_CONSTRUCTOR_P (function)
		  && !DECL_CXX_DESTRUCTOR_P (function)))
	    return false;
	}
      tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (function));
      if (!outer_type || !types_odr_comparable (type, outer_type))
	{
	  if (TREE_CODE (type) == RECORD_TYPE
	      && TYPE_BINFO (type)
	      && polymorphic_type_binfo_p (TYPE_BINFO (type)))
	    return true;
	}
      else if (types_same_for_odr (type, outer_type))
	return true;
    }
  return false;
}
609 | |
/* Dump human readable context to F.  If NEWLINE is true, it will be terminated
   by a newline.  */

void
ipa_polymorphic_call_context::dump (FILE *f, bool newline) const
{
  fprintf (f, " ");
  if (invalid)
    fprintf (f, "Call is known to be undefined");
  else
    {
      if (useless_p ())
	fprintf (f, "nothing known");
      if (outer_type || offset)
	{
	  fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
	  print_generic_expr (f, outer_type, TDF_SLIM);
	  if (maybe_derived_type)
	    fprintf (f, " (or a derived type)");
	  if (maybe_in_construction)
	    fprintf (f, " (maybe in construction)");
	  fprintf (f, " offset " HOST_WIDE_INT_PRINT_DEC,
		   offset);
	}
      /* Speculative info is printed after the known outer type, if any.  */
      if (speculative_outer_type)
	{
	  if (outer_type || offset)
	    fprintf (f, " ");
	  fprintf (f, "Speculative outer type:");
	  print_generic_expr (f, speculative_outer_type, TDF_SLIM);
	  if (speculative_maybe_derived_type)
	    fprintf (f, " (or a derived type)");
	  fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC,
		   speculative_offset);
	}
    }
  if (newline)
    fprintf(f, "\n");
}
649 | |
/* Print context to stderr.  */

void
ipa_polymorphic_call_context::debug () const
{
  /* Convenience wrapper around dump, intended for use from a debugger.  */
  dump (stderr);
}
657 | |
/* Stream out the context to OB.  */

void
ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);

  /* The order of the packed bits is a wire format; it must match the
     unpacking order in stream_in exactly.  */
  bp_pack_value (&bp, invalid, 1);
  bp_pack_value (&bp, maybe_in_construction, 1);
  bp_pack_value (&bp, maybe_derived_type, 1);
  bp_pack_value (&bp, speculative_maybe_derived_type, 1);
  bp_pack_value (&bp, dynamic, 1);
  /* Presence flags: the corresponding payloads are written below only
     when the flag is set.  */
  bp_pack_value (&bp, outer_type != NULL, 1);
  bp_pack_value (&bp, offset != 0, 1);
  bp_pack_value (&bp, speculative_outer_type != NULL, 1);
  streamer_write_bitpack (&bp);

  if (outer_type != NULL)
    stream_write_tree (ob, outer_type, true);
  if (offset)
    streamer_write_hwi (ob, offset);
  if (speculative_outer_type != NULL)
    {
      stream_write_tree (ob, speculative_outer_type, true);
      streamer_write_hwi (ob, speculative_offset);
    }
  else
    /* A speculative offset without a speculative type would be lost.  */
    gcc_assert (!speculative_offset);
}
687 | |
/* Stream in the context from IB and DATA_IN.  */

void
ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
					 struct data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);

  /* Unpack in exactly the order stream_out packed.  */
  invalid = bp_unpack_value (&bp, 1);
  maybe_in_construction = bp_unpack_value (&bp, 1);
  maybe_derived_type = bp_unpack_value (&bp, 1);
  speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
  dynamic = bp_unpack_value (&bp, 1);
  bool outer_type_p = bp_unpack_value (&bp, 1);
  bool offset_p = bp_unpack_value (&bp, 1);
  bool speculative_outer_type_p = bp_unpack_value (&bp, 1);

  /* Payloads are present only when their presence bit was set; fields
     without payload are reset to their neutral values.  */
  if (outer_type_p)
    outer_type = stream_read_tree (ib, data_in);
  else
    outer_type = NULL;
  if (offset_p)
    offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    offset = 0;
  if (speculative_outer_type_p)
    {
      speculative_outer_type = stream_read_tree (ib, data_in);
      speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
    }
  else
    {
      speculative_outer_type = NULL;
      speculative_offset = 0;
    }
}
724 | |
725 /* Proudce polymorphic call context for call method of instance | |
726 that is located within BASE (that is assumed to be a decl) at offset OFF. */ | |
727 | |
728 void | |
729 ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off) | |
730 { | |
731 gcc_assert (DECL_P (base)); | |
732 clear_speculation (); | |
733 | |
734 if (!contains_polymorphic_type_p (TREE_TYPE (base))) | |
735 { | |
736 clear_outer_type (); | |
737 offset = off; | |
738 return; | |
739 } | |
740 outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base)); | |
741 offset = off; | |
742 /* Make very conservative assumption that all objects | |
743 may be in construction. | |
744 | |
745 It is up to caller to revisit this via | |
746 get_dynamic_type or decl_maybe_in_construction_p. */ | |
747 maybe_in_construction = true; | |
748 maybe_derived_type = false; | |
749 dynamic = false; | |
750 } | |
751 | |
/* CST is an invariant (address of decl), try to get meaningful
   polymorphic call context for polymorphic call of method
   if instance of OTR_TYPE that is located at offset OFF of this invariant.
   Return FALSE if nothing meaningful can be found.  */

bool
ipa_polymorphic_call_context::set_by_invariant (tree cst,
						tree otr_type,
						HOST_WIDE_INT off)
{
  HOST_WIDE_INT offset2, size, max_size;
  bool reverse;
  tree base;

  invalid = false;
  /* NOTE(review): this discards the incoming OFF argument — every use
     below sees off == 0, and OFFSET2 from get_ref_base_and_extent is
     ignored as well.  This matches the upstream code, but confirm it is
     intentional before relying on a non-zero offset here.  */
  off = 0;
  clear_outer_type (otr_type);

  if (TREE_CODE (cst) != ADDR_EXPR)
    return false;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset2, &size, &max_size, &reverse);
  /* Require a decl whose accessed extent has a known constant size.  */
  if (!DECL_P (base) || max_size == -1 || max_size != size)
    return false;

  /* Only type inconsistent programs can have otr_type that is
     not part of outer type.  */
  if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
    return false;

  set_by_decl (base, off);
  return true;
}
786 | |
/* See if OP is SSA name initialized as a copy or by single assignment.
   If so, walk the SSA graph up.  Because simple PHI conditional is considered
   copy, GLOBAL_VISITED may be used to avoid infinite loop walking the SSA
   graph.  */

static tree
walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
{
  hash_set <tree> *visited = NULL;
  STRIP_NOPS (op);
  while (TREE_CODE (op) == SSA_NAME
	 && !SSA_NAME_IS_DEFAULT_DEF (op)
	 /* We might be called via fold_stmt during cfgcleanup where
	    SSA form need not be up-to-date.  */
	 && !name_registered_for_update_p (op)
	 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
	     || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
    {
      /* Cycle detection: use the caller's persistent set when provided,
	 otherwise a lazily allocated local one.  Revisiting a name
	 terminates the walk.  */
      if (global_visited)
	{
	  if (!*global_visited)
	    *global_visited = new hash_set<tree>;
	  if ((*global_visited)->add (op))
	    goto done;
	}
      else
	{
	  if (!visited)
	    visited = new hash_set<tree>;
	  if (visited->add (op))
	    goto done;
	}
      /* Special case
	 if (ptr == 0)
	   ptr = 0;
	 else
	   ptr = ptr.foo;
	 This pattern is implicitly produced for casts to non-primary
	 bases.  When doing context analysis, we do not really care
	 about the case pointer is NULL, because the call will be
	 undefined anyway.  */
      if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
	{
	  gimple *phi = SSA_NAME_DEF_STMT (op);

	  /* Only single-argument PHIs and two-argument PHIs with one
	     zero argument are treated as copies.  */
	  if (gimple_phi_num_args (phi) > 2)
	    goto done;
	  if (gimple_phi_num_args (phi) == 1)
	    op = gimple_phi_arg_def (phi, 0);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
	    op = gimple_phi_arg_def (phi, 1);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
	    op = gimple_phi_arg_def (phi, 0);
	  else
	    goto done;
	}
      else
	{
	  /* Loads are not transparent copies; stop at them.  */
	  if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
	    goto done;
	  op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
	}
      STRIP_NOPS (op);
    }
done:
  if (visited)
    delete (visited);
  return op;
}
856 | |
/* Create polymorphic call context from IP invariant CST.
   This is typically &global_var.
   OTR_TYPE specify type of polymorphic call or NULL if unknown, OFF
   is offset of call.  */

ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
							    tree otr_type,
							    HOST_WIDE_INT off)
{
  /* Start with no speculation, then derive the rest from the invariant.  */
  clear_speculation ();
  set_by_invariant (cst, otr_type, off);
}
869 | |
/* Build context for pointer REF contained in FNDECL at statement STMT.
   If INSTANCE is non-NULL, return pointer to the object described by
   the context or DECL where context is contained in.

   REF is either an OBJ_TYPE_REF (in which case the polymorphic type is
   taken from it) or a plain pointer expression.  All offsets tracked
   below are in bits.  */

ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
							    tree ref,
							    gimple *stmt,
							    tree *instance)
{
  tree otr_type = NULL;
  tree base_pointer;
  /* Cycle guard for walk_ssa_copies; allocated lazily by the walker and
     freed here on every exit path.  */
  hash_set <tree> *visited = NULL;

  if (TREE_CODE (ref) == OBJ_TYPE_REF)
    {
      otr_type = obj_type_ref_class (ref);
      base_pointer = OBJ_TYPE_REF_OBJECT (ref);
    }
  else
    base_pointer = ref;

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  clear_speculation ();
  clear_outer_type (otr_type);
  invalid = false;

  /* Walk SSA for outer object.  Each iteration either strips one level of
     indirection/offsetting from BASE_POINTER (accumulating into OFFSET) or
     terminates the walk.  */
  while (true)
    {
      base_pointer = walk_ssa_copies (base_pointer, &visited);
      if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT size, max_size;
	  HOST_WIDE_INT offset2;
	  bool reverse;
	  tree base
	    = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
				       &offset2, &size, &max_size, &reverse);

	  /* A constant-sized reference gives us at least a speculative
	     type, even if we cannot walk further.  */
	  if (max_size != -1 && max_size == size)
	    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
				      offset + offset2,
				      true,
				      NULL /* Do not change outer type.  */);

	  /* If this is a varying address, punt.  */
	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
	      && max_size != -1
	      && max_size == size)
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      if (TREE_CODE (base) == MEM_REF)
		{
		  offset_int o = mem_ref_offset (base) * BITS_PER_UNIT;
		  o += offset;
		  o += offset2;
		  /* Give up if the accumulated offset does not fit a
		     HOST_WIDE_INT.  */
		  if (!wi::fits_shwi_p (o))
		    break;
		  base_pointer = TREE_OPERAND (base, 0);
		  offset = o.to_shwi ();
		  outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  if (visited)
		    delete (visited);
		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (otr_type
		      && !contains_type_p (TREE_TYPE (base),
					   offset + offset2, otr_type))
		    {
		      invalid = true;
		      if (instance)
			*instance = base_pointer;
		      return;
		    }
		  set_by_decl (base, offset + offset2);
		  if (outer_type && maybe_in_construction && stmt)
		    maybe_in_construction
		     = decl_maybe_in_construction_p (base,
						     outer_type,
						     stmt,
						     fndecl);
		  if (instance)
		    *instance = base;
		  return;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && TREE_CODE (TREE_OPERAND (base_pointer, 1)) == INTEGER_CST)
	{
	  /* Fold a constant pointer adjustment into OFFSET (converting
	     bytes to bits) and keep walking from its base.  */
	  offset_int o
	    = offset_int::from (wi::to_wide (TREE_OPERAND (base_pointer, 1)),
				SIGNED);
	  o *= BITS_PER_UNIT;
	  o += offset;
	  if (!wi::fits_shwi_p (o))
	    break;
	  offset = o.to_shwi ();
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }

  if (visited)
    delete (visited);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
		      || TREE_CODE (outer_type) == UNION_TYPE);

	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if ((otr_type
	       && !contains_type_p (outer_type, offset,
				    otr_type))
	      || !contains_polymorphic_type_p (outer_type))
	    {
	      outer_type = NULL;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }

	  dynamic = true;

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      maybe_in_construction = true;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      maybe_derived_type = true;
	      maybe_in_construction = false;
	    }
	  if (instance)
	    *instance = base_pointer;
	  return;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (otr_type && !contains_type_p (outer_type, offset,
					    otr_type))
	    {
	      invalid = true;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  /* Non-polymorphic types have no interest for us.  */
	  else if (!otr_type && !contains_polymorphic_type_p (outer_type))
	    {
	      outer_type = NULL;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  maybe_derived_type = false;
	  maybe_in_construction = false;
	  if (instance)
	    *instance = base_pointer;
	  return;
	}
    }

  tree base_type = TREE_TYPE (base_pointer);

  /* A default-definition SSA name that is neither a parameter nor the
     result decl means an uninitialized pointer; the context is invalid.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && !(TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL
	   || TREE_CODE (SSA_NAME_VAR (base_pointer)) == RESULT_DECL))
    {
      invalid = true;
      if (instance)
	*instance = base_pointer;
      return;
    }
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_DEF_STMT (base_pointer)
      && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
    base_type = TREE_TYPE (gimple_assign_rhs1
			    (SSA_NAME_DEF_STMT (base_pointer)));

  if (base_type && POINTER_TYPE_P (base_type))
    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
			      offset,
			      true, NULL /* Do not change type here */);
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to an constructor call prior our reference.
     We do not make this type of flow sensitive analysis yet.  */
  if (instance)
    *instance = base_pointer;
  return;
}
1098 | |
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree instance;
  /* The reference to virtual table pointer used.  */
  tree vtbl_ptr_ref;
  /* Type of the polymorphic call, or NULL_TREE when not known.  */
  tree otr_type;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Offset (in bits) of KNOWN_CURRENT_TYPE within the outer object.  */
  HOST_WIDE_INT known_current_offset;

  /* Set to nonzero if we possibly missed some dynamic type changes and we
     should consider the set to be speculative.  Also counts the may-defs
     walked; csftc_abort_walking_p compares it against a param to bound
     the walk.  */
  unsigned speculative;

  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
  /* Set to true when a store to the vtbl pointer could not be analyzed.  */
  bool seen_unanalyzed_store;
};
1129 | |
1130 /* Return true if STMT is not call and can modify a virtual method table pointer. | |
1131 We take advantage of fact that vtable stores must appear within constructor | |
1132 and destructor functions. */ | |
1133 | |
1134 static bool | |
1135 noncall_stmt_may_be_vtbl_ptr_store (gimple *stmt) | |
1136 { | |
1137 if (is_gimple_assign (stmt)) | |
1138 { | |
1139 tree lhs = gimple_assign_lhs (stmt); | |
1140 | |
1141 if (gimple_clobber_p (stmt)) | |
1142 return false; | |
1143 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs))) | |
1144 { | |
1145 if (flag_strict_aliasing | |
1146 && !POINTER_TYPE_P (TREE_TYPE (lhs))) | |
1147 return false; | |
1148 | |
1149 if (TREE_CODE (lhs) == COMPONENT_REF | |
1150 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))) | |
1151 return false; | |
1152 /* In the future we might want to use get_base_ref_and_offset to find | |
1153 if there is a field corresponding to the offset and if so, proceed | |
1154 almost like if it was a component ref. */ | |
1155 } | |
1156 } | |
1157 | |
1158 /* Code unification may mess with inline stacks. */ | |
1159 if (cfun->after_inlining) | |
1160 return true; | |
1161 | |
1162 /* Walk the inline stack and watch out for ctors/dtors. | |
1163 TODO: Maybe we can require the store to appear in toplevel | |
1164 block of CTOR/DTOR. */ | |
1165 for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK; | |
1166 block = BLOCK_SUPERCONTEXT (block)) | |
1167 if (BLOCK_ABSTRACT_ORIGIN (block) | |
1168 && TREE_CODE (block_ultimate_origin (block)) == FUNCTION_DECL) | |
1169 return inlined_polymorphic_ctor_dtor_block_p (block, false); | |
1170 return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE | |
1171 && (DECL_CXX_CONSTRUCTOR_P (current_function_decl) | |
1172 || DECL_CXX_DESTRUCTOR_P (current_function_decl))); | |
1173 } | |
1174 | |
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE if type changes
   in unknown way or ERROR_MARK_NODE if type is unchanged.
   On success *TYPE_OFFSET is set to the bit offset of the returned type
   within the object (derived from the binfo of the stored vtable).  */

static tree
extr_type_from_vtbl_ptr_store (gimple *stmt, struct type_change_info *tci,
			       HOST_WIDE_INT *type_offset)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;
  bool reverse;

  /* Only a plain single assignment can be a vtbl pointer store.  */
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    {
      if (dump_file)
	fprintf (dump_file, " LHS is not virtual table.\n");
      return NULL_TREE;
    }

  /* Fast path: the store writes exactly through the vptr reference we
     pattern-matched earlier.  */
  if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
    ;
  else
    {
      /* Otherwise verify that the store hits the vptr slot of our
	 instance at the expected offset and with pointer size.  */
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      if (DECL_P (tci->instance))
	{
	  if (base != tci->instance)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, " base:");
		  print_generic_expr (dump_file, base, TDF_SLIM);
		  fprintf (dump_file, " does not match instance:");
		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      return NULL_TREE;
	    }
	}
      else if (TREE_CODE (base) == MEM_REF)
	{
	  if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, " base mem ref:");
		  print_generic_expr (dump_file, base, TDF_SLIM);
		  fprintf (dump_file, " does not match instance:");
		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      return NULL_TREE;
	    }
	  if (!integer_zerop (TREE_OPERAND (base, 1)))
	    {
	      if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, " base mem ref:");
		      print_generic_expr (dump_file, base, TDF_SLIM);
		      fprintf (dump_file, " has non-representable offset:");
		      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
		      fprintf (dump_file, "\n");
		    }
		  return NULL_TREE;
		}
	      else
		/* Fold the MEM_REF byte offset into the bit offset.  */
		offset += tree_to_shwi (TREE_OPERAND (base, 1)) * BITS_PER_UNIT;
	    }
	}
      else if (!operand_equal_p (tci->instance, base, 0)
	       || tci->offset)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, " base:");
	      print_generic_expr (dump_file, base, TDF_SLIM);
	      fprintf (dump_file, " does not match instance:");
	      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
	      fprintf (dump_file, " with offset %i\n", (int)tci->offset);
	    }
	  /* A store past the first vptr slot cannot change our type.  */
	  return tci->offset > POINTER_SIZE ? error_mark_node : NULL_TREE;
	}
      if (offset != tci->offset
	  || size != POINTER_SIZE
	  || max_size != POINTER_SIZE)
	{
	  if (dump_file)
	    fprintf (dump_file, " wrong offset %i!=%i or size %i\n",
		     (int)offset, (int)tci->offset, (int)size);
	  /* If the store provably does not overlap our vptr slot, the
	     type is unchanged; otherwise it changed in an unknown way.  */
	  return offset + POINTER_SIZE <= tci->offset
		 || (max_size != -1
		     && tci->offset + POINTER_SIZE > offset + max_size)
		 ? error_mark_node : NULL;
	}
    }

  tree vtable;
  unsigned HOST_WIDE_INT offset2;

  if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
    {
      if (dump_file)
	fprintf (dump_file, " Failed to lookup binfo\n");
      return NULL;
    }

  tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
					       offset2, vtable);
  if (!binfo)
    {
      if (dump_file)
	fprintf (dump_file, " Construction vtable used\n");
      /* FIXME: We should support construction contexts.  */
      return NULL;
    }

  *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
  return DECL_CONTEXT (vtable);
}
1303 | |
/* Record dynamic type change of TCI to TYPE at bit offset OFFSET.
   Updates TCI->known_current_type/known_current_offset and sets
   TCI->multiple_types_encountered when a conflicting earlier record
   exists.  */

static void
record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
{
  if (dump_file)
    {
      if (type)
	{
	  fprintf (dump_file, " Recording type: ");
	  print_generic_expr (dump_file, type, TDF_SLIM);
	  fprintf (dump_file, " at offset %i\n", (int)offset);
	}
      else
	fprintf (dump_file, " Recording unknown type\n");
    }

  /* If we found a constructor of type that is not polymorphic or
     that may contain the type in question as a field (not as base),
     restrict to the inner class first to make type matching below
     happier.  */
  if (type
      && (offset
	  || (TREE_CODE (type) != RECORD_TYPE
	      || !TYPE_BINFO (type)
	      || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
    {
      ipa_polymorphic_call_context context;

      context.offset = offset;
      context.outer_type = type;
      context.maybe_in_construction = false;
      context.maybe_derived_type = false;
      context.dynamic = true;
      /* If we failed to find the inner type, we know that the call
	 would be undefined for type produced here.  */
      if (!context.restrict_to_inner_class (tci->otr_type))
	{
	  if (dump_file)
	    fprintf (dump_file, " Ignoring; does not contain otr_type\n");
	  return;
	}
      /* Watch for case we reached an POD type and anticipate placement
	 new.  */
      if (!context.maybe_derived_type)
	{
	  type = context.outer_type;
	  offset = context.offset;
	}
    }
  /* Seeing a second, different (type, offset) pair invalidates the
     single-type assumption.  */
  if (tci->type_maybe_changed
      && (!types_same_for_odr (type, tci->known_current_type)
	  || offset != tci->known_current_offset))
    tci->multiple_types_encountered = true;
  tci->known_current_type = TYPE_MAIN_VARIANT (type);
  tci->known_current_offset = offset;
  tci->type_maybe_changed = true;
}
1362 | |
1363 | |
1364 /* The maximum number of may-defs we visit when looking for a must-def | |
1365 that changes the dynamic type in check_stmt_for_type_change. Tuned | |
1366 after the PR12392 testcase which unlimited spends 40% time within | |
1367 these alias walks and 8% with the following limit. */ | |
1368 | |
1369 static inline bool | |
1370 csftc_abort_walking_p (unsigned speculative) | |
1371 { | |
1372 unsigned max = PARAM_VALUE (PARAM_MAX_SPECULATIVE_DEVIRT_MAYDEFS); | |
1373 return speculative > max ? true : false; | |
1374 } | |
1375 | |
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.
   Returning true terminates the vdef walk; returning false continues it.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;
  tree fn;

  /* If we already gave up, just terminate the rest of walk.  */
  if (tci->multiple_types_encountered)
    return true;

  if (is_gimple_call (stmt))
    {
      /* Const/pure calls cannot write memory and thus cannot change
	 the vtable pointer.  */
      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
	return false;

      /* Check for a constructor call.  */
      if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
	  && DECL_CXX_CONSTRUCTOR_P (fn)
	  && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
	  && gimple_call_num_args (stmt))
	{
	  tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
	  tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
	  HOST_WIDE_INT offset = 0, size, max_size;
	  bool reverse;

	  if (dump_file)
	    {
	      fprintf (dump_file, " Checking constructor call: ");
	      print_gimple_stmt (dump_file, stmt, 0);
	    }

	  /* See if THIS parameter seems like instance pointer.  */
	  if (TREE_CODE (op) == ADDR_EXPR)
	    {
	      op = get_ref_base_and_extent (TREE_OPERAND (op, 0), &offset,
					    &size, &max_size, &reverse);
	      if (size != max_size || max_size == -1)
		{
		  tci->speculative++;
		  return csftc_abort_walking_p (tci->speculative);
		}
	      if (op && TREE_CODE (op) == MEM_REF)
		{
		  if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
		    {
		      tci->speculative++;
		      return csftc_abort_walking_p (tci->speculative);
		    }
		  /* MEM_REF offset is in bytes; OFFSET is tracked in bits.  */
		  offset += tree_to_shwi (TREE_OPERAND (op, 1))
			    * BITS_PER_UNIT;
		  op = TREE_OPERAND (op, 0);
		}
	      else if (DECL_P (op))
		;
	      else
		{
		  tci->speculative++;
		  return csftc_abort_walking_p (tci->speculative);
		}
	      op = walk_ssa_copies (op);
	    }
	  if (operand_equal_p (op, tci->instance, 0)
	      && TYPE_SIZE (type)
	      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	      && tree_fits_shwi_p (TYPE_SIZE (type))
	      && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset
	      /* Some inlined constructors may look as follows:
		   _3 = operator new (16);
		   MEM[(struct &)_3] ={v} {CLOBBER};
		   MEM[(struct CompositeClass *)_3]._vptr.CompositeClass
		     = &MEM[(void *)&_ZTV14CompositeClass + 16B];
		   _7 = &MEM[(struct CompositeClass *)_3].object;
		   EmptyClass::EmptyClass (_7);

		 When determining dynamic type of _3 and because we stop at first
		 dynamic type found, we would stop on EmptyClass::EmptyClass (_7).
		 In this case the emptyclass is not even polymorphic and we miss
		 it is contained in an outer type that is polymorphic.  */

	      && (tci->offset == offset || contains_polymorphic_type_p (type)))
	    {
	      record_known_type (tci, type, tci->offset - offset);
	      return true;
	    }
	}
      /* Calls may possibly change dynamic type by placement new.  Assume
	 it will not happen, but make result speculative only.  */
      if (dump_file)
	{
	  fprintf (dump_file, " Function call may change dynamic type:");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      tci->speculative++;
      return csftc_abort_walking_p (tci->speculative);
    }
  /* Check for inlined virtual table store.  */
  else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      HOST_WIDE_INT offset = 0;
      if (dump_file)
	{
	  fprintf (dump_file, " Checking vtbl store: ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
      if (type == error_mark_node)
	/* ERROR_MARK_NODE means the store provably leaves the type
	   unchanged; keep walking.  */
	return false;
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (!type)
	{
	  if (dump_file)
	    fprintf (dump_file, " Unanalyzed store may change type.\n");
	  tci->seen_unanalyzed_store = true;
	  tci->speculative++;
	}
      else
	record_known_type (tci, type, offset);
      return true;
    }
  else
    return false;
}
1508 | |
1509 /* THIS is polymorphic call context obtained from get_polymorphic_context. | |
1510 OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT. | |
1511 INSTANCE is pointer to the outer instance as returned by | |
1512 get_polymorphic_context. To avoid creation of temporary expressions, | |
1513 INSTANCE may also be an declaration of get_polymorphic_context found the | |
1514 value to be in static storage. | |
1515 | |
1516 If the type of instance is not fully determined | |
1517 (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES | |
1518 is set), try to walk memory writes and find the actual construction of the | |
1519 instance. | |
1520 | |
1521 Return true if memory is unchanged from function entry. | |
1522 | |
1523 We do not include this analysis in the context analysis itself, because | |
1524 it needs memory SSA to be fully built and the walk may be expensive. | |
1525 So it is not suitable for use withing fold_stmt and similar uses. */ | |
1526 | |
1527 bool | |
1528 ipa_polymorphic_call_context::get_dynamic_type (tree instance, | |
1529 tree otr_object, | |
1530 tree otr_type, | |
1531 gimple *call) | |
1532 { | |
1533 struct type_change_info tci; | |
1534 ao_ref ao; | |
1535 bool function_entry_reached = false; | |
1536 tree instance_ref = NULL; | |
1537 gimple *stmt = call; | |
1538 /* Remember OFFSET before it is modified by restrict_to_inner_class. | |
1539 This is because we do not update INSTANCE when walking inwards. */ | |
1540 HOST_WIDE_INT instance_offset = offset; | |
1541 tree instance_outer_type = outer_type; | |
1542 | |
1543 if (otr_type) | |
1544 otr_type = TYPE_MAIN_VARIANT (otr_type); | |
1545 | |
1546 /* Walk into inner type. This may clear maybe_derived_type and save us | |
1547 from useless work. It also makes later comparsions with static type | |
1548 easier. */ | |
1549 if (outer_type && otr_type) | |
1550 { | |
1551 if (!restrict_to_inner_class (otr_type)) | |
1552 return false; | |
1553 } | |
1554 | |
1555 if (!maybe_in_construction && !maybe_derived_type) | |
1556 return false; | |
1557 | |
1558 /* If we are in fact not looking at any object object or the instance is | |
1559 some placement new into a random load, give up straight away. */ | |
1560 if (TREE_CODE (instance) == MEM_REF) | |
1561 return false; | |
1562 | |
1563 /* We need to obtain refernce to virtual table pointer. It is better | |
1564 to look it up in the code rather than build our own. This require bit | |
1565 of pattern matching, but we end up verifying that what we found is | |
1566 correct. | |
1567 | |
1568 What we pattern match is: | |
1569 | |
1570 tmp = instance->_vptr.A; // vtbl ptr load | |
1571 tmp2 = tmp[otr_token]; // vtable lookup | |
1572 OBJ_TYPE_REF(tmp2;instance->0) (instance); | |
1573 | |
1574 We want to start alias oracle walk from vtbl pointer load, | |
1575 but we may not be able to identify it, for example, when PRE moved the | |
1576 load around. */ | |
1577 | |
1578 if (gimple_code (call) == GIMPLE_CALL) | |
1579 { | |
1580 tree ref = gimple_call_fn (call); | |
1581 HOST_WIDE_INT offset2, size, max_size; | |
1582 bool reverse; | |
1583 | |
1584 if (TREE_CODE (ref) == OBJ_TYPE_REF) | |
1585 { | |
1586 ref = OBJ_TYPE_REF_EXPR (ref); | |
1587 ref = walk_ssa_copies (ref); | |
1588 | |
1589 /* If call target is already known, no need to do the expensive | |
1590 memory walk. */ | |
1591 if (is_gimple_min_invariant (ref)) | |
1592 return false; | |
1593 | |
1594 /* Check if definition looks like vtable lookup. */ | |
1595 if (TREE_CODE (ref) == SSA_NAME | |
1596 && !SSA_NAME_IS_DEFAULT_DEF (ref) | |
1597 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)) | |
1598 && TREE_CODE (gimple_assign_rhs1 | |
1599 (SSA_NAME_DEF_STMT (ref))) == MEM_REF) | |
1600 { | |
1601 ref = get_base_address | |
1602 (TREE_OPERAND (gimple_assign_rhs1 | |
1603 (SSA_NAME_DEF_STMT (ref)), 0)); | |
1604 ref = walk_ssa_copies (ref); | |
1605 /* Find base address of the lookup and see if it looks like | |
1606 vptr load. */ | |
1607 if (TREE_CODE (ref) == SSA_NAME | |
1608 && !SSA_NAME_IS_DEFAULT_DEF (ref) | |
1609 && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))) | |
1610 { | |
1611 tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref)); | |
1612 tree base_ref | |
1613 = get_ref_base_and_extent (ref_exp, &offset2, &size, | |
1614 &max_size, &reverse); | |
1615 | |
1616 /* Finally verify that what we found looks like read from | |
1617 OTR_OBJECT or from INSTANCE with offset OFFSET. */ | |
1618 if (base_ref | |
1619 && ((TREE_CODE (base_ref) == MEM_REF | |
1620 && ((offset2 == instance_offset | |
1621 && TREE_OPERAND (base_ref, 0) == instance) | |
1622 || (!offset2 | |
1623 && TREE_OPERAND (base_ref, 0) | |
1624 == otr_object))) | |
1625 || (DECL_P (instance) && base_ref == instance | |
1626 && offset2 == instance_offset))) | |
1627 { | |
1628 stmt = SSA_NAME_DEF_STMT (ref); | |
1629 instance_ref = ref_exp; | |
1630 } | |
1631 } | |
1632 } | |
1633 } | |
1634 } | |
1635 | |
1636 /* If we failed to look up the reference in code, build our own. */ | |
1637 if (!instance_ref) | |
1638 { | |
1639 /* If the statement in question does not use memory, we can't tell | |
1640 anything. */ | |
1641 if (!gimple_vuse (stmt)) | |
1642 return false; | |
1643 ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL); | |
1644 } | |
1645 else | |
1646 /* Otherwise use the real reference. */ | |
1647 ao_ref_init (&ao, instance_ref); | |
1648 | |
1649 /* We look for vtbl pointer read. */ | |
1650 ao.size = POINTER_SIZE; | |
1651 ao.max_size = ao.size; | |
1652 /* We are looking for stores to vptr pointer within the instance of | |
1653 outer type. | |
1654 TODO: The vptr pointer type is globally known, we probably should | |
1655 keep it and do that even when otr_type is unknown. */ | |
1656 if (otr_type) | |
1657 { | |
1658 ao.base_alias_set | |
1659 = get_alias_set (outer_type ? outer_type : otr_type); | |
1660 ao.ref_alias_set | |
1661 = get_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type)))); | |
1662 } | |
1663 | |
1664 if (dump_file) | |
1665 { | |
1666 fprintf (dump_file, "Determining dynamic type for call: "); | |
1667 print_gimple_stmt (dump_file, call, 0); | |
1668 fprintf (dump_file, " Starting walk at: "); | |
1669 print_gimple_stmt (dump_file, stmt, 0); | |
1670 fprintf (dump_file, " instance pointer: "); | |
1671 print_generic_expr (dump_file, otr_object, TDF_SLIM); | |
1672 fprintf (dump_file, " Outer instance pointer: "); | |
1673 print_generic_expr (dump_file, instance, TDF_SLIM); | |
1674 fprintf (dump_file, " offset: %i (bits)", (int)instance_offset); | |
1675 fprintf (dump_file, " vtbl reference: "); | |
1676 print_generic_expr (dump_file, instance_ref, TDF_SLIM); | |
1677 fprintf (dump_file, "\n"); | |
1678 } | |
1679 | |
1680 tci.offset = instance_offset; | |
1681 tci.instance = instance; | |
1682 tci.vtbl_ptr_ref = instance_ref; | |
1683 tci.known_current_type = NULL_TREE; | |
1684 tci.known_current_offset = 0; | |
1685 tci.otr_type = otr_type; | |
1686 tci.type_maybe_changed = false; | |
1687 tci.multiple_types_encountered = false; | |
1688 tci.speculative = 0; | |
1689 tci.seen_unanalyzed_store = false; | |
1690 | |
1691 walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change, | |
1692 &tci, NULL, &function_entry_reached); | |
1693 | |
1694 /* If we did not find any type changing statements, we may still drop | |
1695 maybe_in_construction flag if the context already have outer type. | |
1696 | |
1697 Here we make special assumptions about both constructors and | |
1698 destructors which are all the functions that are allowed to alter the | |
1699 VMT pointers. It assumes that destructors begin with assignment into | |
1700 all VMT pointers and that constructors essentially look in the | |
1701 following way: | |
1702 | |
1703 1) The very first thing they do is that they call constructors of | |
1704 ancestor sub-objects that have them. | |
1705 | |
1706 2) Then VMT pointers of this and all its ancestors is set to new | |
1707 values corresponding to the type corresponding to the constructor. | |
1708 | |
1709 3) Only afterwards, other stuff such as constructor of member | |
1710 sub-objects and the code written by the user is run. Only this may | |
1711 include calling virtual functions, directly or indirectly. | |
1712 | |
1713 4) placement new can not be used to change type of non-POD statically | |
1714 allocated variables. | |
1715 | |
1716 There is no way to call a constructor of an ancestor sub-object in any | |
1717 other way. | |
1718 | |
1719 This means that we do not have to care whether constructors get the | |
1720 correct type information because they will always change it (in fact, | |
1721 if we define the type to be given by the VMT pointer, it is undefined). | |
1722 | |
1723 The most important fact to derive from the above is that if, for some | |
1724 statement in the section 3, we try to detect whether the dynamic type | |
1725 has changed, we can safely ignore all calls as we examine the function | |
1726 body backwards until we reach statements in section 2 because these | |
1727 calls cannot be ancestor constructors or destructors (if the input is | |
1728 not bogus) and so do not change the dynamic type (this holds true only | |
1729 for automatically allocated objects but at the moment we devirtualize | |
1730 only these). We then must detect that statements in section 2 change | |
1731 the dynamic type and can try to derive the new type. That is enough | |
1732 and we can stop, we will never see the calls into constructors of | |
1733 sub-objects in this code. | |
1734 | |
1735 Therefore if the static outer type was found (outer_type) | |
1736 we can safely ignore tci.speculative that is set on calls and give up | |
1737 only if there was dyanmic type store that may affect given variable | |
1738 (seen_unanalyzed_store) */ | |
1739 | |
1740 if (!tci.type_maybe_changed | |
1741 || (outer_type | |
1742 && !dynamic | |
1743 && !tci.seen_unanalyzed_store | |
1744 && !tci.multiple_types_encountered | |
1745 && ((offset == tci.offset | |
1746 && types_same_for_odr (tci.known_current_type, | |
1747 outer_type)) | |
1748 || (instance_offset == offset | |
1749 && types_same_for_odr (tci.known_current_type, | |
1750 instance_outer_type))))) | |
1751 { | |
1752 if (!outer_type || tci.seen_unanalyzed_store) | |
1753 return false; | |
1754 if (maybe_in_construction) | |
1755 maybe_in_construction = false; | |
1756 if (dump_file) | |
1757 fprintf (dump_file, " No dynamic type change found.\n"); | |
1758 return true; | |
1759 } | |
1760 | |
1761 if (tci.known_current_type | |
1762 && !function_entry_reached | |
1763 && !tci.multiple_types_encountered) | |
1764 { | |
1765 if (!tci.speculative) | |
1766 { | |
1767 outer_type = TYPE_MAIN_VARIANT (tci.known_current_type); | |
1768 offset = tci.known_current_offset; | |
1769 dynamic = true; | |
1770 maybe_in_construction = false; | |
1771 maybe_derived_type = false; | |
1772 if (dump_file) | |
1773 fprintf (dump_file, " Determined dynamic type.\n"); | |
1774 } | |
1775 else if (!speculative_outer_type | |
1776 || speculative_maybe_derived_type) | |
1777 { | |
1778 speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type); | |
1779 speculative_offset = tci.known_current_offset; | |
1780 speculative_maybe_derived_type = false; | |
1781 if (dump_file) | |
1782 fprintf (dump_file, " Determined speculative dynamic type.\n"); | |
1783 } | |
1784 } | |
1785 else if (dump_file) | |
1786 { | |
1787 fprintf (dump_file, " Found multiple types%s%s\n", | |
1788 function_entry_reached ? " (function entry reached)" : "", | |
1789 function_entry_reached ? " (multiple types encountered)" : ""); | |
1790 } | |
1791 | |
1792 return false; | |
1793 } | |
1794 | |
/* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and
   SPEC_MAYBE_DERIVED_TYPE seems consistent (and useful) with what we
   already have in the non-speculative context.

   Return true if the speculation is worth keeping; false if it is
   redundant, contradictory or otherwise useless for devirtualization.  */

bool
ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
							HOST_WIDE_INT spec_offset,
							bool spec_maybe_derived_type,
							tree otr_type) const
{
  if (!flag_devirtualize_speculatively)
    return false;

  /* Non-polymorphic types are useless for deriving likely polymorphic
     call targets.  */
  if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
    return false;

  /* If we know nothing, speculation is always good.  */
  if (!outer_type)
    return true;

  /* Speculation is only useful to avoid derived types.
     This is not 100% true for placement new, where the outer context may
     turn out to be useless, but ignore these for now.  */
  if (!maybe_derived_type)
    return false;

  /* If types agree, speculation is consistent, but it makes sense only
     when it says something new: i.e. it rules out the derivations the
     non-speculative context still allows.  */
  if (types_must_be_same_for_odr (spec_outer_type, outer_type))
    return maybe_derived_type && !spec_maybe_derived_type;

  /* If speculation does not contain the type in question, ignore it.  */
  if (otr_type
      && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
    return false;

  /* If outer type already contains speculation as a field,
     it is useless.  We already know from OUTER_TYPE
     SPEC_TYPE and that it is not in the construction.  */
  if (contains_type_p (outer_type, offset - spec_offset,
		       spec_outer_type, false, false))
    return false;

  /* If speculative outer type is not more specified than outer
     type, just give up.
     We can only decide this safely if we can compare types with OUTER_TYPE.
     */
  if ((!in_lto_p || odr_type_p (outer_type))
      && !contains_type_p (spec_outer_type,
			   spec_offset - offset,
			   outer_type, false))
    return false;
  return true;
}
1850 | |
/* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET,
   NEW_MAYBE_DERIVED_TYPE.
   If OTR_TYPE is set, assume the context is used with OTR_TYPE.

   Return true if the stored speculation changed (including the case where
   contradictory speculation was dropped).  */

bool
ipa_polymorphic_call_context::combine_speculation_with
   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
    tree otr_type)
{
  if (!new_outer_type)
    return false;

  /* restrict_to_inner_class may eliminate wrong speculation making our job
     easier.  */
  if (otr_type)
    restrict_to_inner_class (otr_type);

  /* Ignore incoming speculation that contradicts or adds nothing to the
     non-speculative part of the context.  */
  if (!speculation_consistent_p (new_outer_type, new_offset,
				 new_maybe_derived_type, otr_type))
    return false;

  /* New speculation is a win in case we have no speculation or new
     speculation does not consider derivations.  */
  if (!speculative_outer_type
      || (speculative_maybe_derived_type
	  && !new_maybe_derived_type))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;
      return true;
    }
  else if (types_must_be_same_for_odr (speculative_outer_type,
				       new_outer_type))
    {
      if (speculative_offset != new_offset)
	{
	  /* OK we have two contexts that seems valid but they disagree,
	     just give up.

	     This is not a lattice operation, so we may want to drop it later.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "Speculative outer types match, "
		     "offset mismatch -> invalid speculation\n");
	  clear_speculation ();
	  return true;
	}
      else
	{
	  /* Same type and offset; we can only sharpen the derivation flag.  */
	  if (speculative_maybe_derived_type && !new_maybe_derived_type)
	    {
	      speculative_maybe_derived_type = false;
	      return true;
	    }
	  else
	    return false;
	}
    }
  /* Choose type that contains the other.  This one either contains the outer
     as a field (thus giving exactly one target) or is deeper in the type
     hierarchy.  */
  else if (speculative_outer_type
	   && speculative_maybe_derived_type
	   && (new_offset > speculative_offset
	       || (new_offset == speculative_offset
		   && contains_type_p (new_outer_type,
				       0, speculative_outer_type, false))))
    {
      /* Save the current speculation so we can revert if the replacement
	 turns out to be inconsistent after restriction.  */
      tree old_outer_type = speculative_outer_type;
      HOST_WIDE_INT old_offset = speculative_offset;
      bool old_maybe_derived_type = speculative_maybe_derived_type;

      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;

      if (otr_type)
	restrict_to_inner_class (otr_type);

      /* If the speculation turned out to make no sense, revert to sensible
	 one.  */
      if (!speculative_outer_type)
	{
	  speculative_outer_type = old_outer_type;
	  speculative_offset = old_offset;
	  speculative_maybe_derived_type = old_maybe_derived_type;
	  return false;
	}
      return (old_offset != speculative_offset
	      || old_maybe_derived_type != speculative_maybe_derived_type
	      || types_must_be_same_for_odr (speculative_outer_type,
					     new_outer_type));
    }
  return false;
}
1947 | |
/* Make speculation less specific so
   NEW_OUTER_TYPE, NEW_OFFSET, NEW_MAYBE_DERIVED_TYPE is also included.
   If OTR_TYPE is set, assume the context is used with OTR_TYPE.

   This is the lattice "meet": after the call the stored speculation must be
   valid for both the old context and the one described by the arguments.
   Return true if the stored speculation changed.  */

bool
ipa_polymorphic_call_context::meet_speculation_with
   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
    tree otr_type)
{
  /* Meeting with "no speculation" forces us to drop ours.  */
  if (!new_outer_type && speculative_outer_type)
    {
      clear_speculation ();
      return true;
    }

  /* restrict_to_inner_class may eliminate wrong speculation making our job
     easier.  */
  if (otr_type)
    restrict_to_inner_class (otr_type);

  /* Nothing to do if we carry no (useful) speculation ourselves.  */
  if (!speculative_outer_type
      || !speculation_consistent_p (speculative_outer_type,
				    speculative_offset,
				    speculative_maybe_derived_type,
				    otr_type))
    return false;

  /* If the incoming speculation is useless, the meet is "no speculation".  */
  if (!speculation_consistent_p (new_outer_type, new_offset,
				 new_maybe_derived_type, otr_type))
    {
      clear_speculation ();
      return true;
    }

  else if (types_must_be_same_for_odr (speculative_outer_type,
				       new_outer_type))
    {
      if (speculative_offset != new_offset)
	{
	  clear_speculation ();
	  return true;
	}
      else
	{
	  /* Same type and offset; the meet may only widen the derivation
	     flag.  */
	  if (!speculative_maybe_derived_type && new_maybe_derived_type)
	    {
	      speculative_maybe_derived_type = true;
	      return true;
	    }
	  else
	    return false;
	}
    }
  /* See if one type contains the other as a field (not base).  */
  else if (contains_type_p (new_outer_type, new_offset - speculative_offset,
			    speculative_outer_type, false, false))
    return false;
  else if (contains_type_p (speculative_outer_type,
			    speculative_offset - new_offset,
			    new_outer_type, false, false))
    {
      /* Keep the contained (smaller) type: it is what both agree on.  */
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;
      return true;
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
  else if (contains_type_p (new_outer_type,
			    new_offset - speculative_offset,
			    speculative_outer_type, false, true))
    {
      if (!speculative_maybe_derived_type)
	{
	  speculative_maybe_derived_type = true;
	  return true;
	}
      return false;
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
  else if (contains_type_p (speculative_outer_type,
			    speculative_offset - new_offset, new_outer_type, false, true))
    {
      /* Fall back to the base type; allow derivations from it.  */
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = true;
      return true;
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Giving up on speculative meet\n");
      clear_speculation ();
      return true;
    }
}
2043 | |
/* Assume that both THIS and a given context is valid and strengthen THIS
   if possible.  Return true if any strengthening was made.
   If actual type the context is being used in is known, OTR_TYPE should be
   set accordingly.  This improves quality of combined result.  */

bool
ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
					    tree otr_type)
{
  bool updated = false;

  if (ctx.useless_p () || invalid)
    return false;

  /* Restricting context to inner type makes merging easier, however do not
     do that unless we know how the context is used (OTR_TYPE is non-NULL)  */
  if (otr_type && !invalid && !ctx.invalid)
    {
      restrict_to_inner_class (otr_type);
      ctx.restrict_to_inner_class (otr_type);
      if (invalid)
	return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Polymorphic call context combine:");
      dump (dump_file);
      fprintf (dump_file, "With context:                    ");
      ctx.dump (dump_file);
      if (otr_type)
	{
	  fprintf (dump_file, "To be used with type:            ");
	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  /* If call is known to be invalid, we are done.  */
  if (ctx.invalid)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "-> Invalid context\n");
      goto invalidate;
    }

  if (!ctx.outer_type)
    ;
  else if (!outer_type)
    {
      /* We know nothing; adopt CTX wholesale.  */
      outer_type = ctx.outer_type;
      offset = ctx.offset;
      dynamic = ctx.dynamic;
      maybe_in_construction = ctx.maybe_in_construction;
      maybe_derived_type = ctx.maybe_derived_type;
      updated = true;
    }
  /* If types are known to be same, merging is quite easy.  */
  else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
    {
      if (offset != ctx.offset
	  && TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
	  clear_speculation ();
	  clear_outer_type ();
	  invalid = true;
	  return true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Outer types match, merging flags\n");
      /* Combine is an intersection: each flag may only become more
	 precise (false) here.  */
      if (maybe_in_construction && !ctx.maybe_in_construction)
	{
	  updated = true;
	  maybe_in_construction = false;
	}
      if (maybe_derived_type && !ctx.maybe_derived_type)
	{
	  updated = true;
	  maybe_derived_type = false;
	}
      if (dynamic && !ctx.dynamic)
	{
	  updated = true;
	  dynamic = false;
	}
    }
  /* If we know the type precisely, there is not much to improve.  */
  else if (!maybe_derived_type && !maybe_in_construction
	   && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
    {
      /* It may be easy to check if second context permits the first
	 and set INVALID otherwise.  This is not easy to do in general;
	 contains_type_p may return false negatives for non-comparable
	 types.

	 If OTR_TYPE is known, we however can expect that
	 restrict_to_inner_class should have discovered the same base
	 type.  */
      if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Contextes disagree -> invalid\n");
	  goto invalidate;
	}
    }
  /* See if one type contains the other as a field (not base).
     In this case we want to choose the wider type, because it contains
     more information.  */
  else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
			    outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type contain the first as a field\n");

      if (maybe_derived_type)
	{
	  outer_type = ctx.outer_type;
	  maybe_derived_type = ctx.maybe_derived_type;
	  offset = ctx.offset;
	  dynamic = ctx.dynamic;
	  updated = true;
	}

      /* If we do not know how the context is being used, we can
	 not clear MAYBE_IN_CONSTRUCTION because it may be offset
	 to other component of OUTER_TYPE later and we know nothing
	 about it.  */
      if (otr_type && maybe_in_construction
	  && !ctx.maybe_in_construction)
	{
	  maybe_in_construction = false;
	  updated = true;
	}
    }
  else if (contains_type_p (outer_type, offset - ctx.offset,
			    ctx.outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type contain the second as a field\n");

      if (otr_type && maybe_in_construction
	  && !ctx.maybe_in_construction)
	{
	  maybe_in_construction = false;
	  updated = true;
	}
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
  else if (contains_type_p (ctx.outer_type,
			    ctx.offset - offset, outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type is base of second\n");
      if (!maybe_derived_type)
	{
	  if (!ctx.maybe_in_construction
	      && types_odr_comparable (outer_type, ctx.outer_type))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Second context does not permit base -> invalid\n");
	      goto invalidate;
	    }
	}
      /* Pick variant deeper in the hierarchy.  */
      else
	{
	  outer_type = ctx.outer_type;
	  maybe_in_construction = ctx.maybe_in_construction;
	  maybe_derived_type = ctx.maybe_derived_type;
	  offset = ctx.offset;
	  dynamic = ctx.dynamic;
	  updated = true;
	}
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
  else if (contains_type_p (outer_type,
			    offset - ctx.offset, ctx.outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type is base of first\n");
      if (!ctx.maybe_derived_type)
	{
	  if (!maybe_in_construction
	      && types_odr_comparable (outer_type, ctx.outer_type))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "First context does not permit base -> invalid\n");
	      goto invalidate;
	    }
	  /* Pick the base type.  */
	  else if (maybe_in_construction)
	    {
	      outer_type = ctx.outer_type;
	      maybe_in_construction = ctx.maybe_in_construction;
	      maybe_derived_type = ctx.maybe_derived_type;
	      offset = ctx.offset;
	      dynamic = ctx.dynamic;
	      updated = true;
	    }
	}
    }
  /* TODO handle merging using hierarchy.  */
  else if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Giving up on merge\n");

  /* Speculative parts of the contexts are combined independently.  */
  updated |= combine_speculation_with (ctx.speculative_outer_type,
				       ctx.speculative_offset,
				       ctx.speculative_maybe_derived_type,
				       otr_type);

  if (updated && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Updated as:                      ");
      dump (dump_file);
      fprintf (dump_file, "\n");
    }
  return updated;

invalidate:
  invalid = true;
  clear_speculation ();
  clear_outer_type ();
  return true;
}
2271 | |
2272 /* Take non-speculative info, merge it with speculative and clear speculation. | |
2273 Used when we no longer manage to keep track of actual outer type, but we | |
2274 think it is still there. | |
2275 | |
2276 If OTR_TYPE is set, the transformation can be done more effectively assuming | |
2277 that context is going to be used only that way. */ | |
2278 | |
2279 void | |
2280 ipa_polymorphic_call_context::make_speculative (tree otr_type) | |
2281 { | |
2282 tree spec_outer_type = outer_type; | |
2283 HOST_WIDE_INT spec_offset = offset; | |
2284 bool spec_maybe_derived_type = maybe_derived_type; | |
2285 | |
2286 if (invalid) | |
2287 { | |
2288 invalid = false; | |
2289 clear_outer_type (); | |
2290 clear_speculation (); | |
2291 return; | |
2292 } | |
2293 if (!outer_type) | |
2294 return; | |
2295 clear_outer_type (); | |
2296 combine_speculation_with (spec_outer_type, spec_offset, | |
2297 spec_maybe_derived_type, | |
2298 otr_type); | |
2299 } | |
2300 | |
2301 /* Use when we can not track dynamic type change. This speculatively assume | |
2302 type change is not happening. */ | |
2303 | |
2304 void | |
2305 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor, | |
2306 tree otr_type) | |
2307 { | |
2308 if (dynamic) | |
2309 make_speculative (otr_type); | |
2310 else if (in_poly_cdtor) | |
2311 maybe_in_construction = true; | |
2312 } | |
2313 | |
/* Return TRUE if this context conveys the same information as X.  */

bool
ipa_polymorphic_call_context::equal_to
    (const ipa_polymorphic_call_context &x) const
{
  /* Useless and invalid contexts compare only against the same kind.  */
  if (useless_p ())
    return x.useless_p ();
  if (invalid)
    return x.invalid;
  if (x.useless_p () || x.invalid)
    return false;

  if (outer_type)
    {
      if (!x.outer_type
	  || !types_odr_comparable (outer_type, x.outer_type)
	  || !types_same_for_odr (outer_type, x.outer_type)
	  || offset != x.offset
	  || maybe_in_construction != x.maybe_in_construction
	  || maybe_derived_type != x.maybe_derived_type
	  || dynamic != x.dynamic)
	return false;
    }
  else if (x.outer_type)
    return false;


  /* Compare speculation only when ours is consistent (inconsistent
     speculation is treated as absent).  */
  if (speculative_outer_type
      && speculation_consistent_p (speculative_outer_type, speculative_offset,
				   speculative_maybe_derived_type, NULL_TREE))
    {
      if (!x.speculative_outer_type)
	return false;

      if (!types_odr_comparable (speculative_outer_type,
				 x.speculative_outer_type)
	  || !types_same_for_odr  (speculative_outer_type,
				   x.speculative_outer_type)
	  || speculative_offset != x.speculative_offset
	  || speculative_maybe_derived_type != x.speculative_maybe_derived_type)
	return false;
    }
  /* We have no (consistent) speculation; X must not have any either.  */
  else if (x.speculative_outer_type
	   && x.speculation_consistent_p (x.speculative_outer_type,
					  x.speculative_offset,
					  x.speculative_maybe_derived_type,
					  NULL))
    return false;

  return true;
}
2366 | |
/* Modify context to be strictly less restrictive than CTX.

   This is the lattice "meet": afterwards THIS must be valid for any object
   that either THIS or CTX was valid for.  Return true if any change was
   made.  */

bool
ipa_polymorphic_call_context::meet_with (ipa_polymorphic_call_context ctx,
					 tree otr_type)
{
  bool updated = false;

  if (useless_p () || ctx.invalid)
    return false;

  /* Restricting context to inner type makes merging easier, however do not
     do that unless we know how the context is used (OTR_TYPE is non-NULL)  */
  if (otr_type && !useless_p () && !ctx.useless_p ())
    {
      restrict_to_inner_class (otr_type);
      ctx.restrict_to_inner_class (otr_type);
      if (invalid)
	return false;
    }

  if (equal_to (ctx))
    return false;

  /* Meeting with a useless context, or being invalid ourselves, gives up
     all our information.  */
  if (ctx.useless_p () || invalid)
    {
      *this = ctx;
      return true;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Polymorphic call context meet:");
      dump (dump_file);
      fprintf (dump_file, "With context:                    ");
      ctx.dump (dump_file);
      if (otr_type)
	{
	  fprintf (dump_file, "To be used with type:            ");
	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  /* DYNAMIC may only widen (become true) in a meet.  */
  if (!dynamic && ctx.dynamic)
    {
      dynamic = true;
      updated = true;
    }

  /* Merge the outer types; keep only what both contexts guarantee.  */
  if (!outer_type)
    ;
  else if (!ctx.outer_type)
    {
      clear_outer_type ();
      updated = true;
    }
  /* If types are known to be same, merging is quite easy.  */
  else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
    {
      if (offset != ctx.offset
	  && TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Outer types match, offset mismatch -> clearing\n");
	  clear_outer_type ();
	  return true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Outer types match, merging flags\n");
      /* For a meet the flags may only become less precise (true).  */
      if (!maybe_in_construction && ctx.maybe_in_construction)
	{
	  updated = true;
	  maybe_in_construction = true;
	}
      if (!maybe_derived_type && ctx.maybe_derived_type)
	{
	  updated = true;
	  maybe_derived_type = true;
	}
      if (!dynamic && ctx.dynamic)
	{
	  updated = true;
	  dynamic = true;
	}
    }
  /* See if one type contains the other as a field (not base).  */
  else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
			    outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type contain the first as a field\n");

      /* The second type is more specified, so we keep the first.
	 We need to set DYNAMIC flag to avoid declaring context INVALID
	 if OFFSET ends up being out of range.  */
      if (!dynamic
	  && (ctx.dynamic
	      || (!otr_type
		  && (!TYPE_SIZE (ctx.outer_type)
		      || !TYPE_SIZE (outer_type)
		      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
					   TYPE_SIZE (outer_type), 0)))))
	{
	  dynamic = true;
	  updated = true;
	}
    }
  else if (contains_type_p (outer_type, offset - ctx.offset,
			    ctx.outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type contain the second as a field\n");

      /* NOTE(review): the conditional DYNAMIC widening below is immediately
	 overwritten by the unconditional "dynamic = ctx.dynamic;" a few lines
	 down, which can even lower DYNAMIC from true back to false.  Looks
	 like the assignment should be conditional (or the block above
	 dropped) — confirm against upstream ipa-polymorphic-call.c.  */
      if (!dynamic
	  && (ctx.dynamic
	      || (!otr_type
		  && (!TYPE_SIZE (ctx.outer_type)
		      || !TYPE_SIZE (outer_type)
		      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
					   TYPE_SIZE (outer_type), 0)))))
	dynamic = true;
      outer_type = ctx.outer_type;
      offset = ctx.offset;
      dynamic = ctx.dynamic;
      maybe_in_construction = ctx.maybe_in_construction;
      maybe_derived_type = ctx.maybe_derived_type;
      updated = true;
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
  else if (contains_type_p (ctx.outer_type,
			    ctx.offset - offset, outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type is base of second\n");
      if (!maybe_derived_type)
	{
	  maybe_derived_type = true;
	  updated = true;
	}
      if (!maybe_in_construction && ctx.maybe_in_construction)
	{
	  maybe_in_construction = true;
	  updated = true;
	}
      if (!dynamic && ctx.dynamic)
	{
	  dynamic = true;
	  updated = true;
	}
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
  else if (contains_type_p (outer_type,
			    offset - ctx.offset, ctx.outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type is base of first\n");
      outer_type = ctx.outer_type;
      offset = ctx.offset;
      updated = true;
      if (!maybe_derived_type)
	maybe_derived_type = true;
      if (!maybe_in_construction && ctx.maybe_in_construction)
	maybe_in_construction = true;
      if (!dynamic && ctx.dynamic)
	dynamic = true;
    }
  /* TODO handle merging using hierarchy.  */
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Giving up on meet\n");
      clear_outer_type ();
      updated = true;
    }

  updated |= meet_speculation_with (ctx.speculative_outer_type,
				    ctx.speculative_offset,
				    ctx.speculative_maybe_derived_type,
				    otr_type);

  if (updated && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Updated as:                      ");
      dump (dump_file);
      fprintf (dump_file, "\n");
    }
  return updated;
}