gcc/tree-stdarg.c @ 0:a06113de4d67

first commit

author:    kent <kent@cr.ie.u-ryukyu.ac.jp>
date:      Fri, 17 Jul 2009 14:47:48 +0900
parents:   (none)
children:  77e2b8dfacca
/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "function.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "target.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-stdarg.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */

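/* Purely illustrative sketch (not part of this pass): a stdarg function of
   the kind described above, where va_arg is only applied to an integral
   type, so the floating point argument registers never need to be spilled
   for it:

     #include <stdarg.h>

     int
     sum_ints (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   The function name and body are made up for illustration only.  */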

/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  VEC (edge, heap) *stack = NULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    VEC_safe_push (edge, heap, stack, e);

  while (! VEC_empty (edge, stack))
    {
      basic_block src;

      e = VEC_pop (edge, stack);
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! TEST_BIT (visited, src->index))
        {
          SET_BIT (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            VEC_safe_push (edge, heap, stack, e);
        }
    }

  VEC_free (edge, heap, stack);
  sbitmap_free (visited);
  return ret;
}
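
/* Illustrative note (an assumed example, not code from this pass): if the
   va_arg that triggers this query sits inside a loop while the va_start is
   outside of it, e.g.

     va_start (ap, count);
     while (count--)
       total += va_arg (ap, int);

   then the block containing the va_arg has a predecessor chain that leads
   back to itself, so the walk above eventually pops that block off the
   stack and returns false.  */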


/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is a GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return (unsigned HOST_WIDE_INT) -1;

      rhs_code = gimple_assign_rhs_code (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
        {
          lhs = gimple_assign_rhs1 (stmt);
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          && host_integerp (gimple_assign_rhs2 (stmt), 1))
        {
          ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
          lhs = gimple_assign_rhs1 (stmt);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return (unsigned HOST_WIDE_INT) -1;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return (unsigned HOST_WIDE_INT) -1;
        }
      else if (counter != rhs)
        return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
        {
          lhs = gimple_assign_rhs1 (stmt);
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          && host_integerp (gimple_assign_rhs2 (stmt), 1))
        {
          val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
          lhs = gimple_assign_rhs1 (stmt);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}
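
/* Illustrative sketch of a counter bump this function is meant to recognize
   (the temporaries are made up; gp_offset is the GPR counter field of an
   x86_64-style va_list):

     tmp1 = ap.gp_offset;
     tmp2 = tmp1 + 8;
     ap.gp_offset = tmp2;

   Called on the final store with COUNTER == ap.gp_offset and RHS == tmp2,
   the first loop above walks tmp2 -> tmp1 -> the COMPONENT_REF read of
   ap.gp_offset and returns 8.  */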


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  if (TREE_CODE (var) == VAR_DECL
      && bitmap_bit_p (va_list_vars, DECL_UID (var)))
    return var;

  return NULL_TREE;
}


/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter;
   if WRITE_P is true, it was seen in an AP = VAR statement, otherwise
   in a VAR = AP statement.  GPR_P is true if AP is a GPR counter,
   false if it is a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment, if WRITE_P
   is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars,
                       DECL_UID (SSA_NAME_VAR (tem)))
      || is_global_var (SSA_NAME_VAR (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars,
                  DECL_UID (SSA_NAME_VAR (tem)));
  return true;
}


/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}


/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function-local
   temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) != SSA_NAME
      || ! bitmap_bit_p (si->va_list_escape_vars,
                         DECL_UID (SSA_NAME_VAR (rhs))))
    return;

  if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == (unsigned HOST_WIDE_INT) -1)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars,
                  DECL_UID (SSA_NAME_VAR (lhs)));
}


/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree use;
          ssa_op_iter iter;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  DECL_UID (SSA_NAME_VAR (use))))
                continue;

              if (is_gimple_assign (stmt))
                {
                  tree rhs = gimple_assign_rhs1 (stmt);
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

                  /* x = *ap_temp;  */
                  if (gimple_assign_rhs_code (stmt) == INDIRECT_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_low_cst (access_size, 1);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                       other_ap_temp = ap_temp;
                       other_ap_temp = ap_temp + constant;
                       other_ap_temp = (some_type *) ap_temp;
                       ap = ap_temp;
                     statements.  */
                  if (rhs == use
                      && ((rhs_code == POINTER_PLUS_EXPR
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
                               == INTEGER_CST))
                          || gimple_assign_cast_p (stmt)
                          || (get_gimple_rhs_class (rhs_code)
                              == GIMPLE_SINGLE_RHS)))
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           DECL_UID (SSA_NAME_VAR (lhs))))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}
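
/* Illustrative sketch (hypothetical user code, made up for this comment):
   with a plain-pointer (void * or char *) va_list, a tracked temporary
   escapes for example when it is handed to another function,

     tmp = ap;        <- recognized read, tmp noted in va_list_escape_vars
     consume (tmp);   <- a call is neither a dereference nor a recognized
                         va_arg step, so the loop above reports true

   and the caller then falls back to saving all argument registers.  */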


/* Return true if this optimization pass should be done.
   It only makes sense for stdarg functions.  */

static bool
gate_optimize_stdarg (void)
{
  /* This optimization is only for stdarg functions.  */
  return cfun->stdarg != 0;
}


/* Entry point to the stdarg optimization pass.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_ARGS_INFO:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap));

          /* VA_START_BB and VA_START_AP will only be used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  */
      if (va_list_simple_ptr)
        {
          tree lhs, rhs;
          use_operand_p uop;
          ssa_op_iter soi;

          for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
            {
              gimple phi = gsi_stmt (i);
              lhs = PHI_RESULT (phi);

              if (!is_gimple_reg (lhs))
                continue;

              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }
        }

      for (i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              tree lhs = gimple_assign_lhs (stmt);
              tree rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for tem = ap.  */
                      if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                           tem1 = ap;
                           tem2 = tem1 + CST;
                           ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap[0].field = temp.  */
                      if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking the address of a va_list variable
             or passing a va_list to other functions (in that case va_list
             might escape the function and therefore va_start needs to set it
             up fully), or some unexpected use of va_list.  None of these
             should happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}


struct gimple_opt_pass pass_stdarg =
{
 {
  GIMPLE_PASS,
  "stdarg",                             /* name */
  gate_optimize_stdarg,                 /* gate */
  execute_optimize_stdarg,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};