gcc/gimple.c @ changeset 0:a06113de4d67 ("first commit")

author:   kent <kent@cr.ie.u-ryukyu.ac.jp>
date:     Fri, 17 Jul 2009 14:47:48 +0900
parents:  (none; root changeset)
children: a4c410aa4714, 77e2b8dfacca
/* Gimple IR support functions.

   Copyright 2007, 2008 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "ggc.h"
#include "errors.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"

#define DEFGSCODE(SYM, NAME, STRUCT) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* All the tuples have their operand vector at the very bottom of the
   structure.  Therefore, the offset required to find the operands
   vector is the size of the structure minus the size of the one-element
   tree array at the end (see gimple_ops).  */
#define DEFGSCODE(SYM, NAME, STRUCT) (sizeof (STRUCT) - sizeof (tree)),
const size_t gimple_ops_offset_[] = {
#include "gimple.def"
};
#undef DEFGSCODE
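
/* Editorial note (not part of the original file): gimple.def is an
   X-macro table; each line there is a DEFGSCODE invocation.  An entry
   of roughly this shape

     DEFGSCODE(GIMPLE_COND, "gimple_cond", struct gimple_statement_with_ops)

   expands to the string "gimple_cond" inside gimple_code_name above,
   and to `sizeof (struct gimple_statement_with_ops) - sizeof (tree)'
   inside gimple_ops_offset_, so both tables stay in sync with the list
   of codes automatically.  */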

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
  "assignments",
  "phi nodes",
  "conditionals",
  "sequences",
  "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}


/* Return the GSS_* identifier for the given GIMPLE statement CODE.  */

static enum gimple_statement_structure_enum
gss_for_code (enum gimple_code code)
{
  switch (code)
    {
    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN: return GSS_WITH_MEM_OPS;
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_CHANGE_DYNAMIC_TYPE:
    case GIMPLE_SWITCH: return GSS_WITH_OPS;
    case GIMPLE_ASM: return GSS_ASM;
    case GIMPLE_BIND: return GSS_BIND;
    case GIMPLE_CATCH: return GSS_CATCH;
    case GIMPLE_EH_FILTER: return GSS_EH_FILTER;
    case GIMPLE_NOP: return GSS_BASE;
    case GIMPLE_PHI: return GSS_PHI;
    case GIMPLE_RESX: return GSS_RESX;
    case GIMPLE_TRY: return GSS_TRY;
    case GIMPLE_WITH_CLEANUP_EXPR: return GSS_WCE;
    case GIMPLE_OMP_CRITICAL: return GSS_OMP_CRITICAL;
    case GIMPLE_OMP_FOR: return GSS_OMP_FOR;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION: return GSS_OMP;
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH: return GSS_BASE;
    case GIMPLE_OMP_CONTINUE: return GSS_OMP_CONTINUE;
    case GIMPLE_OMP_PARALLEL: return GSS_OMP_PARALLEL;
    case GIMPLE_OMP_TASK: return GSS_OMP_TASK;
    case GIMPLE_OMP_SECTIONS: return GSS_OMP_SECTIONS;
    case GIMPLE_OMP_SINGLE: return GSS_OMP_SINGLE;
    case GIMPLE_OMP_ATOMIC_LOAD: return GSS_OMP_ATOMIC_LOAD;
    case GIMPLE_OMP_ATOMIC_STORE: return GSS_OMP_ATOMIC_STORE;
    case GIMPLE_PREDICT: return GSS_BASE;
    default: gcc_unreachable ();
    }
}


/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static size_t
gimple_size (enum gimple_code code)
{
  enum gimple_statement_structure_enum gss = gss_for_code (code);

  if (gss == GSS_WITH_OPS)
    return sizeof (struct gimple_statement_with_ops);
  else if (gss == GSS_WITH_MEM_OPS)
    return sizeof (struct gimple_statement_with_memory_ops);

  switch (code)
    {
    case GIMPLE_ASM:
      return sizeof (struct gimple_statement_asm);
    case GIMPLE_NOP:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_BIND:
      return sizeof (struct gimple_statement_bind);
    case GIMPLE_CATCH:
      return sizeof (struct gimple_statement_catch);
    case GIMPLE_EH_FILTER:
      return sizeof (struct gimple_statement_eh_filter);
    case GIMPLE_TRY:
      return sizeof (struct gimple_statement_try);
    case GIMPLE_RESX:
      return sizeof (struct gimple_statement_resx);
    case GIMPLE_OMP_CRITICAL:
      return sizeof (struct gimple_statement_omp_critical);
    case GIMPLE_OMP_FOR:
      return sizeof (struct gimple_statement_omp_for);
    case GIMPLE_OMP_PARALLEL:
      return sizeof (struct gimple_statement_omp_parallel);
    case GIMPLE_OMP_TASK:
      return sizeof (struct gimple_statement_omp_task);
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      return sizeof (struct gimple_statement_omp);
    case GIMPLE_OMP_RETURN:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_OMP_CONTINUE:
      return sizeof (struct gimple_statement_omp_continue);
    case GIMPLE_OMP_SECTIONS:
      return sizeof (struct gimple_statement_omp_sections);
    case GIMPLE_OMP_SECTIONS_SWITCH:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_OMP_SINGLE:
      return sizeof (struct gimple_statement_omp_single);
    case GIMPLE_OMP_ATOMIC_LOAD:
      return sizeof (struct gimple_statement_omp_atomic_load);
    case GIMPLE_OMP_ATOMIC_STORE:
      return sizeof (struct gimple_statement_omp_atomic_store);
    case GIMPLE_WITH_CLEANUP_EXPR:
      return sizeof (struct gimple_statement_wce);
    case GIMPLE_CHANGE_DYNAMIC_TYPE:
      return sizeof (struct gimple_statement_with_ops);
    case GIMPLE_PREDICT:
      return sizeof (struct gimple_statement_base);
    default:
      break;
    }

  gcc_unreachable ();
}


/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

#define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
static gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = (gimple) ggc_alloc_cleared_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}
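
/* Editorial note (not part of the original file): the size arithmetic
   above relies on each tuple struct ending in a one-element trailing
   tree array (see the gimple_ops_offset_ table), so a statement with
   NUM_OPS operands needs NUM_OPS - 1 extra tree slots.  For example, a
   3-operand GIMPLE_ASSIGN occupies

     sizeof (struct gimple_statement_with_memory_ops) + 2 * sizeof (tree)

   bytes of GC memory.  */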

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}


/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, enum tree_code subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, 0, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Helper for gimple_build_call, gimple_build_call_vec and
   gimple_build_call_from_tree.  Build the basic components of a
   GIMPLE_CALL statement to function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, 0, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
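
/* Editorial example (not part of the original file): assuming FNDECL,
   ARG0, ARG1 and TMP are pre-built trees, the call `tmp = foo (arg0,
   arg1)' could be created with:

     gimple call = gimple_build_call (fndecl, 2, arg0, arg1);
     gimple_call_set_lhs (call, tmp);

   gimple_call_set_lhs is one of the tuple accessors in gimple.h.  */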


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P and *OP2_P respectively.  */

void
extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
                       tree *op2_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
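
/* Editorial note (not part of the original file): get_gimple_rhs_class
   classifies a tree code for use as a GIMPLE_ASSIGN right-hand side.
   For example, PLUS_EXPR is GIMPLE_BINARY_RHS (both operands are
   extracted above), NEGATE_EXPR is GIMPLE_UNARY_RHS, and a bare
   VAR_DECL or INTEGER_CST is GIMPLE_SINGLE_RHS, in which case the
   expression itself becomes op1.  */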


/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side, which may be a unary or binary tree
   expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2;

  extract_ops_from_tree (rhs, &subcode, &op1, &op2);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
                                            PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  return p;
}
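
/* Editorial example (not part of the original file): gimple.h wraps
   these _stat constructors in gimple_build_assign and
   gimple_build_assign_with_ops macros, following the same
   MEM_STAT_INFO pattern as gimple_alloc above.  With pre-built trees
   X, A and B, `x = a + b' would typically be created as either

     gimple stmt = gimple_build_assign (x, build2 (PLUS_EXPR, type, a, b));

   or, skipping the intermediate tree,

     gimple stmt = gimple_build_assign_with_ops (PLUS_EXPR, x, a, b);
*/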


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

inline gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}


/* Build a GIMPLE_COND statement.

   PRED_CODE is the comparison code used to compare LHS and RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
    }
}
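
/* Editorial note (not part of the original file): the canonicalization
   above means that, e.g., the tree `if (!val)' becomes the GIMPLE
   condition `val == 0' and a bare `if (val)' becomes `val != 0', so
   later passes only ever see a tcc_comparison code in a GIMPLE_COND.  */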


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, 0, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, 0, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers)
{
  gimple p;
  int size = strlen (string);

  p = gimple_build_with_ops (GIMPLE_ASM, 0, ninputs + noutputs + nclobbers);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   ... are trees for each input, output and clobbered register.  */

gimple
gimple_build_asm (const char *string, unsigned ninputs, unsigned noutputs,
                  unsigned nclobbers, ...)
{
  gimple p;
  unsigned i;
  va_list ap;

  p = gimple_build_asm_1 (string, ninputs, noutputs, nclobbers);

  va_start (ap, nclobbers);

  for (i = 0; i < ninputs; i++)
    gimple_asm_set_input_op (p, i, va_arg (ap, tree));

  for (i = 0; i < noutputs; i++)
    gimple_asm_set_output_op (p, i, va_arg (ap, tree));

  for (i = 0; i < nclobbers; i++)
    gimple_asm_set_clobber_op (p, i, va_arg (ap, tree));

  va_end (ap);

  return p;
}
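
/* Editorial example (not part of the original file): each operand
   passed to gimple_build_asm is expected to be a TREE_LIST in the form
   ASM_EXPR uses; walk_gimple_asm below reads the constraint string as
   TREE_VALUE (TREE_PURPOSE (op)) and the operand as TREE_VALUE (op).
   Assuming IN and OUT are such pre-built TREE_LISTs with "r" and "=r"
   constraints, a register move could be built as:

     gimple s = gimple_build_asm ("mov %1, %0", 1, 1, 0, in, out);

   Note the varargs order: all inputs first, then outputs, then
   clobbers, matching the three loops above.  */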

/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.

   REGION is the region number from which this resx causes control flow to
   leave.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_alloc (GIMPLE_RESX, 0);
  gimple_resx_set_region (p, region);
  return p;
}


/* The helper for constructing a GIMPLE_SWITCH statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

static inline gimple
gimple_build_switch_1 (unsigned nlabels, tree index, tree default_label)
{
  /* Room for NLABELS case labels, plus one default label and the index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, 0, nlabels + 1 + 1);
  gimple_switch_set_index (p, index);
  gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i;
  gimple p;

  p = gimple_build_switch_1 (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  for (i = 1; i <= nlabels; i++)
    gimple_switch_set_label (p, i, va_arg (al, tree));
  va_end (al);

  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_1 (nlabels, index, default_label);

  /* Put the case labels in labels[1 .. nlabels]; the default label
     is in labels[0].  */
  for (i = 1; i <= nlabels; i++)
    gimple_switch_set_label (p, i, VEC_index (tree, args, i - 1));

  return p;
}
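
/* Editorial example (not part of the original file): a two-case switch
   on IDX, with pre-built CASE_LABEL_EXPR trees C1 and C2 and default
   label DEF, would be built as:

     gimple s = gimple_build_switch (2, idx, def, c1, c2);

   In the resulting tuple, operand 0 is the index, operand 1 the
   default label, and operands 2..3 the two cases.  */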


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements that only one thread may execute
   at a time.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private,
   firstprivate, lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter = GGC_CNEWVEC (struct gimple_omp_for_iter, collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are the size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be
   executed in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private,
   firstprivate, copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_CHANGE_DYNAMIC_TYPE statement.  TYPE is the new type
   for the location PTR.  */

gimple
gimple_build_cdt (tree type, tree ptr)
{
  gimple p = gimple_build_with_ops (GIMPLE_CHANGE_DYNAMIC_TYPE, 0, 2);
  gimple_cdt_set_new_type (p, type);
  gimple_cdt_set_location (p, ptr);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors
   from predict.def; OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

/* Return which gimple structure is used by T.  The enums here are defined
   in gsstruct.def.  */

enum gimple_statement_structure_enum
gimple_statement_structure (gimple gs)
{
  return gss_for_code (gimple_code (gs));
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  tree_code_name[subcode],
                  gimple_code_name[gimple_code (gs)],
                  gs->gsbase.subcode > 0
                    ? tree_code_name[gs->gsbase.subcode]
                    : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = (gimple_seq) ggc_alloc_cleared (sizeof (*seq));
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}
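
/* Editorial note (not part of the original file): the two functions
   above form a simple free list.  gimple_seq_free pushes a sequence
   onto gimple_seq_cache through its next_free field, and the next
   gimple_seq_alloc pops and re-zeroes it instead of asking the GC for
   fresh memory; the GTY ((deletable)) marker on the cache lets the
   collector discard the whole list at GC time.  */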


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
   value is stored in WI->CALLBACK_RESULT and the statement that
   produced the value is returned.

   Otherwise, all the statements are walked and NULL is returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;
          return gsi_stmt (gsi);
        }
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
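
/* Editorial example (not part of the original file): a minimal operand
   callback following the walk_tree_fn contract; returning non-NULL
   stops the traversal and the value lands in WI->CALLBACK_RESULT:

     static tree
     find_ssa_name (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                    void *data ATTRIBUTE_UNUSED)
     {
       return TREE_CODE (*tp) == SSA_NAME ? *tp : NULL_TREE;
     }

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     gimple stmt = walk_gimple_seq (seq, NULL, find_ssa_name, &wi);
*/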


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      tree op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);

      /* Although an input "m" is not really an LHS, we need an lvalue.  */
      if (wi)
        wi->is_lhs = !wi->val_only;
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

        walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

inline tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  A formal temporary LHS may use a
         COMPONENT_REF RHS.  */
      if (wi)
        wi->val_only = !is_gimple_formal_tmp_var (gimple_assign_lhs (stmt));

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory operand,
         we may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS has more than 1 operand, it is not appropriate
             for a memory operand.  */
          wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
                         || !gimple_assign_single_p (stmt);
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        wi->is_lhs = false;

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (wi)
        wi->is_lhs = true;

      ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        wi->is_lhs = false;
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_CHANGE_DYNAMIC_TYPE:
      ret = walk_tree (gimple_cdt_location_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_cdt_new_type_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;
    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
        }
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    wi->gsi = *gsi;

  if (wi && wi->want_locations && gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
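
/* Editorial example (not part of the original file): a statement
   callback following the walk_stmt_fn contract.  Setting
   *HANDLED_OPS_P tells walk_gimple_stmt to skip the operand scan, so
   this skips the operands of GIMPLE_ASM statements while every other
   statement is scanned normally:

     static tree
     skip_asms (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
     {
       *handled_ops_p = (gimple_code (gsi_stmt (*gsi)) == GIMPLE_ASM);
       return NULL_TREE;
     }
*/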


/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body, either in unlowered
   or CFG form.  */

bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);
  tree t;

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (gimple_call_fn (stmt));
      if (t && TREE_CODE (t) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
	flags = 0;
    }

  return flags;
}
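
/* Usage sketch (illustrative only; stmt is hypothetical): a pass can
   use the returned ECF_* bits to tell whether a call touches global
   state, e.g. when deciding if the call is a candidate for CSE:

     if (is_gimple_call (stmt)
	 && (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
       ...  the callee reads no global memory (ECF_CONST) or only
	    reads it (ECF_PURE)  ...  */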


/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
	  && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
	     == GIMPLE_SINGLE_RHS
	  && is_gimple_val (gimple_op (gs, 1)));
}


/* Return true if GS is an SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
	  && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
	      == GIMPLE_SINGLE_RHS)
	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}


/* Return true if GS is an assignment with a singleton RHS, i.e.,
   there is no operator associated with the assignment itself.
   Unlike gimple_assign_copy_p, this predicate returns true for
   any RHS operand, including those that perform an operation
   and do not have the semantics of a copy, such as COND_EXPR.  */

bool
gimple_assign_single_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
	  && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
	     == GIMPLE_SINGLE_RHS);
}

/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
	  && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
	      || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
	  && gimple_assign_rhs1 (gs) != error_mark_node
	  && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
	      == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}
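
/* Illustrative comparison of the predicates above on hypothetical
   statements (not part of this file):

     a_1 = b_2;	      gimple_assign_copy_p and gimple_assign_single_p
		      are both true.
     a_1 = x.f;	      gimple_assign_copy_p is false (x.f is not a
		      gimple value); gimple_assign_single_p is true.
     a_1 = (int) b_2; both are false (the RHS is a unary operation);
		      gimple_assign_unary_nop_p is true when the cast
		      does not change the machine mode.  */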

/* Set BB to be the basic block holding STMT.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  unsigned old_len = VEC_length (basic_block, label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2;

	      VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
				     new_len);
	    }
	}

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}


/* Fold the expression computed by STMT.  If the expression can be
   folded, return the folded result, otherwise return NULL.  STMT is
   not modified.  */

tree
gimple_fold (const_gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      return fold_binary (gimple_cond_code (stmt),
			  boolean_type_node,
			  gimple_cond_lhs (stmt),
			  gimple_cond_rhs (stmt));

    case GIMPLE_ASSIGN:
      switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
	{
	case GIMPLE_UNARY_RHS:
	  return fold_unary (gimple_assign_rhs_code (stmt),
			     TREE_TYPE (gimple_assign_lhs (stmt)),
			     gimple_assign_rhs1 (stmt));
	case GIMPLE_BINARY_RHS:
	  return fold_binary (gimple_assign_rhs_code (stmt),
			      TREE_TYPE (gimple_assign_lhs (stmt)),
			      gimple_assign_rhs1 (stmt),
			      gimple_assign_rhs2 (stmt));
	case GIMPLE_SINGLE_RHS:
	  return fold (gimple_assign_rhs1 (stmt));
	default:
	  break;
	}
      break;

    case GIMPLE_SWITCH:
      return gimple_switch_index (stmt);

    case GIMPLE_CALL:
      return NULL_TREE;

    default:
      break;
    }

  gcc_unreachable ();
}
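
/* Usage sketch (illustrative only; cond_stmt is hypothetical): for a
   GIMPLE_COND such as "if (4 > 3)", gimple_fold returns the folded
   tree without modifying the statement:

     tree folded = gimple_fold (cond_stmt);
     if (folded && integer_onep (folded))
       ...  the condition is statically true; a pass could redirect
	    the edge and remove the dead arm  ...  */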


/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2;

  extract_ops_from_tree (expr, &subcode, &op1, &op2);
  gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2);
}


/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1 and OP2.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
				tree op1, tree op2)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
}
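
/* Usage sketch (illustrative only; stmt, y and z are hypothetical):
   turning "x = y" into "x = y + z" in place:

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, y, z);
     update_stmt (gsi_stmt (gsi));

   Since the statement may have been replaced, it must be re-fetched
   from the iterator afterwards rather than reused directly.  */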


/* Return the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
   for a call to a function that returns no value, or for a
   statement other than an assignment or a call.  */

tree
gimple_get_lhs (const_gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    return gimple_assign_lhs (stmt);
  else if (code == GIMPLE_CALL)
    return gimple_call_lhs (stmt);
  else
    return NULL_TREE;
}


/* Set the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */

void
gimple_set_lhs (gimple stmt, tree lhs)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    gimple_assign_set_lhs (stmt, lhs);
  else if (code == GIMPLE_CALL)
    gimple_call_set_lhs (stmt, lhs);
  else
    gcc_unreachable ();
}


/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
	  gimple_bind_set_body (copy, new_seq);
	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
	  break;

	case GIMPLE_CATCH:
	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
	  gimple_catch_set_handler (copy, new_seq);
	  t = unshare_expr (gimple_catch_types (stmt));
	  gimple_catch_set_types (copy, t);
	  break;

	case GIMPLE_EH_FILTER:
	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
	  gimple_eh_filter_set_failure (copy, new_seq);
	  t = unshare_expr (gimple_eh_filter_types (stmt));
	  gimple_eh_filter_set_types (copy, t);
	  break;

	case GIMPLE_TRY:
	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
	  gimple_try_set_eval (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
	  gimple_try_set_cleanup (copy, new_seq);
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  copy->gimple_omp_for.iter
	    = GGC_NEWVEC (struct gimple_omp_for_iter,
			  gimple_omp_for_collapse (stmt));
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
	  gimple_omp_parallel_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
	  gimple_omp_parallel_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
	  gimple_omp_parallel_set_data_arg (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name (stmt));
	  gimple_omp_critical_set_name (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  /* FALLTHRU  */

	case GIMPLE_OMP_SINGLE:
	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_ORDERED:
	copy_omp_body:
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  if (num_ops > 0)
    {
      for (i = 0; i < num_ops; i++)
	gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

      /* Clear out SSA operand vectors on COPY.  Note that we cannot
	 call the API functions for setting addresses_taken, stores
	 and loads.  These functions free the previous values, and we
	 cannot do that on COPY as it will affect the original
	 statement.  */
      if (gimple_has_ops (stmt))
	{
	  gimple_set_def_ops (copy, NULL);
	  gimple_set_use_ops (copy, NULL);
	  copy->gsops.opbase.addresses_taken = NULL;
	}

      if (gimple_has_mem_ops (stmt))
	{
	  gimple_set_vdef_ops (copy, NULL);
	  gimple_set_vuse_ops (copy, NULL);
	  copy->gsmem.membase.stores = NULL;
	  copy->gsmem.membase.loads = NULL;
	}

      update_stmt (copy);
    }

  return copy;
}
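
/* Usage sketch (illustrative only; stmt and gsi are hypothetical):
   duplicating a statement into another location:

     gimple dup = gimple_copy (stmt);
     gsi_insert_before (&gsi, dup, GSI_SAME_STMT);

   Because the operands are unshared and the SSA operand caches are
   cleared, the copy can be edited independently of the original.  */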


/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
   a MODIFIED field.  */

void
gimple_set_modified (gimple s, bool modifiedp)
{
  if (gimple_has_ops (s))
    {
      s->gsbase.modified = (unsigned) modifiedp;

      if (modifiedp
	  && cfun->gimple_df
	  && is_gimple_call (s)
	  && gimple_call_noreturn_p (s))
	VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
    }
}


/* Return true if statement S has side-effects.  We consider a
   statement to have side effects if:

   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */

bool
gimple_has_side_effects (const_gimple s)
{
  unsigned i;

  /* We don't have to scan the arguments to check for
     volatile arguments, though, at present, we still
     do a scan to check for TREE_SIDE_EFFECTS.  */
  if (gimple_has_volatile_ops (s))
    return true;

  if (is_gimple_call (s))
    {
      unsigned nargs = gimple_call_num_args (s);

      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
	return true;
      else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
	/* An infinite loop is considered a side effect.  */
	return true;

      if (gimple_call_lhs (s)
	  && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
	{
	  gcc_assert (gimple_has_volatile_ops (s));
	  return true;
	}

      if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
	return true;

      for (i = 0; i < nargs; i++)
	if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
	  {
	    gcc_assert (gimple_has_volatile_ops (s));
	    return true;
	  }

      return false;
    }
  else
    {
      for (i = 0; i < gimple_num_ops (s); i++)
	if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
	  {
	    gcc_assert (gimple_has_volatile_ops (s));
	    return true;
	  }
    }

  return false;
}

/* Return true if the RHS of statement S has side effects.
   We may use it to determine if it is admissible to replace
   an assignment or call with a copy of a previously-computed
   value.  In such cases, side effects due to the LHS are
   preserved.  */

bool
gimple_rhs_has_side_effects (const_gimple s)
{
  unsigned i;

  if (is_gimple_call (s))
    {
      unsigned nargs = gimple_call_num_args (s);

      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
	return true;

      /* We cannot use gimple_has_volatile_ops here,
	 because we must ignore a volatile LHS.  */
      if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
	  || TREE_THIS_VOLATILE (gimple_call_fn (s)))
	{
	  gcc_assert (gimple_has_volatile_ops (s));
	  return true;
	}

      for (i = 0; i < nargs; i++)
	if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
	    || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
	  return true;

      return false;
    }
  else if (is_gimple_assign (s))
    {
      /* Skip the first operand, the LHS.  */
      for (i = 1; i < gimple_num_ops (s); i++)
	if (TREE_SIDE_EFFECTS (gimple_op (s, i))
	    || TREE_THIS_VOLATILE (gimple_op (s, i)))
	  {
	    gcc_assert (gimple_has_volatile_ops (s));
	    return true;
	  }
    }
  else
    {
      /* For statements without an LHS, examine all the operands.  */
      for (i = 0; i < gimple_num_ops (s); i++)
	if (TREE_SIDE_EFFECTS (gimple_op (s, i))
	    || TREE_THIS_VOLATILE (gimple_op (s, i)))
	  {
	    gcc_assert (gimple_has_volatile_ops (s));
	    return true;
	  }
    }

  return false;
}


/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  If INCLUDE_LHS is true and S is a
   GIMPLE_ASSIGN, the LHS of the assignment is also checked.
   Otherwise, only the RHS of the assignment is checked.  */

static bool
gimple_could_trap_p_1 (gimple s, bool include_lhs)
{
  unsigned i, start;
  tree t, div = NULL_TREE;
  enum tree_code op;

  start = (is_gimple_assign (s) && !include_lhs) ? 1 : 0;

  for (i = start; i < gimple_num_ops (s); i++)
    if (tree_could_trap_p (gimple_op (s, i)))
      return true;

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      return gimple_asm_volatile_p (s);

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    default:
      break;
    }

  return false;
}


/* Return true if statement S can trap.  */

bool
gimple_could_trap_p (gimple s)
{
  return gimple_could_trap_p_1 (s, true);
}


/* Return true if the RHS of a GIMPLE_ASSIGN S can trap.  */

bool
gimple_assign_rhs_could_trap_p (gimple s)
{
  gcc_assert (is_gimple_assign (s));
  return gimple_could_trap_p_1 (s, false);
}
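
/* Usage sketch (illustrative only; stmt is hypothetical): a code
   motion pass must not hoist a statement that may trap, e.g. an
   integer division whose divisor is not known to be nonzero:

     if (gimple_could_trap_p (stmt))
       ...  keep stmt in place; moving it could introduce a trap
	    on a path that never executed it  ...  */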


/* Print debugging information for gimple stmts generated.  */

void
dump_gimple_statistics (void)
{
#ifdef GATHER_STATISTICS
  int i, total_tuples = 0, total_bytes = 0;

  fprintf (stderr, "\nGIMPLE statements\n");
  fprintf (stderr, "Kind                   Stmts      Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
    {
      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
	       gimple_alloc_counts[i], gimple_alloc_sizes[i]);
      total_tuples += gimple_alloc_counts[i];
      total_bytes += gimple_alloc_sizes[i];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
  fprintf (stderr, "---------------------------------------\n");
#else
  fprintf (stderr, "No gimple statistics\n");
#endif
}


/* Deep copy SYMS into the set of symbols stored by STMT.  If SYMS is
   NULL or empty, the storage used is freed up.  */

void
gimple_set_stored_syms (gimple stmt, bitmap syms, bitmap_obstack *obs)
{
  gcc_assert (gimple_has_mem_ops (stmt));

  if (syms == NULL || bitmap_empty_p (syms))
    BITMAP_FREE (stmt->gsmem.membase.stores);
  else
    {
      if (stmt->gsmem.membase.stores == NULL)
	stmt->gsmem.membase.stores = BITMAP_ALLOC (obs);

      bitmap_copy (stmt->gsmem.membase.stores, syms);
    }
}


/* Deep copy SYMS into the set of symbols loaded by STMT.  If SYMS is
   NULL or empty, the storage used is freed up.  */

void
gimple_set_loaded_syms (gimple stmt, bitmap syms, bitmap_obstack *obs)
{
  gcc_assert (gimple_has_mem_ops (stmt));

  if (syms == NULL || bitmap_empty_p (syms))
    BITMAP_FREE (stmt->gsmem.membase.loads);
  else
    {
      if (stmt->gsmem.membase.loads == NULL)
	stmt->gsmem.membase.loads = BITMAP_ALLOC (obs);

      bitmap_copy (stmt->gsmem.membase.loads, syms);
    }
}


/* Return the number of operands needed on the RHS of a GIMPLE
   assignment for an expression with tree code CODE.  */

unsigned
get_gimple_rhs_num_ops (enum tree_code code)
{
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);

  if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
    return 1;
  else if (rhs_class == GIMPLE_BINARY_RHS)
    return 2;
  else
    gcc_unreachable ();
}
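
/* For example (illustrative): PLUS_EXPR is tcc_binary, so
   get_gimple_rhs_class yields GIMPLE_BINARY_RHS and
   get_gimple_rhs_num_ops returns 2; NEGATE_EXPR is tcc_unary and
   needs 1; an SSA_NAME is GIMPLE_SINGLE_RHS and also needs 1.  */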

#define DEFTREECODE(SYM, STRING, TYPE, NARGS)				    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS			    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == EXC_PTR_EXPR						    \
      || (SYM) == SSA_NAME						    \
      || (SYM) == FILTER_EXPR						    \
      || (SYM) == POLYNOMIAL_CHREC					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS		    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi.  */

/* Validation of GIMPLE expressions.  */

/* Return true if OP is an acceptable tree node to be used as a GIMPLE
   operand.  */

bool
is_gimple_operand (const_tree op)
{
  return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS;
}


/* Return true if T is a GIMPLE RHS for an assignment to a temporary.  */

bool
is_gimple_formal_tmp_rhs (tree t)
{
  if (is_gimple_lvalue (t) || is_gimple_val (t))
    return true;

  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
}

/* Returns true iff T is a valid RHS for an assignment to a renamed
   user -- or front-end generated artificial -- variable.  */

bool
is_gimple_reg_rhs (tree t)
{
  /* If the RHS of the MODIFY_EXPR may throw or make a nonlocal goto
     and the LHS is a user variable, then we need to introduce a formal
     temporary.  This way the optimizers can determine that the user
     variable is only modified if evaluation of the RHS does not throw.

     Don't force a temp of a non-renamable type; the copy could be
     arbitrarily expensive.  Instead we will generate a VDEF for
     the assignment.  */

  if (is_gimple_reg_type (TREE_TYPE (t)) && tree_could_throw_p (t))
    return false;

  return is_gimple_formal_tmp_rhs (t);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_formal_tmp_rhs (t);
}

/* Return true if T is a valid LHS for a GIMPLE assignment expression.  */

bool
is_gimple_lvalue (tree t)
{
  return (is_gimple_addressable (t)
	  || TREE_CODE (t) == WITH_SIZE_EXPR
	  /* These are complex lvalues, but don't have addresses, so they
	     go here.  */
	  || TREE_CODE (t) == BIT_FIELD_REF);
}

/* Return true if T is a GIMPLE condition.  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
				&& !tree_could_trap_p (t)
				&& is_gimple_val (TREE_OPERAND (t, 0))
				&& is_gimple_val (TREE_OPERAND (t, 1))));
}

/* Return true if T is something whose address can be taken.  */

bool
is_gimple_addressable (tree t)
{
  return (is_gimple_id (t) || handled_component_p (t) || INDIRECT_REF_P (t));
}

/* Return true if T is a valid gimple constant.  */

bool
is_gimple_constant (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
      return true;

    /* Vector constant constructors are gimple invariant.  */
    case CONSTRUCTOR:
      if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	return TREE_CONSTANT (t);
      else
	return false;

    default:
      return false;
    }
}

/* Return true if T is a gimple address.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
	   || TREE_CODE (op) == ARRAY_RANGE_REF)
	  && !is_gimple_val (TREE_OPERAND (op, 1)))
	return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}

/* Strip out all handled components that produce invariant
   offsets.  */

static const_tree
strip_invariant_refs (const_tree op)
{
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
	{
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
	      || TREE_OPERAND (op, 2) != NULL_TREE
	      || TREE_OPERAND (op, 3) != NULL_TREE)
	    return NULL;
	  break;

	case COMPONENT_REF:
	  if (TREE_OPERAND (op, 2) != NULL_TREE)
	    return NULL;
	  break;

	default:
	  break;
	}
      op = TREE_OPERAND (op, 0);
    }

  return op;
}

/* Return true if T is a gimple invariant address.  */

bool
is_gimple_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));

  return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op));
}

/* Return true if T is a gimple invariant address at IPA level
   (so addresses of variables on stack are not allowed).  */

bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));

  return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
}

/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
   form of function invariant.  */

bool
is_gimple_min_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
   form of gimple minimal invariant.  */

bool
is_gimple_ip_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_ip_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T looks like a valid GIMPLE statement.  */

bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case CHANGE_DYNAMIC_TYPE_EXPR:
    case ASM_EXPR:
    case RESX_EXPR:
    case STATEMENT_LIST:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}

/* Return true if T is a variable.  */

bool
is_gimple_variable (tree t)
{
  return (TREE_CODE (t) == VAR_DECL
	  || TREE_CODE (t) == PARM_DECL
	  || TREE_CODE (t) == RESULT_DECL
	  || TREE_CODE (t) == SSA_NAME);
}

/* Return true if T is a GIMPLE identifier (something with an address).  */

bool
is_gimple_id (tree t)
{
  return (is_gimple_variable (t)
	  || TREE_CODE (t) == FUNCTION_DECL
	  || TREE_CODE (t) == LABEL_DECL
	  || TREE_CODE (t) == CONST_DECL
	  /* Allow string constants, since they are addressable.  */
	  || TREE_CODE (t) == STRING_CST);
}

/* Return true if TYPE is a suitable type for a scalar register variable.  */

bool
is_gimple_reg_type (tree type)
{
  /* In addition to aggregate types, we also exclude complex types if not
     optimizing because they can be subject to partial stores in GNU C by
     means of the __real__ and __imag__ operators and we cannot promote
     them to total stores (see gimplify_modify_expr_complex_part).  */
  return !(AGGREGATE_TYPE_P (type)
	   || (TREE_CODE (type) == COMPLEX_TYPE && !optimize));
}

/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  if (MTAG_P (t))
    return false;

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that those might imply.  All
     around, it seems safest to not do too much optimization with these
     at the tree level at all.  We'll have to rely on the rtl optimizers
     to clean this up, as there we've got all the appropriate bits
     exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}


/* Return true if T is a GIMPLE formal temporary variable.  */

bool
is_gimple_formal_tmp_var (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return true;

  return TREE_CODE (t) == VAR_DECL && DECL_GIMPLE_FORMAL_TEMP_P (t);
}

/* Return true if T is a GIMPLE formal temporary register variable.  */

bool
is_gimple_formal_tmp_reg (tree t)
{
  /* The intent of this is to get hold of a value that won't change.
     An SSA_NAME qualifies no matter whether it is of a user variable
     or not.  */
  if (TREE_CODE (t) == SSA_NAME)
    return true;

  /* We don't know the lifetime characteristics of user variables.  */
  if (!is_gimple_formal_tmp_var (t))
    return false;

  /* Finally, it must be capable of being placed in a register.  */
  return is_gimple_reg (t);
}

/* Return true if T is a GIMPLE variable whose address is not needed.  */

bool
is_gimple_non_addressable (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
}

/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */

bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  /* FIXME make these decls.  That can happen only when we expose the
     entire landing-pad construct at the tree level.  */
  if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR)
    return true;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

/* Similarly, but accept hard registers as inputs to asm statements.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}

/* Return true if T is a GIMPLE minimal lvalue.  */

bool
is_gimple_min_lval (tree t)
{
  return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
}

/* Return true if T is a typecast operation.  */

bool
is_gimple_cast (tree t)
{
  return (CONVERT_EXPR_P (t)
	  || TREE_CODE (t) == FIX_TRUNC_EXPR);
}

/* Return true if T is a valid function operand of a CALL_EXPR.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}

/* If T makes a function call, return the corresponding CALL_EXPR operand.
   Otherwise, return NULL_TREE.  */

tree
get_call_expr_in (tree t)
{
  if (TREE_CODE (t) == MODIFY_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return t;
  return NULL_TREE;
}


/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if (SSA_VAR_P (t)
      || TREE_CODE (t) == STRING_CST
      || TREE_CODE (t) == CONSTRUCTOR
      || INDIRECT_REF_P (t))
    return t;
  else
    return NULL_TREE;
}
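
/* For example (illustrative): for the reference 'a.b.c[4]', the loop
   peels the COMPONENT_REFs and the ARRAY_REF one operand at a time
   until the VAR_DECL 'a' remains, which is what gets returned.  For
   an expression like 'x + 1', which is not a handled component and
   not a variable or indirect reference, the result is NULL_TREE.  */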

/* Recompute the TREE_SIDE_EFFECTS bit for T from its operands.  */

void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}

/* Canonicalize a tree T for use in a COND_EXPR as a conditional.
   Returns a canonicalized tree that is valid for a COND_EXPR, or
   NULL_TREE if we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* For (bool)x use x != 0.  */
  if (TREE_CODE (t) == NOP_EXPR
      && TREE_TYPE (t) == boolean_type_node)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (NE_EXPR, TREE_TYPE (t),
		  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For !x use x == 0.  */
  else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
		  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
	   && integer_onep (TREE_OPERAND (t, 1))
	   && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }

  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}
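
/* For example (illustrative): given the tree for "a < b ? 1 : 0",
   the third case above rebuilds it as the bare comparison "a < b",
   which is_gimple_condexpr then accepts; given "(bool) x" the first
   case produces "x != 0" instead.  */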

/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
   the positions marked by the set ARGS_TO_SKIP.  */

gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
  int i;
  tree fn = gimple_call_fn (stmt);
  int nargs = gimple_call_num_args (stmt);
  VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
  gimple new_stmt;

  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));

  new_stmt = gimple_build_call_vec (fn, vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));

  /* Carry all the flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_set_tail (new_stmt, gimple_call_tail_p (stmt));
  gimple_call_set_cannot_inline (new_stmt, gimple_call_cannot_inline_p (stmt));
  gimple_call_set_return_slot_opt (new_stmt,
				   gimple_call_return_slot_opt_p (stmt));
  gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt));
  gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt));
  return new_stmt;
}
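
/* Usage sketch (illustrative only; call_stmt is hypothetical):
   dropping the second argument (index 1) of a call:

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     gimple new_call = gimple_call_copy_skip_args (call_stmt, skip);
     BITMAP_FREE (skip);

   This is the shape of rewrite needed when a callee clone no longer
   takes an unused parameter.  */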

#include "gt-gimple.h"