Mercurial > hg > CbC > CbC_gcc
annotate gcc/gimple-low.c @ 111:04ced10e8804
gcc 7
author | kono |
---|---|
date | Fri, 27 Oct 2017 22:46:09 +0900 |
parents | f6334be47118 |
children | 84e7813d76e9 |
rev | line source |
---|---|
0 | 1 /* GIMPLE lowering pass. Converts High GIMPLE into Low GIMPLE. |
2 | |
111 | 3 Copyright (C) 2003-2017 Free Software Foundation, Inc. |
0 | 4 |
5 This file is part of GCC. | |
6 | |
7 GCC is free software; you can redistribute it and/or modify it under | |
8 the terms of the GNU General Public License as published by the Free | |
9 Software Foundation; either version 3, or (at your option) any later | |
10 version. | |
11 | |
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 for more details. | |
16 | |
17 You should have received a copy of the GNU General Public License | |
18 along with GCC; see the file COPYING3. If not see | |
19 <http://www.gnu.org/licenses/>. */ | |
20 | |
21 #include "config.h" | |
22 #include "system.h" | |
23 #include "coretypes.h" | |
111 | 24 #include "backend.h" |
0 | 25 #include "tree.h" |
26 #include "gimple.h" | |
27 #include "tree-pass.h" | |
111 | 28 #include "fold-const.h" |
29 #include "tree-nested.h" | |
30 #include "calls.h" | |
31 #include "gimple-iterator.h" | |
32 #include "gimple-low.h" | |
33 #include "predict.h" | |
34 #include "gimple-predict.h" | |
0 | 35 |
36 /* The differences between High GIMPLE and Low GIMPLE are the | |
37 following: | |
38 | |
39 1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears). | |
40 | |
41 2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control | |
42 flow and exception regions are built as an on-the-side region | |
43 hierarchy (See tree-eh.c:lower_eh_constructs). | |
44 | |
45 3- Multiple identical return statements are grouped into a single | |
46 return and gotos to the unique return site. */ | |
47 | |
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  /* Label placed in front of the representative return; duplicate
     returns are rewritten as gotos to this label.  */
  tree label;

  /* The representative GIMPLE_RETURN statement itself.  */
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;
57 | |
58 | |
/* State threaded through the recursive lowering walk.  */
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};
71 | |
72 static void lower_stmt (gimple_stmt_iterator *, struct lower_data *); | |
73 static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *); | |
111 | 74 static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *); |
0 | 75 static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *); |
76 static void lower_builtin_setjmp (gimple_stmt_iterator *); | |
111 | 77 static void lower_builtin_posix_memalign (gimple_stmt_iterator *); |
0 | 78 |
79 | |
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  Returns 0 (the pass's TODO flags).  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  /* lower_gimple_bind prepends sub-blocks, so restore source order here.  */
  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
159 | |
/* Pass machinery: register the GIMPLE lowering pass ("lower").  It
   requires any-GIMPLE and provides lowered control flow
   (PROP_gimple_lcf).  */

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
194 | |
195 /* Lower sequence SEQ. Unlike gimplification the statements are not relowered | |
196 when they are changed -- if this has to be done, the lowering routine must | |
197 do it explicitly. DATA is passed through the recursion. */ | |
198 | |
199 static void | |
111 | 200 lower_sequence (gimple_seq *seq, struct lower_data *data) |
0 | 201 { |
202 gimple_stmt_iterator gsi; | |
203 | |
111 | 204 for (gsi = gsi_start (*seq); !gsi_end_p (gsi); ) |
0 | 205 lower_stmt (&gsi, data); |
206 } | |
207 | |
208 | |
/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  The directive's body is lowered,
   spliced out after the directive itself, and detached from the
   directive statement.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
224 | |
225 | |
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      /* Unconditional transfers of control never fall through.  */
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  /* The return is unreachable; drop it.  */
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    /* Simple statements with no nested sequences to lower; fall out of
       the switch to the common "may fall through" epilogue.  */
    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	/* Record the containing block on every expression argument.  */
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    /* Noreturn calls never fall through.  */
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
382 | |
/* Lower a bind_expr TSI.  DATA is passed through the recursion.  The
   GIMPLE_BIND's variables are recorded, its block is linked into the
   block tree, its body is lowered in place, and finally the bind
   statement itself is removed.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
456 | |
/* Same as above, but for a GIMPLE_TRY_CATCH.  Lowers the try body and
   the handlers, and computes whether the whole construct can fall
   through into DATA->cannot_fallthru.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
519 | |
520 | |
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
572 | |
573 | |
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  /* A null statement trivially "falls through" (conservative answer).  */
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      return true;
    }
}
641 | |
642 | |
643 /* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ. */ | |
644 | |
645 bool | |
646 gimple_seq_may_fallthru (gimple_seq seq) | |
647 { | |
648 return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq)); | |
649 } | |
650 | |
651 | |
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.
   Returns with the same (pointer-identical) return value share one
   representative return statement; this return is replaced by a goto
   to the representative's label.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  /* The goto inherits the return's location and block so diagnostics
     and debug info still point at the original return.  */
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
695 | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* CONT_LABEL is the join point after both the setup and receiver arms.  */
  tree cont_label = create_artificial_label (loc);
  /* NEXT_LABEL is the receiver label that __builtin_longjmp lands on.  */
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  /* The call's lhs (if any) is assigned on both arms (0 then 1 below),
     so an SSA name cannot be used directly; stage the value in a fresh
     temporary register and copy it into ORIG_DEST at the join point.  */
  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert: this is the direct-return arm.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert: skip over the receiver arm.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert: this is the longjmp-return arm.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary (i.e. when the lhs was an SSA
     name and a temporary register was substituted above).  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
111 | 826 |
/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  /* PTR will hold the (possibly aligned) pointer value in a register.  */
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      /* &ptr case: redirect the call to write into a fresh addressable
	 temporary, then load PTR from it.  */
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    /* Generic case: load PTR through the pointer argument (*pptr).  */
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  /* The success test below needs the call's result; materialize an lhs
     if the original call discarded it.  */
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  /* Only assert alignment when posix_memalign reported success (res == 0).  */
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				    align_label, noalign_label);
  /* Note: each gsi_insert_after with GSI_NEW_STMT advances the iterator,
     so the statements below end up in source order after the call.  */
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  /* PTR = __builtin_assume_aligned (PTR, ALIGN) exposes the alignment
     guarantee to later passes (CCP).  */
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  /* Store the annotated pointer back through the original out-pointer.  */
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}
0 | 880 |
881 | |
882 /* Record the variables in VARS into function FN. */ | |
883 | |
884 void | |
885 record_vars_into (tree vars, tree fn) | |
886 { | |
67
f6334be47118
update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
parents:
63
diff
changeset
|
887 for (; vars; vars = DECL_CHAIN (vars)) |
0 | 888 { |
889 tree var = vars; | |
890 | |
891 /* BIND_EXPRs contains also function/type/constant declarations | |
892 we don't need to care about. */ | |
111 | 893 if (!VAR_P (var)) |
0 | 894 continue; |
895 | |
896 /* Nothing to do in this case. */ | |
897 if (DECL_EXTERNAL (var)) | |
898 continue; | |
899 | |
900 /* Record the variable. */ | |
111 | 901 add_local_decl (DECL_STRUCT_FUNCTION (fn), var); |
0 | 902 } |
903 } | |
904 | |
905 | |
/* Record the variables in VARS into current_function_decl, i.e. the
   function currently being compiled.  Convenience wrapper around
   record_vars_into.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}