CbC_gcc: annotate gcc/tree-complex.c @ 131:84e7813d76e9 (gcc-8.2)

author   | mir3636
date     | Thu, 25 Oct 2018 07:37:49 +0900
parents  | 04ced10e8804
children | 1830386684a0

rev | line source
0 | 1 /* Lower complex number operations to scalar operations. |
131 | 2 Copyright (C) 2004-2018 Free Software Foundation, Inc. |
0 | 3 |
4 This file is part of GCC. | |
5 |
0 | 6 GCC is free software; you can redistribute it and/or modify it |
7 under the terms of the GNU General Public License as published by the | |
8 Free Software Foundation; either version 3, or (at your option) any | |
9 later version. | |
10 |
0 | 11 GCC is distributed in the hope that it will be useful, but WITHOUT |
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 for more details. | |
15 |
0 | 16 You should have received a copy of the GNU General Public License |
17 along with GCC; see the file COPYING3. If not see | |
18 <http://www.gnu.org/licenses/>. */ | |
19 | |
20 #include "config.h" | |
21 #include "system.h" | |
22 #include "coretypes.h" | |
111 | 23 #include "backend.h" |
24 #include "rtl.h" | |
0 | 25 #include "tree.h" |
26 #include "gimple.h" | |
111 | 27 #include "cfghooks.h" |
0 | 28 #include "tree-pass.h" |
111 | 29 #include "ssa.h" |
30 #include "fold-const.h" | |
31 #include "stor-layout.h" | |
32 #include "tree-eh.h" | |
33 #include "gimplify.h" | |
34 #include "gimple-iterator.h" | |
35 #include "gimplify-me.h" | |
36 #include "tree-cfg.h" | |
37 #include "tree-dfa.h" | |
38 #include "tree-ssa.h" | |
0 | 39 #include "tree-ssa-propagate.h" |
111 | 40 #include "tree-hasher.h" |
41 #include "cfgloop.h" | |
42 #include "cfganal.h" | |
0 | 43 |
44 | |
45 /* For each complex ssa name, a lattice value. We're interested in finding | |
46 out whether a complex number is degenerate in some way, having only real | |
47 or only imaginary parts. */ | |
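/* Illustration: an SSA name known to hold 3.0 + 0.0i is ONLY_REAL and one
   holding 0.0 + 2.0i is ONLY_IMAG; arithmetic on such degenerate values can
   then be lowered to far fewer scalar operations than the general case.  */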
48 | |
49 enum |
0 | 50 { |
51 UNINITIALIZED = 0, | |
52 ONLY_REAL = 1, | |
53 ONLY_IMAG = 2, | |
54 VARYING = 3 | |
55 }; |
56 |
57 /* The type complex_lattice_t holds combinations of the above |
58 constants. */ |
59 typedef int complex_lattice_t; |
0 | 60 |
61 #define PAIR(a, b) ((a) << 2 | (b)) | |
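A brief illustration of how these encodings compose (sketch, using the constants defined above):

/* The propagator's meet is bitwise IOR: UNINITIALIZED is the identity,
   VARYING absorbs, and ONLY_REAL | ONLY_IMAG == VARYING.  PAIR packs two
   lattice values into a single switch key, e.g.
   PAIR (ONLY_REAL, ONLY_IMAG) == (1 << 2) | 2 == 6.  */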
62 | |
131 | 63 class complex_propagate : public ssa_propagation_engine |
64 { | |
65 enum ssa_prop_result visit_stmt (gimple *, edge *, tree *) FINAL OVERRIDE; | |
66 enum ssa_prop_result visit_phi (gphi *) FINAL OVERRIDE; | |
67 }; | |
0 | 68 |
111 | 69 static vec<complex_lattice_t> complex_lattice_values; |
0 | 70 |
71 /* For each complex variable, a pair of variables for the components exists in | |
72 the hashtable. */ | |
111 | 73 static int_tree_htab_type *complex_variable_components; |
0 | 74 |
75 /* For each complex SSA_NAME, a pair of ssa names for the components. */ | |
111 | 76 static vec<tree> complex_ssa_name_components; |
77 | |
78 /* Vector of PHI triplets (original complex PHI and corresponding real and | |
79 imag PHIs if real and/or imag PHIs contain temporarily | |
80 non-SSA_NAME/non-invariant args that need to be replaced by SSA_NAMEs). */ | |
81 static vec<gphi *> phis_to_revisit; | |
0 | 82 |
83 /* Lookup UID in the complex_variable_components hashtable and return the | |
84 associated tree. */ | |
85 static tree |
0 | 86 cvc_lookup (unsigned int uid) |
87 { | |
111 | 88 struct int_tree_map in; |
0 | 89 in.uid = uid; |
111 | 90 return complex_variable_components->find_with_hash (in, uid).to; |
0 | 91 } |
92 |
0 | 93 /* Insert the pair UID, TO into the complex_variable_components hashtable. */ |
94 | |
95 static void |
0 | 96 cvc_insert (unsigned int uid, tree to) |
97 { |
111 | 98 int_tree_map h; |
99 int_tree_map *loc; | |
0 | 100 |
111 | 101 h.uid = uid; |
102 loc = complex_variable_components->find_slot_with_hash (h, uid, INSERT); | |
103 loc->uid = uid; | |
104 loc->to = to; | |
0 | 105 } |
106 | |
107 /* Return true if T is not a zero constant. In the case of real values, | |
108 we're only interested in +0.0. */ | |
109 | |
110 static int | |
111 some_nonzerop (tree t) | |
112 { | |
113 int zerop = false; | |
114 | |
115 /* Operations with real or imaginary part of a complex number zero |
116 cannot be treated the same as operations with a real or imaginary |
117 operand if we care about the signs of zeros in the result. */ |
118 if (TREE_CODE (t) == REAL_CST && !flag_signed_zeros) |
111 | 119 zerop = real_identical (&TREE_REAL_CST (t), &dconst0); |
0 | 120 else if (TREE_CODE (t) == FIXED_CST) |
121 zerop = fixed_zerop (t); | |
122 else if (TREE_CODE (t) == INTEGER_CST) | |
123 zerop = integer_zerop (t); | |
124 | |
125 return !zerop; | |
126 } | |
127 | |
128 | |
129 /* Compute a lattice value from the components of a complex type REAL | |
130 and IMAG. */ | |
131 | |
132 static complex_lattice_t | |
133 find_lattice_value_parts (tree real, tree imag) | |
134 { | |
135 int r, i; | |
136 complex_lattice_t ret; | |
137 | |
138 r = some_nonzerop (real); | |
139 i = some_nonzerop (imag); | |
140 ret = r * ONLY_REAL + i * ONLY_IMAG; | |
141 | |
142 /* ??? On occasion we could do better than mapping 0+0i to real, but we | |
143 certainly don't want to leave it UNINITIALIZED, which eventually gets | |
144 mapped to VARYING. */ | |
145 if (ret == UNINITIALIZED) | |
146 ret = ONLY_REAL; | |
147 | |
148 return ret; | |
149 } | |
150 | |
151 | |
152 /* Compute a lattice value from gimple_val T. */ | |
153 | |
154 static complex_lattice_t | |
155 find_lattice_value (tree t) | |
156 { | |
157 tree real, imag; | |
158 | |
159 switch (TREE_CODE (t)) | |
160 { | |
161 case SSA_NAME: | |
111 | 162 return complex_lattice_values[SSA_NAME_VERSION (t)]; |
0 | 163 |
164 case COMPLEX_CST: | |
165 real = TREE_REALPART (t); | |
166 imag = TREE_IMAGPART (t); | |
167 break; | |
168 | |
169 default: | |
170 gcc_unreachable (); | |
171 } | |
172 | |
173 return find_lattice_value_parts (real, imag); | |
174 } | |
175 | |
176 /* Determine if LHS is something for which we're interested in seeing | |
177 simulation results. */ | |
178 | |
179 static bool | |
180 is_complex_reg (tree lhs) | |
181 { | |
182 return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs); | |
183 } | |
184 | |
185 /* Mark the incoming parameters to the function as VARYING. */ | |
186 | |
187 static void | |
188 init_parameter_lattice_values (void) | |
189 { | |
190 tree parm, ssa_name; | |
191 | |
192 for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm)) |
0 | 193 if (is_complex_reg (parm) |
111 | 194 && (ssa_name = ssa_default_def (cfun, parm)) != NULL_TREE) |
195 complex_lattice_values[SSA_NAME_VERSION (ssa_name)] = VARYING; | |
0 | 196 } |
197 | |
198 /* Initialize simulation state for each statement. Return false if we | |
199 found no statements we want to simulate, and thus there's nothing | |
200 for the entire pass to do. */ | |
201 | |
202 static bool | |
203 init_dont_simulate_again (void) | |
204 { | |
205 basic_block bb; | |
206 bool saw_a_complex_op = false; | |
207 | |
111 | 208 FOR_EACH_BB_FN (bb, cfun) |
0 | 209 { |
111 | 210 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); |
211 gsi_next (&gsi)) | |
0 | 212 { |
111 | 213 gphi *phi = gsi.phi (); |
0 | 214 prop_set_simulate_again (phi, |
215 is_complex_reg (gimple_phi_result (phi))); | |
216 } | |
217 | |
111 | 218 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); |
219 gsi_next (&gsi)) | |
0 | 220 { |
111 | 221 gimple *stmt; |
0 | 222 tree op0, op1; |
223 bool sim_again_p; | |
224 | |
225 stmt = gsi_stmt (gsi); | |
226 op0 = op1 = NULL_TREE; | |
227 | |
228 /* Most control-altering statements must be initially |
0 | 229 simulated, else we won't cover the entire cfg. */ |
230 sim_again_p = stmt_ends_bb_p (stmt); | |
231 | |
232 switch (gimple_code (stmt)) | |
233 { | |
234 case GIMPLE_CALL: | |
235 if (gimple_call_lhs (stmt)) | |
236 sim_again_p = is_complex_reg (gimple_call_lhs (stmt)); | |
237 break; | |
238 | |
239 case GIMPLE_ASSIGN: | |
240 sim_again_p = is_complex_reg (gimple_assign_lhs (stmt)); | |
241 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR | |
242 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR) | |
243 op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0); | |
244 else | |
245 op0 = gimple_assign_rhs1 (stmt); | |
246 if (gimple_num_ops (stmt) > 2) | |
247 op1 = gimple_assign_rhs2 (stmt); | |
248 break; | |
249 | |
250 case GIMPLE_COND: | |
251 op0 = gimple_cond_lhs (stmt); | |
252 op1 = gimple_cond_rhs (stmt); | |
253 break; | |
254 | |
255 default: | |
256 break; | |
257 } | |
258 | |
259 if (op0 || op1) | |
260 switch (gimple_expr_code (stmt)) | |
261 { | |
262 case EQ_EXPR: | |
263 case NE_EXPR: | |
264 case PLUS_EXPR: | |
265 case MINUS_EXPR: | |
266 case MULT_EXPR: | |
267 case TRUNC_DIV_EXPR: | |
268 case CEIL_DIV_EXPR: | |
269 case FLOOR_DIV_EXPR: | |
270 case ROUND_DIV_EXPR: | |
271 case RDIV_EXPR: | |
272 if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE | |
273 || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE) | |
274 saw_a_complex_op = true; | |
275 break; | |
276 | |
277 case NEGATE_EXPR: | |
278 case CONJ_EXPR: | |
279 if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE) | |
280 saw_a_complex_op = true; | |
281 break; | |
282 | |
283 case REALPART_EXPR: | |
284 case IMAGPART_EXPR: | |
285 /* The total store transformation performed during | |
286 gimplification creates such uninitialized loads | |
287 and we need to lower the statement to be able | |
288 to fix things up. */ | |
289 if (TREE_CODE (op0) == SSA_NAME | |
290 && ssa_undefined_value_p (op0)) | |
291 saw_a_complex_op = true; | |
292 break; | |
293 | |
294 default: | |
295 break; | |
296 } | |
297 | |
298 prop_set_simulate_again (stmt, sim_again_p); | |
299 } | |
300 } | |
301 | |
302 return saw_a_complex_op; | |
303 } | |
304 | |
305 | |
306 /* Evaluate statement STMT against the complex lattice defined above. */ | |
307 | |
131 | 308 enum ssa_prop_result |
309 complex_propagate::visit_stmt (gimple *stmt, edge *taken_edge_p ATTRIBUTE_UNUSED, | |
310 tree *result_p) | |
0 | 311 { |
312 complex_lattice_t new_l, old_l, op1_l, op2_l; | |
313 unsigned int ver; | |
314 tree lhs; | |
315 | |
316 lhs = gimple_get_lhs (stmt); | |
317 /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs. */ | |
318 if (!lhs) | |
319 return SSA_PROP_VARYING; | |
320 | |
321 /* These conditions should be satisfied due to the initial filter | |
322 set up in init_dont_simulate_again. */ | |
323 gcc_assert (TREE_CODE (lhs) == SSA_NAME); | |
324 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE); | |
325 | |
326 *result_p = lhs; | |
327 ver = SSA_NAME_VERSION (lhs); | |
111 | 328 old_l = complex_lattice_values[ver]; |
0 | 329 |
330 switch (gimple_expr_code (stmt)) | |
331 { | |
332 case SSA_NAME: | |
333 case COMPLEX_CST: | |
334 new_l = find_lattice_value (gimple_assign_rhs1 (stmt)); | |
335 break; | |
336 | |
337 case COMPLEX_EXPR: | |
338 new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt), | |
339 gimple_assign_rhs2 (stmt)); | |
340 break; | |
341 | |
342 case PLUS_EXPR: | |
343 case MINUS_EXPR: | |
344 op1_l = find_lattice_value (gimple_assign_rhs1 (stmt)); | |
345 op2_l = find_lattice_value (gimple_assign_rhs2 (stmt)); | |
346 | |
347 /* We've set up the lattice values such that IOR neatly | |
348 models addition. */ | |
349 new_l = op1_l | op2_l; | |
350 break; | |
351 | |
352 case MULT_EXPR: | |
353 case RDIV_EXPR: | |
354 case TRUNC_DIV_EXPR: | |
355 case CEIL_DIV_EXPR: | |
356 case FLOOR_DIV_EXPR: | |
357 case ROUND_DIV_EXPR: | |
358 op1_l = find_lattice_value (gimple_assign_rhs1 (stmt)); | |
359 op2_l = find_lattice_value (gimple_assign_rhs2 (stmt)); | |
360 | |
361 /* Obviously, if either varies, so does the result. */ | |
362 if (op1_l == VARYING || op2_l == VARYING) | |
363 new_l = VARYING; | |
364 /* Don't prematurely promote variables if we've not yet seen | |
365 their inputs. */ | |
366 else if (op1_l == UNINITIALIZED) | |
367 new_l = op2_l; | |
368 else if (op2_l == UNINITIALIZED) | |
369 new_l = op1_l; | |
370 else | |
371 { | |
372 /* At this point both numbers have only one component. If the | |
373 numbers are of opposite kind, the result is imaginary, | |
374 otherwise the result is real. The add/subtract translates | |
375 the real/imag from/to 0/1; the ^ performs the comparison. */ | |
376 new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL; | |
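/* Worked example (illustration): with op1_l == ONLY_REAL (1) and
   op2_l == ONLY_IMAG (2), (1 - 1) ^ (2 - 1) == 1, and 1 + ONLY_REAL
   == ONLY_IMAG, so real * imaginary is classified as purely imaginary;
   operands of the same kind XOR to 0 and yield ONLY_REAL.  */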
377 | |
378 /* Don't allow the lattice value to flip-flop indefinitely. */ | |
379 new_l |= old_l; | |
380 } | |
381 break; | |
382 | |
383 case NEGATE_EXPR: | |
384 case CONJ_EXPR: | |
385 new_l = find_lattice_value (gimple_assign_rhs1 (stmt)); | |
386 break; | |
387 | |
388 default: | |
389 new_l = VARYING; | |
390 break; | |
391 } | |
392 | |
393 /* If nothing changed this round, let the propagator know. */ | |
394 if (new_l == old_l) | |
395 return SSA_PROP_NOT_INTERESTING; | |
396 | |
111 | 397 complex_lattice_values[ver] = new_l; |
0 | 398 return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING; |
399 } | |
400 | |
401 /* Evaluate a PHI node against the complex lattice defined above. */ | |
402 | |
131 | 403 enum ssa_prop_result |
404 complex_propagate::visit_phi (gphi *phi) | |
0 | 405 { |
406 complex_lattice_t new_l, old_l; | |
407 unsigned int ver; | |
408 tree lhs; | |
409 int i; | |
410 | |
411 lhs = gimple_phi_result (phi); | |
412 | |
413 /* This condition should be satisfied due to the initial filter | |
414 set up in init_dont_simulate_again. */ | |
415 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE); | |
416 | |
417 /* We've set up the lattice values such that IOR neatly models PHI meet. */ | |
418 new_l = UNINITIALIZED; | |
419 for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i) | |
420 new_l |= find_lattice_value (gimple_phi_arg_def (phi, i)); | |
421 | |
422 ver = SSA_NAME_VERSION (lhs); | |
111 | 423 old_l = complex_lattice_values[ver]; |
0 | 424 |
425 if (new_l == old_l) | |
426 return SSA_PROP_NOT_INTERESTING; | |
427 | |
111 | 428 complex_lattice_values[ver] = new_l; |
0 | 429 return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING; |
430 } | |
431 | |
432 /* Create one backing variable for a complex component of ORIG. */ | |
433 | |
434 static tree | |
435 create_one_component_var (tree type, tree orig, const char *prefix, | |
436 const char *suffix, enum tree_code code) | |
437 { | |
438 tree r = create_tmp_var (type, prefix); | |
439 | |
440 DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (orig); | |
441 DECL_ARTIFICIAL (r) = 1; | |
442 | |
443 if (DECL_NAME (orig) && !DECL_IGNORED_P (orig)) | |
444 { | |
445 const char *name = IDENTIFIER_POINTER (DECL_NAME (orig)); | |
111 | 446 name = ACONCAT ((name, suffix, NULL)); |
447 DECL_NAME (r) = get_identifier (name); | |
0 | 448 |
449 SET_DECL_DEBUG_EXPR (r, build1 (code, type, orig)); | |
111 | 450 DECL_HAS_DEBUG_EXPR_P (r) = 1; |
0 | 451 DECL_IGNORED_P (r) = 0; |
452 TREE_NO_WARNING (r) = TREE_NO_WARNING (orig); | |
453 } | |
454 else | |
455 { | |
456 DECL_IGNORED_P (r) = 1; | |
457 TREE_NO_WARNING (r) = 1; | |
458 } | |
459 | |
460 return r; | |
461 } | |
462 | |
463 /* Retrieve a value for a complex component of VAR. */ | |
464 | |
465 static tree | |
466 get_component_var (tree var, bool imag_p) | |
467 { | |
468 size_t decl_index = DECL_UID (var) * 2 + imag_p; | |
469 tree ret = cvc_lookup (decl_index); | |
470 | |
471 if (ret == NULL) | |
472 { | |
473 ret = create_one_component_var (TREE_TYPE (TREE_TYPE (var)), var, | |
474 imag_p ? "CI" : "CR", | |
475 imag_p ? "$imag" : "$real", | |
476 imag_p ? IMAGPART_EXPR : REALPART_EXPR); | |
477 cvc_insert (decl_index, ret); | |
478 } | |
479 | |
480 return ret; | |
481 } | |
482 | |
483 /* Retrieve a value for a complex component of SSA_NAME. */ | |
484 | |
485 static tree | |
486 get_component_ssa_name (tree ssa_name, bool imag_p) | |
487 { | |
488 complex_lattice_t lattice = find_lattice_value (ssa_name); | |
489 size_t ssa_name_index; | |
490 tree ret; | |
491 | |
492 if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG)) | |
493 { | |
494 tree inner_type = TREE_TYPE (TREE_TYPE (ssa_name)); | |
495 if (SCALAR_FLOAT_TYPE_P (inner_type)) | |
496 return build_real (inner_type, dconst0); | |
497 else | |
498 return build_int_cst (inner_type, 0); | |
499 } | |
500 | |
501 ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p; | |
111 | 502 ret = complex_ssa_name_components[ssa_name_index]; |
0 | 503 if (ret == NULL) |
504 { | |
111 | 505 if (SSA_NAME_VAR (ssa_name)) |
506 ret = get_component_var (SSA_NAME_VAR (ssa_name), imag_p); | |
507 else | |
508 ret = TREE_TYPE (TREE_TYPE (ssa_name)); | |
509 ret = make_ssa_name (ret); | |
0 | 510 |
511 /* Copy some properties from the original. In particular, whether it | |
512 is used in an abnormal phi, and whether it's uninitialized. */ | |
513 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret) | |
514 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name); | |
111 | 515 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name) |
516 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL) | |
0 | 517 { |
518 SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name); | |
111 | 519 set_ssa_default_def (cfun, SSA_NAME_VAR (ret), ret); |
0 | 520 } |
521 | |
111 | 522 complex_ssa_name_components[ssa_name_index] = ret; |
0 | 523 } |
524 | |
525 return ret; | |
526 } | |
527 | |
528 /* Set a value for a complex component of SSA_NAME, return a | |
529 gimple_seq of stuff that needs doing. */ | |
530 | |
531 static gimple_seq | |
532 set_component_ssa_name (tree ssa_name, bool imag_p, tree value) | |
533 { | |
534 complex_lattice_t lattice = find_lattice_value (ssa_name); | |
535 size_t ssa_name_index; | |
536 tree comp; | |
111 | 537 gimple *last; |
0 | 538 gimple_seq list; |
539 | |
540 /* We know the value must be zero, else there's a bug in our lattice | |
541 analysis. But the value may well be a variable known to contain | |
542 zero. We should be safe ignoring it. */ | |
543 if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG)) | |
544 return NULL; | |
545 | |
546 /* If we've already assigned an SSA_NAME to this component, then this | |
547 means that our walk of the basic blocks found a use before the set. | |
548 This is fine. Now we should create an initialization for the value | |
549 we created earlier. */ | |
550 ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p; | |
111 | 551 comp = complex_ssa_name_components[ssa_name_index]; |
0 | 552 if (comp) |
553 ; | |
554 | |
555 /* If we've nothing assigned, and the value we're given is already stable, | |
556 then install that as the value for this SSA_NAME. This preemptively | |
557 copy-propagates the value, which avoids unnecessary memory allocation. */ | |
558 else if (is_gimple_min_invariant (value) | |
559 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name)) | |
560 { | |
111 | 561 complex_ssa_name_components[ssa_name_index] = value; |
0 | 562 return NULL; |
563 } | |
564 else if (TREE_CODE (value) == SSA_NAME | |
565 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name)) | |
566 { | |
567 /* Replace an anonymous base value with the variable from cvc_lookup. | |
568 This should result in better debug info. */ | |
111 | 569 if (SSA_NAME_VAR (ssa_name) |
570 && (!SSA_NAME_VAR (value) || DECL_IGNORED_P (SSA_NAME_VAR (value))) | |
0 | 571 && !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name))) |
572 { | |
573 comp = get_component_var (SSA_NAME_VAR (ssa_name), imag_p); | |
574 replace_ssa_name_symbol (value, comp); | |
575 } | |
576 | |
111 | 577 complex_ssa_name_components[ssa_name_index] = value; |
0 | 578 return NULL; |
579 } | |
580 | |
581 /* Finally, we need to stabilize the result by installing the value into | |
582 a new ssa name. */ | |
583 else | |
584 comp = get_component_ssa_name (ssa_name, imag_p); | |
585 |
0 | 586 /* Do all the work to assign VALUE to COMP. */ |
587 list = NULL; | |
588 value = force_gimple_operand (value, &list, false, NULL); | |
589 last = gimple_build_assign (comp, value); | |
590 gimple_seq_add_stmt (&list, last); | |
591 gcc_assert (SSA_NAME_DEF_STMT (comp) == last); | |
592 | |
593 return list; | |
594 } | |
595 | |
596 /* Extract the real or imaginary part of a complex variable or constant. | |
597 Make sure that it's a proper gimple_val and gimplify it if not. | |
598 Emit any new code before gsi. */ | |
599 | |
600 static tree | |
601 extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p, | |
111 | 602 bool gimple_p, bool phiarg_p = false) |
0 | 603 { |
604 switch (TREE_CODE (t)) | |
605 { | |
606 case COMPLEX_CST: | |
607 return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t); | |
608 | |
609 case COMPLEX_EXPR: | |
610 gcc_unreachable (); | |
611 | |
111 | 612 case BIT_FIELD_REF: |
613 { | |
614 tree inner_type = TREE_TYPE (TREE_TYPE (t)); | |
615 t = unshare_expr (t); | |
616 TREE_TYPE (t) = inner_type; | |
617 TREE_OPERAND (t, 1) = TYPE_SIZE (inner_type); | |
618 if (imagpart_p) | |
619 TREE_OPERAND (t, 2) = size_binop (PLUS_EXPR, TREE_OPERAND (t, 2), | |
620 TYPE_SIZE (inner_type)); | |
621 if (gimple_p) | |
622 t = force_gimple_operand_gsi (gsi, t, true, NULL, true, | |
623 GSI_SAME_STMT); | |
624 return t; | |
625 } | |
626 | |
0 | 627 case VAR_DECL: |
628 case RESULT_DECL: | |
629 case PARM_DECL: | |
630 case COMPONENT_REF: | |
631 case ARRAY_REF: | |
632 case VIEW_CONVERT_EXPR: |
633 case MEM_REF: |
0 | 634 { |
635 tree inner_type = TREE_TYPE (TREE_TYPE (t)); | |
636 | |
637 t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR), | |
638 inner_type, unshare_expr (t)); | |
639 | |
640 if (gimple_p) | |
641 t = force_gimple_operand_gsi (gsi, t, true, NULL, true, | |
642 GSI_SAME_STMT); | |
643 | |
644 return t; | |
645 } | |
646 | |
647 case SSA_NAME: | |
111 | 648 t = get_component_ssa_name (t, imagpart_p); |
649 if (TREE_CODE (t) == SSA_NAME && SSA_NAME_DEF_STMT (t) == NULL) | |
650 gcc_assert (phiarg_p); | |
651 return t; | |
0 | 652 |
653 default: | |
654 gcc_unreachable (); | |
655 } | |
656 } | |
657 | |
658 /* Update the complex components of the ssa name on the lhs of STMT. */ | |
659 | |
660 static void | |
111 | 661 update_complex_components (gimple_stmt_iterator *gsi, gimple *stmt, tree r, |
0 | 662 tree i) |
663 { | |
664 tree lhs; | |
665 gimple_seq list; | |
666 | |
667 lhs = gimple_get_lhs (stmt); | |
668 | |
669 list = set_component_ssa_name (lhs, false, r); | |
670 if (list) | |
671 gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING); | |
672 | |
673 list = set_component_ssa_name (lhs, true, i); | |
674 if (list) | |
675 gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING); | |
676 } | |
677 | |
678 static void | |
679 update_complex_components_on_edge (edge e, tree lhs, tree r, tree i) | |
680 { | |
681 gimple_seq list; | |
682 | |
683 list = set_component_ssa_name (lhs, false, r); | |
684 if (list) | |
685 gsi_insert_seq_on_edge (e, list); | |
686 | |
687 list = set_component_ssa_name (lhs, true, i); | |
688 if (list) | |
689 gsi_insert_seq_on_edge (e, list); | |
690 } | |
691 | |
692 | |
693 /* Update an assignment to a complex variable in place. */ | |
694 | |
695 static void | |
696 update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i) | |
697 { | |
111 | 698 gimple *stmt; |
699 | |
700 gimple_assign_set_rhs_with_ops (gsi, COMPLEX_EXPR, r, i); | |
701 stmt = gsi_stmt (*gsi); | |
702 update_stmt (stmt); | |
703 if (maybe_clean_eh_stmt (stmt)) | |
704 gimple_purge_dead_eh_edges (gimple_bb (stmt)); | |
0 | 705 |
131 | 706 update_complex_components (gsi, gsi_stmt (*gsi), r, i); |
0 | 707 } |
708 | |
709 | |
710 /* Generate code at the entry point of the function to initialize the | |
711 component variables for a complex parameter. */ | |
712 | |
713 static void | |
714 update_parameter_components (void) | |
715 { | |
111 | 716 edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)); |
0 | 717 tree parm; |
718 | |
719 for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm)) |
0 | 720 { |
721 tree type = TREE_TYPE (parm); | |
722 tree ssa_name, r, i; | |
723 | |
724 if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm)) | |
725 continue; | |
726 | |
727 type = TREE_TYPE (type); | |
111 | 728 ssa_name = ssa_default_def (cfun, parm); |
0 | 729 if (!ssa_name) |
730 continue; | |
731 | |
732 r = build1 (REALPART_EXPR, type, ssa_name); | |
733 i = build1 (IMAGPART_EXPR, type, ssa_name); | |
734 update_complex_components_on_edge (entry_edge, ssa_name, r, i); | |
735 } | |
736 } | |
737 | |
738 /* Generate code to set the component variables of a complex variable | |
739 to match the PHI statements in block BB. */ | |
740 | |
741 static void | |
742 update_phi_components (basic_block bb) | |
743 { | |
111 | 744 gphi_iterator gsi; |
0 | 745 |
746 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
747 { | |
111 | 748 gphi *phi = gsi.phi (); |
0 | 749 |
750 if (is_complex_reg (gimple_phi_result (phi))) | |
751 { | |
111 | 752 gphi *p[2] = { NULL, NULL }; |
753 unsigned int i, j, n; | |
754 bool revisit_phi = false; | |
0 | 755 |
111 | 756 for (j = 0; j < 2; j++) |
0 | 757 { |
111 | 758 tree l = get_component_ssa_name (gimple_phi_result (phi), j > 0); |
759 if (TREE_CODE (l) == SSA_NAME) | |
760 p[j] = create_phi_node (l, bb); | |
0 | 761 } |
762 | |
763 for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i) | |
764 { | |
765 tree comp, arg = gimple_phi_arg_def (phi, i); | |
111 | 766 for (j = 0; j < 2; j++) |
767 if (p[j]) | |
768 { | |
769 comp = extract_component (NULL, arg, j > 0, false, true); | |
770 if (TREE_CODE (comp) == SSA_NAME | |
771 && SSA_NAME_DEF_STMT (comp) == NULL) | |
772 { | |
773 /* For the benefit of any gimple simplification during | |
774 this pass that might walk SSA_NAME def stmts, | |
775 don't add SSA_NAMEs without definitions into the | |
776 PHI arguments, but put a decl in there instead | |
777 temporarily, and revisit this PHI later on. */ | |
778 if (SSA_NAME_VAR (comp)) | |
779 comp = SSA_NAME_VAR (comp); | |
780 else | |
781 comp = create_tmp_reg (TREE_TYPE (comp), | |
782 get_name (comp)); | |
783 revisit_phi = true; | |
784 } | |
785 SET_PHI_ARG_DEF (p[j], i, comp); | |
786 } | |
787 } | |
788 | |
789 if (revisit_phi) | |
790 { | |
791 phis_to_revisit.safe_push (phi); | |
792 phis_to_revisit.safe_push (p[0]); | |
793 phis_to_revisit.safe_push (p[1]); | |
0 | 794 } |
795 } | |
796 } | |
797 } | |
798 | |
799 /* Expand a complex move to scalars. */ | |
800 | |
801 static void | |
802 expand_complex_move (gimple_stmt_iterator *gsi, tree type) | |
803 { | |
804 tree inner_type = TREE_TYPE (type); | |
805 tree r, i, lhs, rhs; | |
111 | 806 gimple *stmt = gsi_stmt (*gsi); |
0 | 807 |
808 if (is_gimple_assign (stmt)) | |
809 { | |
810 lhs = gimple_assign_lhs (stmt); | |
811 if (gimple_num_ops (stmt) == 2) | |
812 rhs = gimple_assign_rhs1 (stmt); | |
813 else | |
814 rhs = NULL_TREE; | |
815 } | |
816 else if (is_gimple_call (stmt)) | |
817 { | |
818 lhs = gimple_call_lhs (stmt); | |
819 rhs = NULL_TREE; | |
820 } | |
821 else | |
822 gcc_unreachable (); | |
823 | |
824 if (TREE_CODE (lhs) == SSA_NAME) | |
825 { | |
826 if (is_ctrl_altering_stmt (stmt)) | |
827 { | |
828 edge e; | |
829 | |
830 /* The value is not assigned on the exception edges, so we need not | |
831 concern ourselves there. We do need to update on the fallthru | |
832 edge. Find it. */ | |
833 e = find_fallthru_edge (gsi_bb (*gsi)->succs); |
834 if (!e) |
835 gcc_unreachable (); |
0 | 836 |
837 r = build1 (REALPART_EXPR, inner_type, lhs); | |
838 i = build1 (IMAGPART_EXPR, inner_type, lhs); | |
839 update_complex_components_on_edge (e, lhs, r, i); | |
840 } | |
841 else if (is_gimple_call (stmt) | |
842 || gimple_has_side_effects (stmt) | |
843 || gimple_assign_rhs_code (stmt) == PAREN_EXPR) | |
844 { | |
845 r = build1 (REALPART_EXPR, inner_type, lhs); | |
846 i = build1 (IMAGPART_EXPR, inner_type, lhs); | |
847 update_complex_components (gsi, stmt, r, i); | |
848 } | |
849 else | |
850 { | |
851 if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR) | |
852 { | |
853 r = extract_component (gsi, rhs, 0, true); | |
854 i = extract_component (gsi, rhs, 1, true); | |
855 } | |
856 else | |
857 { | |
858 r = gimple_assign_rhs1 (stmt); | |
859 i = gimple_assign_rhs2 (stmt); | |
860 } | |
861 update_complex_assignment (gsi, r, i); | |
862 } | |
863 } | |
864 else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs)) | |
865 { | |
866 tree x; | |
111 | 867 gimple *t; |
868 location_t loc; | |
0 | 869 |
111 | 870 loc = gimple_location (stmt); |
0 | 871 r = extract_component (gsi, rhs, 0, false); |
872 i = extract_component (gsi, rhs, 1, false); | |
873 | |
874 x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs)); | |
875 t = gimple_build_assign (x, r); | |
111 | 876 gimple_set_location (t, loc); |
0 | 877 gsi_insert_before (gsi, t, GSI_SAME_STMT); |
878 | |
879 if (stmt == gsi_stmt (*gsi)) | |
880 { | |
881 x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs)); | |
882 gimple_assign_set_lhs (stmt, x); | |
883 gimple_assign_set_rhs1 (stmt, i); | |
884 } | |
885 else | |
886 { | |
887 x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs)); | |
888 t = gimple_build_assign (x, i); | |
111 | 889 gimple_set_location (t, loc); |
0 | 890 gsi_insert_before (gsi, t, GSI_SAME_STMT); |
891 | |
892 stmt = gsi_stmt (*gsi); | |
893 gcc_assert (gimple_code (stmt) == GIMPLE_RETURN); | |
111 | 894 gimple_return_set_retval (as_a <greturn *> (stmt), lhs); |
0 | 895 } |
896 | |
897 update_stmt (stmt); | |
898 } | |
899 } | |
900 | |
901 /* Expand complex addition to scalars: | |
902 a + b = (ar + br) + i(ai + bi) | |
903 a - b = (ar - br) + i(ai - bi) | |
904 */ | |
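A plain-C sketch of the fully general case (illustration; the lattice-specialized cases below skip whichever scalar operation is known to be redundant):

static void
complex_add_sketch (double ar, double ai, double br, double bi,
                    double *rr, double *ri)
{
  /* The (VARYING, VARYING) case; for an (ONLY_REAL, ONLY_IMAG) addition,
     by contrast, no arithmetic is emitted at all: rr is ar and ri is bi.  */
  *rr = ar + br;
  *ri = ai + bi;
}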
905 | |
906 static void | |
907 expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type, | |
908 tree ar, tree ai, tree br, tree bi, | |
909 enum tree_code code, | |
910 complex_lattice_t al, complex_lattice_t bl) | |
911 { | |
912 tree rr, ri; | |
913 | |
914 switch (PAIR (al, bl)) | |
915 { | |
916 case PAIR (ONLY_REAL, ONLY_REAL): | |
917 rr = gimplify_build2 (gsi, code, inner_type, ar, br); | |
918 ri = ai; | |
919 break; | |
920 | |
921 case PAIR (ONLY_REAL, ONLY_IMAG): | |
922 rr = ar; | |
923 if (code == MINUS_EXPR) | |
924 ri = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, bi); | |
925 else | |
926 ri = bi; | |
927 break; | |
928 | |
929 case PAIR (ONLY_IMAG, ONLY_REAL): | |
930 if (code == MINUS_EXPR) | |
931 rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ar, br); | |
932 else | |
933 rr = br; | |
934 ri = ai; | |
935 break; | |
936 | |
937 case PAIR (ONLY_IMAG, ONLY_IMAG): | |
938 rr = ar; | |
939 ri = gimplify_build2 (gsi, code, inner_type, ai, bi); | |
940 break; | |
941 | |
942 case PAIR (VARYING, ONLY_REAL): | |
943 rr = gimplify_build2 (gsi, code, inner_type, ar, br); | |
944 ri = ai; | |
945 break; | |
946 | |
947 case PAIR (VARYING, ONLY_IMAG): | |
948 rr = ar; | |
949 ri = gimplify_build2 (gsi, code, inner_type, ai, bi); | |
950 break; | |
951 | |
952 case PAIR (ONLY_REAL, VARYING): | |
953 if (code == MINUS_EXPR) | |
954 goto general; | |
955 rr = gimplify_build2 (gsi, code, inner_type, ar, br); | |
956 ri = bi; | |
957 break; | |
958 | |
959 case PAIR (ONLY_IMAG, VARYING): | |
960 if (code == MINUS_EXPR) | |
961 goto general; | |
962 rr = br; | |
963 ri = gimplify_build2 (gsi, code, inner_type, ai, bi); | |
964 break; | |
965 | |
966 case PAIR (VARYING, VARYING): | |
967 general: | |
968 rr = gimplify_build2 (gsi, code, inner_type, ar, br); | |
969 ri = gimplify_build2 (gsi, code, inner_type, ai, bi); | |
970 break; | |
971 | |
972 default: | |
973 gcc_unreachable (); | |
974 } | |
975 | |
976 update_complex_assignment (gsi, rr, ri); | |
977 } | |
978 | |
979 /* Expand a complex multiplication or division to a libcall to the c99 | |
131 | 980 compliant routines. TYPE is the complex type of the operation. |
981 If INPLACE_P is true, replace the statement at GSI with | |
982 the libcall and return NULL_TREE. Else insert the call, assign its | |
983 result to an output variable and return that variable. If INPLACE_P | |
984 is true then the statement being replaced should be an assignment | |
985 statement. */ | |
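As a sketch (assuming a double-precision complex operand, so the mode maps onto the libgcc routine __muldc3; other modes use __mulsc3, __mulxc3, __divdc3, and so on), the emitted call corresponds to:

extern _Complex double __muldc3 (double, double, double, double);

static _Complex double
complex_mul_libcall_sketch (double ar, double ai, double br, double bi)
{
  /* The pass builds the equivalent of this call and then splits the result
     back into its real and imaginary components.  */
  return __muldc3 (ar, ai, br, bi);
}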
0 | 986 |
131 | 987 static tree |
988 expand_complex_libcall (gimple_stmt_iterator *gsi, tree type, tree ar, tree ai, | |
989 tree br, tree bi, enum tree_code code, bool inplace_p) | |
0 | 990 { |
111 | 991 machine_mode mode; |
0 | 992 enum built_in_function bcode; |
131 | 993 tree fn, lhs; |
111 | 994 gcall *stmt; |
0 | 995 |
996 mode = TYPE_MODE (type); | |
997 gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT); | |
998 | |
999 if (code == MULT_EXPR) | |
1000 bcode = ((enum built_in_function) |
1001 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT)); |
0 | 1002 else if (code == RDIV_EXPR) |
1003 bcode = ((enum built_in_function) |
1004 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT)); |
0 | 1005 else |
1006 gcc_unreachable (); | |
111 | 1007 fn = builtin_decl_explicit (bcode); |
131 | 1008 stmt = gimple_build_call (fn, 4, ar, ai, br, bi); |
0 | 1009 |
131 | 1010 if (inplace_p) |
1011 { | |
1012 gimple *old_stmt = gsi_stmt (*gsi); | |
1013 gimple_call_set_nothrow (stmt, !stmt_could_throw_p (cfun, old_stmt)); | |
1014 lhs = gimple_assign_lhs (old_stmt); | |
1015 gimple_call_set_lhs (stmt, lhs); | |
1016 gsi_replace (gsi, stmt, true); | |
0 | 1017 |
131 | 1018 type = TREE_TYPE (type); |
1019 if (stmt_can_throw_internal (cfun, stmt)) | |
1020 { | |
1021 edge_iterator ei; | |
1022 edge e; | |
1023 FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs) | |
1024 if (!(e->flags & EDGE_EH)) | |
1025 break; | |
1026 basic_block bb = split_edge (e); | |
1027 gimple_stmt_iterator gsi2 = gsi_start_bb (bb); | |
1028 update_complex_components (&gsi2, stmt, | |
1029 build1 (REALPART_EXPR, type, lhs), | |
1030 build1 (IMAGPART_EXPR, type, lhs)); | |
1031 return NULL_TREE; | |
1032 } | |
1033 else | |
1034 update_complex_components (gsi, stmt, | |
1035 build1 (REALPART_EXPR, type, lhs), | |
1036 build1 (IMAGPART_EXPR, type, lhs)); | |
1037 SSA_NAME_DEF_STMT (lhs) = stmt; | |
1038 return NULL_TREE; | |
1039 } | |
0 | 1040 |
131 | 1041 gimple_call_set_nothrow (stmt, true); |
1042 lhs = make_ssa_name (type); | |
1043 gimple_call_set_lhs (stmt, lhs); | |
1044 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); | |
1045 | |
1046 return lhs; | |
1047 } | |
1048 | |
1049 /* Perform a complex multiplication on two complex constants A, B represented | |
1050 by AR, AI, BR, BI of type TYPE. | |
1051 The operation we want is: a * b = (ar*br - ai*bi) + i(ar*bi + br*ai). | |
1052 Insert the GIMPLE statements into GSI. Store the real and imaginary | |
1053 components of the result into RR and RI. */ | |
1054 | |
1055 static void | |
1056 expand_complex_multiplication_components (gimple_stmt_iterator *gsi, | |
1057 tree type, tree ar, tree ai, | |
1058 tree br, tree bi, | |
1059 tree *rr, tree *ri) | |
1060 { | |
1061 tree t1, t2, t3, t4; | |
1062 | |
1063 t1 = gimplify_build2 (gsi, MULT_EXPR, type, ar, br); | |
1064 t2 = gimplify_build2 (gsi, MULT_EXPR, type, ai, bi); | |
1065 t3 = gimplify_build2 (gsi, MULT_EXPR, type, ar, bi); | |
1066 | |
1067 /* Avoid expanding redundant multiplication for the common | |
1068 case of squaring a complex number. */ | |
1069 if (ar == br && ai == bi) | |
1070 t4 = t3; | |
1071 else | |
1072 t4 = gimplify_build2 (gsi, MULT_EXPR, type, ai, br); | |
1073 | |
1074 *rr = gimplify_build2 (gsi, MINUS_EXPR, type, t1, t2); | |
1075 *ri = gimplify_build2 (gsi, PLUS_EXPR, type, t3, t4); | |
0 | 1076 } |
1077 | |
1078 /* Expand complex multiplication to scalars: | |
1079 a * b = (ar*br - ai*bi) + i(ar*bi + br*ai) | |
1080 */ | |
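In plain C the scalar form built below amounts to (sketch):

static void
complex_mul_sketch (double ar, double ai, double br, double bi,
                    double *rr, double *ri)
{
  /* rr + i*ri = (ar + i*ai) * (br + i*bi); e.g. (1 + 2i)(3 + 4i) gives
     rr = 3 - 8 = -5 and ri = 4 + 6 = 10.  */
  *rr = ar * br - ai * bi;
  *ri = ar * bi + br * ai;
}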
1081 | |
1082 static void | |
131 | 1083 expand_complex_multiplication (gimple_stmt_iterator *gsi, tree type, |
0 | 1084 tree ar, tree ai, tree br, tree bi, |
1085 complex_lattice_t al, complex_lattice_t bl) | |
1086 { | |
1087 tree rr, ri; | |
131 | 1088 tree inner_type = TREE_TYPE (type); |
0 | 1089 |
1090 if (al < bl) | |
1091 { | |
1092 complex_lattice_t tl; | |
1093 rr = ar, ar = br, br = rr; | |
1094 ri = ai, ai = bi, bi = ri; | |
1095 tl = al, al = bl, bl = tl; | |
1096 } | |
1097 | |
1098 switch (PAIR (al, bl)) | |
1099 { | |
1100 case PAIR (ONLY_REAL, ONLY_REAL): | |
1101 rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br); | |
1102 ri = ai; | |
1103 break; | |
1104 | |
1105 case PAIR (ONLY_IMAG, ONLY_REAL): | |
1106 rr = ar; | |
1107 if (TREE_CODE (ai) == REAL_CST | |
111 | 1108 && real_identical (&TREE_REAL_CST (ai), &dconst1)) |
0 | 1109 ri = br; |
1110 else | |
1111 ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br); | |
1112 break; | |
1113 | |
1114 case PAIR (ONLY_IMAG, ONLY_IMAG): | |
1115 rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi); | |
1116 rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr); | |
1117 ri = ar; | |
1118 break; | |
1119 | |
1120 case PAIR (VARYING, ONLY_REAL): | |
1121 rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br); | |
1122 ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br); | |
1123 break; | |
1124 | |
1125 case PAIR (VARYING, ONLY_IMAG): | |
1126 rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi); | |
1127 rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr); | |
1128 ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi); | |
1129 break; | |
1130 | |
1131 case PAIR (VARYING, VARYING): | |
1132 if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type)) | |
1133 { | |
131 | 1134 /* If optimizing for size or not at all just do a libcall. |
1135 Same if there are exception-handling edges or signaling NaNs. */ | |
1136 if (optimize == 0 || optimize_bb_for_size_p (gsi_bb (*gsi)) | |
1137 || stmt_can_throw_internal (cfun, gsi_stmt (*gsi)) | |
1138 || flag_signaling_nans) | |
1139 { | |
1140 expand_complex_libcall (gsi, type, ar, ai, br, bi, | |
1141 MULT_EXPR, true); | |
1142 return; | |
1143 } | |
1144 | |
1145 /* Else, expand x = a * b into | |
1146 x = (ar*br - ai*bi) + i(ar*bi + br*ai); | |
1147 if (isunordered (__real__ x, __imag__ x)) | |
1148 x = __muldc3 (a, b); */ | |
1149 | |
1150 tree tmpr, tmpi; | |
1151 expand_complex_multiplication_components (gsi, inner_type, ar, ai, | |
1152 br, bi, &tmpr, &tmpi); | |
1153 | |
1154 gimple *check | |
1155 = gimple_build_cond (UNORDERED_EXPR, tmpr, tmpi, | |
1156 NULL_TREE, NULL_TREE); | |
1157 | |
1158 basic_block orig_bb = gsi_bb (*gsi); | |
1159 /* We want to keep track of the original complex multiplication | |
1160 statement as we're going to modify it later in | |
1161 update_complex_assignment. Make sure that insert_cond_bb leaves | |
1162 that statement in the join block. */ | |
1163 gsi_prev (gsi); | |
1164 basic_block cond_bb | |
1165 = insert_cond_bb (gsi_bb (*gsi), gsi_stmt (*gsi), check, | |
1166 profile_probability::very_unlikely ()); | |
1167 | |
1168 | |
1169 gimple_stmt_iterator cond_bb_gsi = gsi_last_bb (cond_bb); | |
1170 gsi_insert_after (&cond_bb_gsi, gimple_build_nop (), GSI_NEW_STMT); | |
1171 | |
1172 tree libcall_res | |
1173 = expand_complex_libcall (&cond_bb_gsi, type, ar, ai, br, | |
1174 bi, MULT_EXPR, false); | |
1175 tree cond_real = gimplify_build1 (&cond_bb_gsi, REALPART_EXPR, | |
1176 inner_type, libcall_res); | |
1177 tree cond_imag = gimplify_build1 (&cond_bb_gsi, IMAGPART_EXPR, | |
1178 inner_type, libcall_res); | |
1179 | |
1180 basic_block join_bb = single_succ_edge (cond_bb)->dest; | |
1181 *gsi = gsi_start_nondebug_after_labels_bb (join_bb); | |
1182 | |
1183 /* We have a conditional block with some assignments in cond_bb. | |
1184 Wire up the PHIs to wrap up. */ | |
1185 rr = make_ssa_name (inner_type); | |
1186 ri = make_ssa_name (inner_type); | |
1187 edge cond_to_join = single_succ_edge (cond_bb); | |
1188 edge orig_to_join = find_edge (orig_bb, join_bb); | |
1189 | |
1190 gphi *real_phi = create_phi_node (rr, gsi_bb (*gsi)); | |
1191 add_phi_arg (real_phi, cond_real, cond_to_join, | |
1192 UNKNOWN_LOCATION); | |
1193 add_phi_arg (real_phi, tmpr, orig_to_join, UNKNOWN_LOCATION); | |
1194 | |
1195 gphi *imag_phi = create_phi_node (ri, gsi_bb (*gsi)); | |
1196 add_phi_arg (imag_phi, cond_imag, cond_to_join, | |
1197 UNKNOWN_LOCATION); | |
1198 add_phi_arg (imag_phi, tmpi, orig_to_join, UNKNOWN_LOCATION); | |
0 | 1199 } |
1200 else | |
131 | 1201 /* If we are not worrying about NaNs expand to |
1202 (ar*br - ai*bi) + i(ar*bi + br*ai) directly. */ | |
1203 expand_complex_multiplication_components (gsi, inner_type, ar, ai, | |
1204 br, bi, &rr, &ri); | |
0 | 1205 break; |
1206 | |
1207 default: | |
1208 gcc_unreachable (); | |
1209 } | |
1210 | |
1211 update_complex_assignment (gsi, rr, ri); | |
1212 } | |
1213 | |
1214 /* Keep this algorithm in sync with fold-const.c:const_binop(). |
1215 |
1216 Expand complex division to scalars, straightforward algorithm. |
0 | 1217 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t) |
1218 t = br*br + bi*bi | |
1219 */ | |
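A quick numeric check of the formula (illustration): for (1 + 2i) / (3 + 4i),
  t  = 3*3 + 4*4 = 25
  rr = (1*3 + 2*4) / 25 = 11/25 = 0.44
  ri = (2*3 - 1*4) / 25 = 2/25  = 0.08
which agrees with (1 + 2i)(3 - 4i)/25 = (11 + 2i)/25.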
1220 | |
1221 static void | |
1222 expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type, | |
1223 tree ar, tree ai, tree br, tree bi, | |
1224 enum tree_code code) | |
1225 { | |
1226 tree rr, ri, div, t1, t2, t3; | |
1227 | |
1228 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, br); | |
1229 t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, bi); | |
1230 div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2); | |
1231 | |
1232 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br); | |
1233 t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi); | |
1234 t3 = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2); | |
1235 rr = gimplify_build2 (gsi, code, inner_type, t3, div); | |
1236 | |
1237 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br); | |
1238 t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi); | |
1239 t3 = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2); | |
1240 ri = gimplify_build2 (gsi, code, inner_type, t3, div); | |
1241 | |
1242 update_complex_assignment (gsi, rr, ri); | |
1243 } | |
1244 | |
1245 /* Keep this algorithm in sync with fold-const.c:const_binop(). |
1246 |
1247 Expand complex division to scalars, modified algorithm to minimize |
0 | 1248 overflow with wide input ranges. */ |
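A plain-C sketch of the branchy lowering below (essentially Smith's 1962 scaling), assuming double operands and with fabs taken from <math.h>:

static void
complex_div_wide_sketch (double ar, double ai, double br, double bi,
                         double *rr, double *ri)
{
  if (fabs (br) < fabs (bi))
    {
      /* The TRUE branch below: scale numerator and denominator by br/bi.  */
      double ratio = br / bi;
      double div = br * ratio + bi;
      *rr = (ar * ratio + ai) / div;
      *ri = (ai * ratio - ar) / div;
    }
  else
    {
      /* The FALSE branch below: scale by bi/br instead.  */
      double ratio = bi / br;
      double div = bi * ratio + br;
      *rr = (ai * ratio + ar) / div;
      *ri = (ai - ar * ratio) / div;
    }
}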
1249 | |
1250 static void | |
1251 expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type, | |
1252 tree ar, tree ai, tree br, tree bi, | |
1253 enum tree_code code) | |
1254 { | |
1255 tree rr, ri, ratio, div, t1, t2, tr, ti, compare; | |
1256 basic_block bb_cond, bb_true, bb_false, bb_join; | |
111 | 1257 gimple *stmt; |
0 | 1258 |
1259 /* Examine |br| < |bi|, and branch. */ | |
1260 t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br); | |
1261 t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi); | |
1262 compare = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), |
1263 LT_EXPR, boolean_type_node, t1, t2); |
0 | 1264 STRIP_NOPS (compare); |
1265 | |
1266 bb_cond = bb_true = bb_false = bb_join = NULL; | |
1267 rr = ri = tr = ti = NULL; | |
1268 if (TREE_CODE (compare) != INTEGER_CST) |
0 | 1269 { |
1270 edge e; | |
111 | 1271 gimple *stmt; |
0 | 1272 tree cond, tmp; |
1273 | |
131 | 1274 tmp = make_ssa_name (boolean_type_node); |
0 | 1275 stmt = gimple_build_assign (tmp, compare); |
1276 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); | |
1277 | |
1278 cond = fold_build2_loc (gimple_location (stmt), |
1279 EQ_EXPR, boolean_type_node, tmp, boolean_true_node); |
0 | 1280 stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE); |
1281 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); | |
1282 | |
1283 /* Split the original block, and create the TRUE and FALSE blocks. */ | |
1284 e = split_block (gsi_bb (*gsi), stmt); | |
1285 bb_cond = e->src; | |
1286 bb_join = e->dest; | |
1287 bb_true = create_empty_bb (bb_cond); | |
1288 bb_false = create_empty_bb (bb_true); | |
111 | 1289 bb_true->count = bb_false->count |
1290 = bb_cond->count.apply_probability (profile_probability::even ()); | |
0 | 1291 |
1292 /* Wire the blocks together. */ | |
1293 e->flags = EDGE_TRUE_VALUE; | |
111 | 1294 /* TODO: With value profile we could add a histogram to determine real |
1295 branch outcome. */ | |
1296 e->probability = profile_probability::even (); | |
0 | 1297 redirect_edge_succ (e, bb_true); |
111 | 1298 edge e2 = make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE); |
1299 e2->probability = profile_probability::even (); | |
1300 make_single_succ_edge (bb_true, bb_join, EDGE_FALLTHRU); | |
1301 make_single_succ_edge (bb_false, bb_join, EDGE_FALLTHRU); | |
1302 add_bb_to_loop (bb_true, bb_cond->loop_father); | |
1303 add_bb_to_loop (bb_false, bb_cond->loop_father); | |
0 | 1304 |
1305 /* Update dominance info. Note that bb_join's data was | |
1306 updated by split_block. */ | |
1307 if (dom_info_available_p (CDI_DOMINATORS)) | |
1308 { | |
1309 set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond); | |
1310 set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond); | |
1311 } | |
1312 | |
111 | 1313 rr = create_tmp_reg (inner_type); |
1314 ri = create_tmp_reg (inner_type); | |
0 | 1315 } |
1316 | |
1317 /* In the TRUE branch, we compute | |
1318 ratio = br/bi; | |
1319 div = (br * ratio) + bi; | |
1320 tr = (ar * ratio) + ai; | |
1321 ti = (ai * ratio) - ar; | |
1322 tr = tr / div; | |
1323 ti = ti / div; */ | |
1324 if (bb_true || integer_nonzerop (compare)) | |
1325 { | |
1326 if (bb_true) | |
1327 { | |
1328 *gsi = gsi_last_bb (bb_true); | |
1329 gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT); | |
1330 } | |
1331 | |
1332 ratio = gimplify_build2 (gsi, code, inner_type, br, bi); | |
1333 | |
1334 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, ratio); | |
1335 div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, bi); | |
1336 | |
1337 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio); | |
1338 tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ai); | |
1339 | |
1340 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio); | |
1341 ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, ar); | |
1342 | |
1343 tr = gimplify_build2 (gsi, code, inner_type, tr, div); | |
1344 ti = gimplify_build2 (gsi, code, inner_type, ti, div); | |
1345 | |
1346 if (bb_true) | |
1347 { | |
1348 stmt = gimple_build_assign (rr, tr); | |
1349 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); | |
1350 stmt = gimple_build_assign (ri, ti); | |
1351 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); | |
1352 gsi_remove (gsi, true); | |
1353 } | |
1354 } | |
1355 | |
1356 /* In the FALSE branch, we compute | |
1357 ratio = bi/br; | |
1358 div = (bi * ratio) + br; | |
1359 tr = (ai * ratio) + ar; | |
1360 ti = ai - (ar * ratio); | |
1361 tr = tr / div; | |
1362 ti = ti / div; */ | |
1363 if (bb_false || integer_zerop (compare)) | |
1364 { | |
1365 if (bb_false) | |
1366 { | |
1367 *gsi = gsi_last_bb (bb_false); | |
1368 gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT); | |
1369 } | |
1370 | |
1371 ratio = gimplify_build2 (gsi, code, inner_type, bi, br); | |
1372 | |
1373 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, ratio); | |
1374 div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, br); | |
1375 | |
1376 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio); | |
1377 tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ar); | |
1378 | |
1379 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio); | |
1380 ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, t1); | |
1381 | |
1382 tr = gimplify_build2 (gsi, code, inner_type, tr, div); | |
1383 ti = gimplify_build2 (gsi, code, inner_type, ti, div); | |
1384 | |
1385 if (bb_false) | |
1386 { | |
1387 stmt = gimple_build_assign (rr, tr); | |
1388 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); | |
1389 stmt = gimple_build_assign (ri, ti); | |
1390 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); | |
1391 gsi_remove (gsi, true); | |
1392 } | |
1393 } | |
1394 | |
1395 if (bb_join) | |
1396 *gsi = gsi_start_bb (bb_join); | |
1397 else | |
1398 rr = tr, ri = ti; | |
1399 | |
1400 update_complex_assignment (gsi, rr, ri); | |
1401 } | |
1402 | |
1403 /* Expand complex division to scalars. */ | |
1404 | |
1405 static void | |
131 | 1406 expand_complex_division (gimple_stmt_iterator *gsi, tree type, |
0 | 1407 tree ar, tree ai, tree br, tree bi, |
1408 enum tree_code code, | |
1409 complex_lattice_t al, complex_lattice_t bl) | |
1410 { | |
1411 tree rr, ri; | |
1412 | |
131 | 1413 tree inner_type = TREE_TYPE (type); |
0 | 1414 switch (PAIR (al, bl)) |
1415 { | |
1416 case PAIR (ONLY_REAL, ONLY_REAL): | |
1417 rr = gimplify_build2 (gsi, code, inner_type, ar, br); | |
1418 ri = ai; | |
1419 break; | |
1420 | |
1421 case PAIR (ONLY_REAL, ONLY_IMAG): | |
1422 rr = ai; | |
1423 ri = gimplify_build2 (gsi, code, inner_type, ar, bi); | |
1424 ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri); | |
1425 break; | |
1426 | |
1427 case PAIR (ONLY_IMAG, ONLY_REAL): | |
1428 rr = ar; | |
1429 ri = gimplify_build2 (gsi, code, inner_type, ai, br); | |
1430 break; | |
1431 | |
1432 case PAIR (ONLY_IMAG, ONLY_IMAG): | |
1433 rr = gimplify_build2 (gsi, code, inner_type, ai, bi); | |
1434 ri = ar; | |
1435 break; | |
1436 | |
1437 case PAIR (VARYING, ONLY_REAL): | |
1438 rr = gimplify_build2 (gsi, code, inner_type, ar, br); | |
1439 ri = gimplify_build2 (gsi, code, inner_type, ai, br); | |
1440 break; | |
1441 | |
1442 case PAIR (VARYING, ONLY_IMAG): | |
1443 rr = gimplify_build2 (gsi, code, inner_type, ai, bi); | |
1444 ri = gimplify_build2 (gsi, code, inner_type, ar, bi); | |
1445 ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri); | |
111 | 1446 break; |
0 | 1447 |
1448 case PAIR (ONLY_REAL, VARYING): | |
1449 case PAIR (ONLY_IMAG, VARYING): | |
1450 case PAIR (VARYING, VARYING): | |
1451 switch (flag_complex_method) | |
1452 { | |
1453 case 0: | |
1454 /* straightforward implementation of complex divide acceptable. */ | |
1455 expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code); | |
1456 break; | |
1457 | |
1458 case 2: | |
1459 if (SCALAR_FLOAT_TYPE_P (inner_type)) | |
1460 { | |
131 | 1461 expand_complex_libcall (gsi, type, ar, ai, br, bi, code, true); |
0 | 1462 break; |
1463 } | |
1464 /* FALLTHRU */ | |
1465 | |
1466 case 1: | |
1467 /* wide ranges of inputs must work for complex divide. */ | |
1468 expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code); | |
1469 break; | |
1470 | |
1471 default: | |
1472 gcc_unreachable (); | |
1473 } | |
1474 return; | |
1475 | |
1476 default: | |
1477 gcc_unreachable (); | |
1478 } | |
1479 | |
1480 update_complex_assignment (gsi, rr, ri); | |
1481 } | |
1482 | |
1483 /* Expand complex negation to scalars: | |
1484 -a = (-ar) + i(-ai) | |
1485 */ | |
1486 | |
1487 static void | |
1488 expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type, | |
1489 tree ar, tree ai) | |
1490 { | |
1491 tree rr, ri; | |
1492 | |
1493 rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ar); | |
1494 ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai); | |
1495 | |
1496 update_complex_assignment (gsi, rr, ri); | |
1497 } | |
1498 | |
1499 /* Expand complex conjugate to scalars: | |
1500 ~a = (ar) + i(-ai) | |
1501 */ | |
1502 | |
1503 static void | |
1504 expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type, | |
1505 tree ar, tree ai) | |
1506 { | |
1507 tree ri; | |
1508 | |
1509 ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai); | |
1510 | |
1511 update_complex_assignment (gsi, ar, ri); | |
1512 } | |
1513 | |
1514 /* Expand complex comparison (EQ or NE only). */ | |
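A sketch of the scalar form this produces for equality (inequality is the corresponding disjunction):

static int
complex_eq_sketch (double ar, double ai, double br, double bi)
{
  /* a == b iff both component comparisons hold.  */
  return ar == br && ai == bi;
}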
1515 | |
1516 static void | |
1517 expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai, | |
1518 tree br, tree bi, enum tree_code code) | |
1519 { | |
1520 tree cr, ci, cc, type; | |
111 | 1521 gimple *stmt; |
0 | 1522 |
1523 cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br); | |
1524 ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi); | |
1525 cc = gimplify_build2 (gsi, | |
1526 (code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR), | |
1527 boolean_type_node, cr, ci); | |
1528 | |
1529 stmt = gsi_stmt (*gsi); | |
1530 | |
1531 switch (gimple_code (stmt)) | |
1532 { | |
1533 case GIMPLE_RETURN: | |
111 | 1534 { |
1535 greturn *return_stmt = as_a <greturn *> (stmt); | |
1536 type = TREE_TYPE (gimple_return_retval (return_stmt)); | |
1537 gimple_return_set_retval (return_stmt, fold_convert (type, cc)); | |
1538 } | |
0 | 1539 break; |
1540 | |
1541 case GIMPLE_ASSIGN: | |
1542 type = TREE_TYPE (gimple_assign_lhs (stmt)); | |
1543 gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc)); | |
1544 stmt = gsi_stmt (*gsi); | |
1545 break; | |
1546 | |
1547 case GIMPLE_COND: | |
111 | 1548 { |
1549 gcond *cond_stmt = as_a <gcond *> (stmt); | |
1550 gimple_cond_set_code (cond_stmt, EQ_EXPR); | |
1551 gimple_cond_set_lhs (cond_stmt, cc); | |
1552 gimple_cond_set_rhs (cond_stmt, boolean_true_node); | |
1553 } | |
0 | 1554 break; |
1555 | |
1556 default: | |
1557 gcc_unreachable (); | |
1558 } | |
1559 | |
1560 update_stmt (stmt); | |
1561 } | |
1562 | |
111 | 1563 /* Expand inline asm that sets some complex SSA_NAMEs. */ |
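/* The asm statement itself is left untouched; for every complex output
   operand we only queue assignments of its REALPART_EXPR and
   IMAGPART_EXPR to the component SSA names right after the asm, so
   later uses of the components can be lowered normally.  */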
1564 | |
1565 static void | |
1566 expand_complex_asm (gimple_stmt_iterator *gsi) | |
1567 { | |
1568 gasm *stmt = as_a <gasm *> (gsi_stmt (*gsi)); | |
1569 unsigned int i; | |
1570 | |
1571 for (i = 0; i < gimple_asm_noutputs (stmt); ++i) | |
1572 { | |
1573 tree link = gimple_asm_output_op (stmt, i); | |
1574 tree op = TREE_VALUE (link); | |
1575 if (TREE_CODE (op) == SSA_NAME | |
1576 && TREE_CODE (TREE_TYPE (op)) == COMPLEX_TYPE) | |
1577 { | |
1578 tree type = TREE_TYPE (op); | |
1579 tree inner_type = TREE_TYPE (type); | |
1580 tree r = build1 (REALPART_EXPR, inner_type, op); | |
1581 tree i = build1 (IMAGPART_EXPR, inner_type, op); | |
1582 gimple_seq list = set_component_ssa_name (op, false, r); | |
1583 | |
1584 if (list) | |
1585 gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING); | |
1586 | |
1587 list = set_component_ssa_name (op, true, i); | |
1588 if (list) | |
1589 gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING); | |
1590 } | |
1591 } | |
1592 } | |
0 | 1593 |
1594 /* Process one statement. If we identify a complex operation, expand it. */ | |
1595 | |
1596 static void | |
1597 expand_complex_operations_1 (gimple_stmt_iterator *gsi) | |
1598 { | |
111 | 1599 gimple *stmt = gsi_stmt (*gsi); |
0 | 1600 tree type, inner_type, lhs; |
1601 tree ac, ar, ai, bc, br, bi; | |
1602 complex_lattice_t al, bl; | |
1603 enum tree_code code; | |
1604 | |
111 | 1605 if (gimple_code (stmt) == GIMPLE_ASM) |
1606 { | |
1607 expand_complex_asm (gsi); | |
1608 return; | |
1609 } | |
1610 | |
0 | 1611 lhs = gimple_get_lhs (stmt); |
1612 if (!lhs && gimple_code (stmt) != GIMPLE_COND) | |
1613 return; | |
1614 | |
1615 type = TREE_TYPE (gimple_op (stmt, 0)); | |
1616 code = gimple_expr_code (stmt); | |
1617 | |
1618 /* Initial filter for operations we handle. */ | |
1619 switch (code) | |
1620 { | |
1621 case PLUS_EXPR: | |
1622 case MINUS_EXPR: | |
1623 case MULT_EXPR: | |
1624 case TRUNC_DIV_EXPR: | |
1625 case CEIL_DIV_EXPR: | |
1626 case FLOOR_DIV_EXPR: | |
1627 case ROUND_DIV_EXPR: | |
1628 case RDIV_EXPR: | |
1629 case NEGATE_EXPR: | |
1630 case CONJ_EXPR: | |
1631 if (TREE_CODE (type) != COMPLEX_TYPE) | |
1632 return; | |
1633 inner_type = TREE_TYPE (type); | |
1634 break; | |
1635 | |
1636 case EQ_EXPR: | |
1637 case NE_EXPR: | |
1638 /* Note, both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR | |
111 | 1639 subcode, so we need to access the operands using gimple_op. */ |
0 | 1640 inner_type = TREE_TYPE (gimple_op (stmt, 1)); |
1641 if (TREE_CODE (inner_type) != COMPLEX_TYPE) | |
1642 return; | |
1643 break; | |
1644 | |
1645 default: | |
1646 { | |
1647 tree rhs; | |
1648 | |
1649 /* GIMPLE_COND may also fallthru here, but we do not need to | |
1650 do anything with it. */ | |
1651 if (gimple_code (stmt) == GIMPLE_COND) | |
1652 return; | |
1653 | |
1654 if (TREE_CODE (type) == COMPLEX_TYPE) | |
1655 expand_complex_move (gsi, type); | |
1656 else if (is_gimple_assign (stmt) | |
1657 && (gimple_assign_rhs_code (stmt) == REALPART_EXPR | |
1658 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR) | |
1659 && TREE_CODE (lhs) == SSA_NAME) | |
1660 { | |
1661 rhs = gimple_assign_rhs1 (stmt); | |
1662 rhs = extract_component (gsi, TREE_OPERAND (rhs, 0), | |
1663 gimple_assign_rhs_code (stmt) | |
1664 == IMAGPART_EXPR, | |
1665 false); | |
1666 gimple_assign_set_rhs_from_tree (gsi, rhs); | |
1667 stmt = gsi_stmt (*gsi); | |
1668 update_stmt (stmt); | |
1669 } | |
1670 } | |
1671 return; | |
1672 } | |
1673 | |
1674 /* Extract the components of the two complex values. Make sure to | |
1675 handle the common case of the same value used twice specially. */ | |
1676 if (is_gimple_assign (stmt)) | |
1677 { | |
1678 ac = gimple_assign_rhs1 (stmt); | |
1679 bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL; | |
1680 } | |
1681 /* GIMPLE_CALL cannot get here. */ | |
1682 else | |
1683 { | |
1684 ac = gimple_cond_lhs (stmt); | |
1685 bc = gimple_cond_rhs (stmt); | |
1686 } | |
1687 | |
1688 ar = extract_component (gsi, ac, false, true); | |
1689 ai = extract_component (gsi, ac, true, true); | |
1690 | |
1691 if (ac == bc) | |
1692 br = ar, bi = ai; | |
1693 else if (bc) | |
1694 { | |
1695 br = extract_component (gsi, bc, 0, true); | |
1696 bi = extract_component (gsi, bc, 1, true); | |
1697 } | |
1698 else | |
1699 br = bi = NULL_TREE; | |
1700 | |
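  /* Classify the operands.  Names the propagation engine never reached
     are treated conservatively as VARYING; for unary operations the
     second lattice value is simply left unused.  */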
131 | 1701 al = find_lattice_value (ac); |
1702 if (al == UNINITIALIZED) | |
1703 al = VARYING; | |
0 | 1704 |
131 | 1705 if (TREE_CODE_CLASS (code) == tcc_unary) |
1706 bl = UNINITIALIZED; | |
1707 else if (ac == bc) | |
1708 bl = al; | |
1709 else | |
1710 { | |
1711 bl = find_lattice_value (bc); | |
1712 if (bl == UNINITIALIZED) | |
1713 bl = VARYING; | |
0 | 1714 } |
1715 | |
1716 switch (code) | |
1717 { | |
1718 case PLUS_EXPR: | |
1719 case MINUS_EXPR: | |
1720 expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl); | |
1721 break; | |
1722 | |
1723 case MULT_EXPR: | |
131 | 1724 expand_complex_multiplication (gsi, type, ar, ai, br, bi, al, bl); |
0 | 1725 break; |
1726 | |
1727 case TRUNC_DIV_EXPR: | |
1728 case CEIL_DIV_EXPR: | |
1729 case FLOOR_DIV_EXPR: | |
1730 case ROUND_DIV_EXPR: | |
1731 case RDIV_EXPR: | |
131 | 1732 expand_complex_division (gsi, type, ar, ai, br, bi, code, al, bl); |
0 | 1733 break; |
55 | 1734 |
0 | 1735 case NEGATE_EXPR: |
1736 expand_complex_negation (gsi, inner_type, ar, ai); | |
1737 break; | |
1738 | |
1739 case CONJ_EXPR: | |
1740 expand_complex_conjugate (gsi, inner_type, ar, ai); | |
1741 break; | |
1742 | |
1743 case EQ_EXPR: | |
1744 case NE_EXPR: | |
1745 expand_complex_comparison (gsi, ar, ai, br, bi, code); | |
1746 break; | |
1747 | |
1748 default: | |
1749 gcc_unreachable (); | |
1750 } | |
1751 } | |
1752 | |
1753 | |
1754 /* Entry point for complex operation lowering during optimization. */ | |
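/* The pass first runs the lattice propagation to classify every complex
   SSA name as real-only, imaginary-only or varying, then walks the basic
   blocks in reverse post-order rewriting each statement into operations
   on the scalar component SSA names, and finally patches up any PHI
   arguments that could not be resolved during the walk.  */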
1755 | |
1756 static unsigned int | |
1757 tree_lower_complex (void) | |
1758 { | |
1759 gimple_stmt_iterator gsi; | |
1760 basic_block bb; | |
111 | 1761 int n_bbs, i; |
1762 int *rpo; | |
0 | 1763 |
1764 if (!init_dont_simulate_again ()) | |
1765 return 0; | |
1766 | |
111 | 1767 complex_lattice_values.create (num_ssa_names); |
1768 complex_lattice_values.safe_grow_cleared (num_ssa_names); | |
0 | 1769 |
1770 init_parameter_lattice_values (); | |
131 | 1771 class complex_propagate complex_propagate; |
1772 complex_propagate.ssa_propagate (); | |
0 | 1773 |
111 | 1774 complex_variable_components = new int_tree_htab_type (10); |
0 | 1775 |
111 | 1776 complex_ssa_name_components.create (2 * num_ssa_names); |
1777 complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names); | |
0 | 1778 |
1779 update_parameter_components (); | |
1780 | |
111 | 1781 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun)); |
1782 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false); | |
1783 for (i = 0; i < n_bbs; i++) | |
0 | 1784 { |
111 | 1785 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]); |
131 | 1786 if (!bb) |
1787 continue; | |
0 | 1788 update_phi_components (bb); |
1789 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1790 expand_complex_operations_1 (&gsi); | |
1791 } | |
1792 | |
111 | 1793 free (rpo); |
1794 | |
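  /* phis_to_revisit holds groups of three PHIs: the original complex
     PHI followed by the PHIs created for its real and imaginary
     components.  Arguments that could not be extracted during the walk
     above are filled in here.  */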
1795 if (!phis_to_revisit.is_empty ()) | |
1796 { | |
1797 unsigned int n = phis_to_revisit.length (); | |
1798 for (unsigned int j = 0; j < n; j += 3) | |
1799 for (unsigned int k = 0; k < 2; k++) | |
1800 if (gphi *phi = phis_to_revisit[j + k + 1]) | |
1801 { | |
1802 unsigned int m = gimple_phi_num_args (phi); | |
1803 for (unsigned int l = 0; l < m; ++l) | |
1804 { | |
1805 tree op = gimple_phi_arg_def (phi, l); | |
1806 if (TREE_CODE (op) == SSA_NAME | |
1807 || is_gimple_min_invariant (op)) | |
1808 continue; | |
1809 tree arg = gimple_phi_arg_def (phis_to_revisit[j], l); | |
1810 op = extract_component (NULL, arg, k > 0, false, false); | |
1811 SET_PHI_ARG_DEF (phi, l, op); | |
1812 } | |
1813 } | |
1814 phis_to_revisit.release (); | |
1815 } | |
1816 | |
0 | 1817 gsi_commit_edge_inserts (); |
1818 | |
111 | 1819 delete complex_variable_components; |
1820 complex_variable_components = NULL; | |
1821 complex_ssa_name_components.release (); | |
1822 complex_lattice_values.release (); | |
0 | 1823 return 0; |
1824 } | |
1825 | |
111 | 1826 namespace { |
1827 | |
1828 const pass_data pass_data_lower_complex = | |
1829 { | |
1830 GIMPLE_PASS, /* type */ | |
1831 "cplxlower", /* name */ | |
1832 OPTGROUP_NONE, /* optinfo_flags */ | |
1833 TV_NONE, /* tv_id */ | |
1834 PROP_ssa, /* properties_required */ | |
1835 PROP_gimple_lcx, /* properties_provided */ | |
1836 0, /* properties_destroyed */ | |
1837 0, /* todo_flags_start */ | |
1838 TODO_update_ssa, /* todo_flags_finish */ | |
1839 }; | |
1840 | |
1841 class pass_lower_complex : public gimple_opt_pass | |
0 | 1842 { |
111 | 1843 public: |
1844 pass_lower_complex (gcc::context *ctxt) | |
1845 : gimple_opt_pass (pass_data_lower_complex, ctxt) | |
1846 {} | |
1847 | |
1848 /* opt_pass methods: */ | |
1849 opt_pass * clone () { return new pass_lower_complex (m_ctxt); } | |
1850 virtual unsigned int execute (function *) { return tree_lower_complex (); } | |
1851 | |
1852 }; // class pass_lower_complex | |
1853 | |
1854 } // anon namespace | |
1855 | |
1856 gimple_opt_pass * | |
1857 make_pass_lower_complex (gcc::context *ctxt) | |
1858 { | |
1859 return new pass_lower_complex (ctxt); | |
1860 } | |
0 | 1861 |
1862 | |
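/* A second copy of the pass, gated so that it runs only when nothing has
   provided PROP_gimple_lcx yet (at -O0, or when errors suppressed the
   optimization passes), ensuring RTL expansion never sees unlowered
   complex operations.  */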
111 | 1863 namespace { |
1864 | |
1865 const pass_data pass_data_lower_complex_O0 = | |
0 | 1866 { |
111 | 1867 GIMPLE_PASS, /* type */ |
1868 "cplxlower0", /* name */ | |
1869 OPTGROUP_NONE, /* optinfo_flags */ | |
1870 TV_NONE, /* tv_id */ | |
1871 PROP_cfg, /* properties_required */ | |
1872 PROP_gimple_lcx, /* properties_provided */ | |
1873 0, /* properties_destroyed */ | |
1874 0, /* todo_flags_start */ | |
1875 TODO_update_ssa, /* todo_flags_finish */ | |
1876 }; | |
0 | 1877 |
111 | 1878 class pass_lower_complex_O0 : public gimple_opt_pass |
0 | 1879 { |
111 | 1880 public: |
1881 pass_lower_complex_O0 (gcc::context *ctxt) | |
1882 : gimple_opt_pass (pass_data_lower_complex_O0, ctxt) | |
1883 {} | |
1884 | |
1885 /* opt_pass methods: */ | |
1886 virtual bool gate (function *fun) | |
1887 { | |
1888 /* With errors, normal optimization passes are not run. If we don't | |
1889 lower complex operations at all, rtl expansion will abort. */ | |
1890 return !(fun->curr_properties & PROP_gimple_lcx); | |
1891 } | |
1892 | |
1893 virtual unsigned int execute (function *) { return tree_lower_complex (); } | |
1894 | |
1895 }; // class pass_lower_complex_O0 | |
1896 | |
1897 } // anon namespace | |
1898 | |
1899 gimple_opt_pass * | |
1900 make_pass_lower_complex_O0 (gcc::context *ctxt) | |
1901 { | |
1902 return new pass_lower_complex_O0 (ctxt); | |
1903 } |