/* Lower complex number operations to scalar operations.
   Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "real.h"
#include "flags.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "diagnostic.h"

/* For each complex ssa name, a lattice value.  We're interested in finding
   out whether a complex number is degenerate in some way, having only real
   or only imaginary parts.  */

typedef enum
{
  UNINITIALIZED = 0,
  ONLY_REAL = 1,
  ONLY_IMAG = 2,
  VARYING = 3
} complex_lattice_t;

#define PAIR(a, b)  ((a) << 2 | (b))
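/* For example, PAIR (ONLY_REAL, ONLY_IMAG) encodes to (1 << 2) | 2 == 6;
   the two-bit shift suffices because every lattice value fits in two bits.
   The expanders below switch on PAIR (al, bl) to dispatch on the lattice
   values of both operands at once.  */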

DEF_VEC_I(complex_lattice_t);
DEF_VEC_ALLOC_I(complex_lattice_t, heap);

static VEC(complex_lattice_t, heap) *complex_lattice_values;

/* For each complex variable, a pair of variables for the components exists in
   the hashtable.  */
static htab_t complex_variable_components;

/* For each complex SSA_NAME, a pair of ssa names for the components.  */
static VEC(tree, heap) *complex_ssa_name_components;

/* Lookup UID in the complex_variable_components hashtable and return the
   associated tree.  */
static tree
cvc_lookup (unsigned int uid)
{
  struct int_tree_map *h, in;
  in.uid = uid;
  h = (struct int_tree_map *) htab_find_with_hash (complex_variable_components, &in, uid);
  return h ? h->to : NULL;
}

/* Insert the pair UID, TO into the complex_variable_components hashtable.  */

static void
cvc_insert (unsigned int uid, tree to)
{
  struct int_tree_map *h;
  void **loc;

  h = XNEW (struct int_tree_map);
  h->uid = uid;
  h->to = to;
  loc = htab_find_slot_with_hash (complex_variable_components, h,
                                  uid, INSERT);
  *(struct int_tree_map **) loc = h;
}

/* Return true if T is not a zero constant.  In the case of real values,
   we're only interested in +0.0.  */
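/* Note that -0.0 deliberately counts as nonzero below: with signed zeros,
   +0.0 + -0.0 yields +0.0, so the sign of a zero component cannot simply
   be discarded by the lattice.  */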

static int
some_nonzerop (tree t)
{
  int zerop = false;

  if (TREE_CODE (t) == REAL_CST)
    zerop = REAL_VALUES_IDENTICAL (TREE_REAL_CST (t), dconst0);
  else if (TREE_CODE (t) == FIXED_CST)
    zerop = fixed_zerop (t);
  else if (TREE_CODE (t) == INTEGER_CST)
    zerop = integer_zerop (t);

  return !zerop;
}


/* Compute a lattice value from the components of a complex type REAL
   and IMAG.  */

static complex_lattice_t
find_lattice_value_parts (tree real, tree imag)
{
  int r, i;
  complex_lattice_t ret;

  r = some_nonzerop (real);
  i = some_nonzerop (imag);
  ret = r * ONLY_REAL + i * ONLY_IMAG;

  /* ??? On occasion we could do better than mapping 0+0i to real, but we
     certainly don't want to leave it UNINITIALIZED, which eventually gets
     mapped to VARYING.  */
  if (ret == UNINITIALIZED)
    ret = ONLY_REAL;

  return ret;
}


/* Compute a lattice value from gimple_val T.  */

static complex_lattice_t
find_lattice_value (tree t)
{
  tree real, imag;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      return VEC_index (complex_lattice_t, complex_lattice_values,
                        SSA_NAME_VERSION (t));

    case COMPLEX_CST:
      real = TREE_REALPART (t);
      imag = TREE_IMAGPART (t);
      break;

    default:
      gcc_unreachable ();
    }

  return find_lattice_value_parts (real, imag);
}

/* Determine if LHS is something for which we're interested in seeing
   simulation results.  */

static bool
is_complex_reg (tree lhs)
{
  return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs);
}

/* Mark the incoming parameters to the function as VARYING.  */

static void
init_parameter_lattice_values (void)
{
  tree parm, ssa_name;

  for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = TREE_CHAIN (parm))
    if (is_complex_reg (parm)
        && var_ann (parm) != NULL
        && (ssa_name = gimple_default_def (cfun, parm)) != NULL_TREE)
      VEC_replace (complex_lattice_t, complex_lattice_values,
                   SSA_NAME_VERSION (ssa_name), VARYING);
}

/* Initialize simulation state for each statement.  Return false if we
   found no statements we want to simulate, and thus there's nothing
   for the entire pass to do.  */

static bool
init_dont_simulate_again (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple phi;
  bool saw_a_complex_op = false;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          prop_set_simulate_again (phi,
                                   is_complex_reg (gimple_phi_result (phi)));
        }

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt;
          tree op0, op1;
          bool sim_again_p;

          stmt = gsi_stmt (gsi);
          op0 = op1 = NULL_TREE;

          /* Most control-altering statements must be initially
             simulated, else we won't cover the entire cfg.  */
          sim_again_p = stmt_ends_bb_p (stmt);

          switch (gimple_code (stmt))
            {
            case GIMPLE_CALL:
              if (gimple_call_lhs (stmt))
                sim_again_p = is_complex_reg (gimple_call_lhs (stmt));
              break;

            case GIMPLE_ASSIGN:
              sim_again_p = is_complex_reg (gimple_assign_lhs (stmt));
              if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
                  || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
                op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
              else
                op0 = gimple_assign_rhs1 (stmt);
              if (gimple_num_ops (stmt) > 2)
                op1 = gimple_assign_rhs2 (stmt);
              break;

            case GIMPLE_COND:
              op0 = gimple_cond_lhs (stmt);
              op1 = gimple_cond_rhs (stmt);
              break;

            default:
              break;
            }

          if (op0 || op1)
            switch (gimple_expr_code (stmt))
              {
              case EQ_EXPR:
              case NE_EXPR:
              case PLUS_EXPR:
              case MINUS_EXPR:
              case MULT_EXPR:
              case TRUNC_DIV_EXPR:
              case CEIL_DIV_EXPR:
              case FLOOR_DIV_EXPR:
              case ROUND_DIV_EXPR:
              case RDIV_EXPR:
                if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE
                    || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE)
                  saw_a_complex_op = true;
                break;

              case NEGATE_EXPR:
              case CONJ_EXPR:
                if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
                  saw_a_complex_op = true;
                break;

              case REALPART_EXPR:
              case IMAGPART_EXPR:
                /* The total store transformation performed during
                   gimplification creates such uninitialized loads
                   and we need to lower the statement to be able
                   to fix things up.  */
                if (TREE_CODE (op0) == SSA_NAME
                    && ssa_undefined_value_p (op0))
                  saw_a_complex_op = true;
                break;

              default:
                break;
              }

          prop_set_simulate_again (stmt, sim_again_p);
        }
    }

  return saw_a_complex_op;
}


/* Evaluate statement STMT against the complex lattice defined above.  */

static enum ssa_prop_result
complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
                    tree *result_p)
{
  complex_lattice_t new_l, old_l, op1_l, op2_l;
  unsigned int ver;
  tree lhs;

  lhs = gimple_get_lhs (stmt);
  /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs.  */
  if (!lhs)
    return SSA_PROP_VARYING;

  /* These conditions should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (lhs) == SSA_NAME);
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  *result_p = lhs;
  ver = SSA_NAME_VERSION (lhs);
  old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);

  switch (gimple_expr_code (stmt))
    {
    case SSA_NAME:
    case COMPLEX_CST:
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    case COMPLEX_EXPR:
      new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt),
                                        gimple_assign_rhs2 (stmt));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* We've set up the lattice values such that IOR neatly
         models addition.  */
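      /* E.g. ONLY_REAL | ONLY_REAL == ONLY_REAL, while
         ONLY_REAL | ONLY_IMAG == VARYING: adding a pure real to a pure
         imaginary yields a value whose components may both be nonzero.  */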
      new_l = op1_l | op2_l;
      break;

    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* Obviously, if either varies, so does the result.  */
      if (op1_l == VARYING || op2_l == VARYING)
        new_l = VARYING;
      /* Don't prematurely promote variables if we've not yet seen
         their inputs.  */
      else if (op1_l == UNINITIALIZED)
        new_l = op2_l;
      else if (op2_l == UNINITIALIZED)
        new_l = op1_l;
      else
        {
          /* At this point both numbers have only one component.  If the
             numbers are of opposite kind, the result is imaginary,
             otherwise the result is real.  The add/subtract translates
             the real/imag from/to 0/1; the ^ performs the comparison.  */
          new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL;
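          /* Worked example: ONLY_REAL (1) times ONLY_IMAG (2) gives
             (0 ^ 1) + 1 == ONLY_IMAG, matching x * iy == ixy; two
             ONLY_IMAG inputs give (1 ^ 1) + 1 == ONLY_REAL, matching
             i*i == -1.  */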

          /* Don't allow the lattice value to flip-flop indefinitely.  */
          new_l |= old_l;
        }
      break;

    case NEGATE_EXPR:
    case CONJ_EXPR:
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    default:
      new_l = VARYING;
      break;
    }

  /* If nothing changed this round, let the propagator know.  */
  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  VEC_replace (complex_lattice_t, complex_lattice_values, ver, new_l);
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}

/* Evaluate a PHI node against the complex lattice defined above.  */

static enum ssa_prop_result
complex_visit_phi (gimple phi)
{
  complex_lattice_t new_l, old_l;
  unsigned int ver;
  tree lhs;
  int i;

  lhs = gimple_phi_result (phi);

  /* This condition should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  /* We've set up the lattice values such that IOR neatly models PHI meet.  */
  new_l = UNINITIALIZED;
  for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
    new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));

  ver = SSA_NAME_VERSION (lhs);
  old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);

  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  VEC_replace (complex_lattice_t, complex_lattice_values, ver, new_l);
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}

/* Create one backing variable for a complex component of ORIG.  */

static tree
create_one_component_var (tree type, tree orig, const char *prefix,
                          const char *suffix, enum tree_code code)
{
  tree r = create_tmp_var (type, prefix);
  add_referenced_var (r);

  DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (orig);
  DECL_ARTIFICIAL (r) = 1;

  if (DECL_NAME (orig) && !DECL_IGNORED_P (orig))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (orig));
      tree inner_type;

      DECL_NAME (r) = get_identifier (ACONCAT ((name, suffix, NULL)));

      inner_type = TREE_TYPE (TREE_TYPE (orig));
      SET_DECL_DEBUG_EXPR (r, build1 (code, type, orig));
      DECL_DEBUG_EXPR_IS_FROM (r) = 1;
      DECL_IGNORED_P (r) = 0;
      TREE_NO_WARNING (r) = TREE_NO_WARNING (orig);
    }
  else
    {
      DECL_IGNORED_P (r) = 1;
      TREE_NO_WARNING (r) = 1;
    }

  return r;
}

/* Retrieve a value for a complex component of VAR.  */

static tree
get_component_var (tree var, bool imag_p)
{
  size_t decl_index = DECL_UID (var) * 2 + imag_p;
  tree ret = cvc_lookup (decl_index);

  if (ret == NULL)
    {
      ret = create_one_component_var (TREE_TYPE (TREE_TYPE (var)), var,
                                      imag_p ? "CI" : "CR",
                                      imag_p ? "$imag" : "$real",
                                      imag_p ? IMAGPART_EXPR : REALPART_EXPR);
      cvc_insert (decl_index, ret);
    }

  return ret;
}

/* Retrieve a value for a complex component of SSA_NAME.  */

static tree
get_component_ssa_name (tree ssa_name, bool imag_p)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree ret;

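  /* When the lattice says the requested component is identically zero,
     there is no need for a component variable at all; just materialize
     the appropriately typed zero constant.  */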
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    {
      tree inner_type = TREE_TYPE (TREE_TYPE (ssa_name));
      if (SCALAR_FLOAT_TYPE_P (inner_type))
        return build_real (inner_type, dconst0);
      else
        return build_int_cst (inner_type, 0);
    }

  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  ret = VEC_index (tree, complex_ssa_name_components, ssa_name_index);
  if (ret == NULL)
    {
      ret = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
      ret = make_ssa_name (ret, NULL);

      /* Copy some properties from the original.  In particular, whether it
         is used in an abnormal phi, and whether it's uninitialized.  */
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
      if (TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL
          && gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
        {
          SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
          set_default_def (SSA_NAME_VAR (ret), ret);
        }

      VEC_replace (tree, complex_ssa_name_components, ssa_name_index, ret);
    }

  return ret;
}

/* Set a value for a complex component of SSA_NAME, return a
   gimple_seq of stuff that needs doing.  */

static gimple_seq
set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree comp;
  gimple last;
  gimple_seq list;

  /* We know the value must be zero, else there's a bug in our lattice
     analysis.  But the value may well be a variable known to contain
     zero.  We should be safe ignoring it.  */
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    return NULL;

  /* If we've already assigned an SSA_NAME to this component, then this
     means that our walk of the basic blocks found a use before the set.
     This is fine.  Now we should create an initialization for the value
     we created earlier.  */
  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  comp = VEC_index (tree, complex_ssa_name_components, ssa_name_index);
  if (comp)
    ;

  /* If we've nothing assigned, and the value we're given is already stable,
     then install that as the value for this SSA_NAME.  This preemptively
     copy-propagates the value, which avoids unnecessary memory allocation.  */
  else if (is_gimple_min_invariant (value)
           && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      VEC_replace (tree, complex_ssa_name_components, ssa_name_index, value);
      return NULL;
    }
  else if (TREE_CODE (value) == SSA_NAME
           && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      /* Replace an anonymous base value with the variable from cvc_lookup.
         This should result in better debug info.  */
      if (DECL_IGNORED_P (SSA_NAME_VAR (value))
          && !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name)))
        {
          comp = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
          replace_ssa_name_symbol (value, comp);
        }

      VEC_replace (tree, complex_ssa_name_components, ssa_name_index, value);
      return NULL;
    }

  /* Finally, we need to stabilize the result by installing the value into
     a new ssa name.  */
  else
    comp = get_component_ssa_name (ssa_name, imag_p);

  /* Do all the work to assign VALUE to COMP.  */
  list = NULL;
  value = force_gimple_operand (value, &list, false, NULL);
  last = gimple_build_assign (comp, value);
  gimple_seq_add_stmt (&list, last);
  gcc_assert (SSA_NAME_DEF_STMT (comp) == last);

  return list;
}

/* Extract the real or imaginary part of a complex variable or constant.
   Make sure that it's a proper gimple_val and gimplify it if not.
   Emit any new code before gsi.  */

static tree
extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
                   bool gimple_p)
{
  switch (TREE_CODE (t))
    {
    case COMPLEX_CST:
      return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);

    case COMPLEX_EXPR:
      gcc_unreachable ();

    case VAR_DECL:
    case RESULT_DECL:
    case PARM_DECL:
    case INDIRECT_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      {
        tree inner_type = TREE_TYPE (TREE_TYPE (t));

        t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
                    inner_type, unshare_expr (t));

        if (gimple_p)
          t = force_gimple_operand_gsi (gsi, t, true, NULL, true,
                                        GSI_SAME_STMT);

        return t;
      }

    case SSA_NAME:
      return get_component_ssa_name (t, imagpart_p);

    default:
      gcc_unreachable ();
    }
}

/* Update the complex components of the ssa name on the lhs of STMT.  */

static void
update_complex_components (gimple_stmt_iterator *gsi, gimple stmt, tree r,
                           tree i)
{
  tree lhs;
  gimple_seq list;

  lhs = gimple_get_lhs (stmt);

  list = set_component_ssa_name (lhs, false, r);
  if (list)
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

  list = set_component_ssa_name (lhs, true, i);
  if (list)
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
}

static void
update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
{
  gimple_seq list;

  list = set_component_ssa_name (lhs, false, r);
  if (list)
    gsi_insert_seq_on_edge (e, list);

  list = set_component_ssa_name (lhs, true, i);
  if (list)
    gsi_insert_seq_on_edge (e, list);
}


/* Update an assignment to a complex variable in place.  */

static void
update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
{
  gimple_stmt_iterator orig_si = *gsi;

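  /* update_complex_components may link new statements in after *GSI,
     advancing the iterator; ORIG_SI is saved so that we still rewrite
     the original assignment below.  */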
  if (gimple_in_ssa_p (cfun))
    update_complex_components (gsi, gsi_stmt (*gsi), r, i);

  gimple_assign_set_rhs_with_ops (&orig_si, COMPLEX_EXPR, r, i);
  update_stmt (gsi_stmt (orig_si));
}


/* Generate code at the entry point of the function to initialize the
   component variables for a complex parameter.  */

static void
update_parameter_components (void)
{
  edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
  tree parm;

  for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = TREE_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      tree ssa_name, r, i;

      if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm))
        continue;

      type = TREE_TYPE (type);
      ssa_name = gimple_default_def (cfun, parm);
      if (!ssa_name)
        continue;

      r = build1 (REALPART_EXPR, type, ssa_name);
      i = build1 (IMAGPART_EXPR, type, ssa_name);
      update_complex_components_on_edge (entry_edge, ssa_name, r, i);
    }
}

/* Generate code to set the component variables of a complex variable
   to match the PHI statements in block BB.  */

static void
update_phi_components (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      if (is_complex_reg (gimple_phi_result (phi)))
        {
          tree lr, li;
          gimple pr = NULL, pi = NULL;
          unsigned int i, n;

          lr = get_component_ssa_name (gimple_phi_result (phi), false);
          if (TREE_CODE (lr) == SSA_NAME)
            {
              pr = create_phi_node (lr, bb);
              SSA_NAME_DEF_STMT (lr) = pr;
            }

          li = get_component_ssa_name (gimple_phi_result (phi), true);
          if (TREE_CODE (li) == SSA_NAME)
            {
              pi = create_phi_node (li, bb);
              SSA_NAME_DEF_STMT (li) = pi;
            }

          for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i)
            {
              tree comp, arg = gimple_phi_arg_def (phi, i);
              if (pr)
                {
                  comp = extract_component (NULL, arg, false, false);
                  SET_PHI_ARG_DEF (pr, i, comp);
                }
              if (pi)
                {
                  comp = extract_component (NULL, arg, true, false);
                  SET_PHI_ARG_DEF (pi, i, comp);
                }
            }
        }
    }
}

/* Mark each virtual op in STMT for ssa update.  */

static void
update_all_vops (gimple stmt)
{
  ssa_op_iter iter;
  tree sym;

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
        sym = SSA_NAME_VAR (sym);
      mark_sym_for_renaming (sym);
    }
}


/* Expand a complex move to scalars.  */

static void
expand_complex_move (gimple_stmt_iterator *gsi, tree type)
{
  tree inner_type = TREE_TYPE (type);
  tree r, i, lhs, rhs;
  gimple stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (gimple_num_ops (stmt) == 2)
        rhs = gimple_assign_rhs1 (stmt);
      else
        rhs = NULL_TREE;
    }
  else if (is_gimple_call (stmt))
    {
      lhs = gimple_call_lhs (stmt);
      rhs = NULL_TREE;
    }
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) == SSA_NAME)
    {
      if (is_ctrl_altering_stmt (stmt))
        {
          edge_iterator ei;
          edge e;

          /* The value is not assigned on the exception edges, so we need not
             concern ourselves there.  We do need to update on the fallthru
             edge.  Find it.  */
          FOR_EACH_EDGE (e, ei, gsi_bb (*gsi)->succs)
            if (e->flags & EDGE_FALLTHRU)
              goto found_fallthru;
          gcc_unreachable ();
        found_fallthru:

          r = build1 (REALPART_EXPR, inner_type, lhs);
          i = build1 (IMAGPART_EXPR, inner_type, lhs);
          update_complex_components_on_edge (e, lhs, r, i);
        }
      else if (is_gimple_call (stmt)
               || gimple_has_side_effects (stmt)
               || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
        {
          r = build1 (REALPART_EXPR, inner_type, lhs);
          i = build1 (IMAGPART_EXPR, inner_type, lhs);
          update_complex_components (gsi, stmt, r, i);
        }
      else
        {
          update_all_vops (stmt);
          if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
            {
              r = extract_component (gsi, rhs, 0, true);
              i = extract_component (gsi, rhs, 1, true);
            }
          else
            {
              r = gimple_assign_rhs1 (stmt);
              i = gimple_assign_rhs2 (stmt);
            }
          update_complex_assignment (gsi, r, i);
        }
    }
  else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
    {
      tree x;
      gimple t;

      r = extract_component (gsi, rhs, 0, false);
      i = extract_component (gsi, rhs, 1, false);

      x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
      t = gimple_build_assign (x, r);
      gsi_insert_before (gsi, t, GSI_SAME_STMT);

      if (stmt == gsi_stmt (*gsi))
        {
          x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
          gimple_assign_set_lhs (stmt, x);
          gimple_assign_set_rhs1 (stmt, i);
        }
      else
        {
          x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
          t = gimple_build_assign (x, i);
          gsi_insert_before (gsi, t, GSI_SAME_STMT);

          stmt = gsi_stmt (*gsi);
          gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
          gimple_return_set_retval (stmt, lhs);
        }

      update_all_vops (stmt);
      update_stmt (stmt);
    }
}

/* Expand complex addition to scalars:
        a + b = (ar + br) + i(ai + bi)
        a - b = (ar - br) + i(ai - bi)
*/

static void
expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code,
                         complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_REAL, ONLY_IMAG):
      rr = ar;
      if (code == MINUS_EXPR)
        ri = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, bi);
      else
        ri = bi;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      if (code == MINUS_EXPR)
        rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ar, br);
      else
        rr = br;
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (ONLY_REAL, VARYING):
      if (code == MINUS_EXPR)
        goto general;
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = bi;
      break;

    case PAIR (ONLY_IMAG, VARYING):
      if (code == MINUS_EXPR)
        goto general;
      rr = br;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, VARYING):
    general:
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}

/* Expand a complex multiplication or division to a libcall to the c99
   compliant routines.  */

static void
expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
                        tree br, tree bi, enum tree_code code)
{
  enum machine_mode mode;
  enum built_in_function bcode;
  tree fn, type, lhs;
  gimple old_stmt, stmt;

  old_stmt = gsi_stmt (*gsi);
  lhs = gimple_assign_lhs (old_stmt);
  type = TREE_TYPE (lhs);

  mode = TYPE_MODE (type);
  gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);

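  /* The complex float modes and the BUILT_IN_COMPLEX_{MUL,DIV}_* built-in
     enums are laid out in the same order, so the offset of MODE from
     MIN_MODE_COMPLEX_FLOAT selects the matching libgcc routine; e.g. a
     MULT_EXPR in SCmode resolves to __mulsc3.  */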
  if (code == MULT_EXPR)
    bcode = BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT;
  else if (code == RDIV_EXPR)
    bcode = BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT;
  else
    gcc_unreachable ();
  fn = built_in_decls[bcode];

  stmt = gimple_build_call (fn, 4, ar, ai, br, bi);
  gimple_call_set_lhs (stmt, lhs);
  update_stmt (stmt);
  gsi_replace (gsi, stmt, false);

  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
    gimple_purge_dead_eh_edges (gsi_bb (*gsi));

  if (gimple_in_ssa_p (cfun))
    {
      type = TREE_TYPE (type);
      update_complex_components (gsi, stmt,
                                 build1 (REALPART_EXPR, type, lhs),
                                 build1 (IMAGPART_EXPR, type, lhs));
      SSA_NAME_DEF_STMT (lhs) = stmt;
    }
}

/* Expand complex multiplication to scalars:
        a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)
*/

static void
expand_complex_multiplication (gimple_stmt_iterator *gsi, tree inner_type,
                               tree ar, tree ai, tree br, tree bi,
                               complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  if (al < bl)
    {
      complex_lattice_t tl;
      rr = ar, ar = br, br = rr;
      ri = ai, ai = bi, bi = ri;
      tl = al, al = bl, bl = tl;
    }
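  /* Multiplication commutes, so sorting the operands by lattice value
     (al >= bl from here on) halves the number of PAIR cases below;
     e.g. (ONLY_REAL, VARYING) is handled as (VARYING, ONLY_REAL).  */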

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      if (TREE_CODE (ai) == REAL_CST
          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst1))
        ri = br;
      else
        ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
      break;

    case PAIR (VARYING, VARYING):
      if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
        {
          expand_complex_libcall (gsi, ar, ai, br, bi, MULT_EXPR);
          return;
        }
      else
        {
          tree t1, t2, t3, t4;

          t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
          t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
          t3 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);

          /* Avoid expanding redundant multiplication for the common
             case of squaring a complex number.  */
          if (ar == br && ai == bi)
            t4 = t3;
          else
            t4 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);

          rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
          ri = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t3, t4);
        }
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}

/* Expand complex division to scalars, straightforward algorithm.
        a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
            t = br*br + bi*bi
*/
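/* The identity comes from multiplying numerator and denominator by the
   conjugate of b:  a/b = (a * conj(b)) / (b * conj(b)), where the new
   denominator t is purely real.  */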

static void
expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type,
                             tree ar, tree ai, tree br, tree bi,
                             enum tree_code code)
{
  tree rr, ri, div, t1, t2, t3;

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, bi);
  div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
  t3 = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
  rr = gimplify_build2 (gsi, code, inner_type, t3, div);

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
  t3 = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
  ri = gimplify_build2 (gsi, code, inner_type, t3, div);

  update_complex_assignment (gsi, rr, ri);
}

/* Expand complex division to scalars, modified algorithm to minimize
   overflow with wide input ranges.  */
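/* This is commonly known as Smith's algorithm: scale by the ratio of the
   smaller-magnitude denominator component to the larger one, so that the
   intermediate products stay in range where br*br + bi*bi would overflow
   or underflow.  */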

static void
expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code)
{
  tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
  basic_block bb_cond, bb_true, bb_false, bb_join;
  gimple stmt;

  /* Examine |br| < |bi|, and branch.  */
  t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
  t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi);
  compare = fold_build2 (LT_EXPR, boolean_type_node, t1, t2);
  STRIP_NOPS (compare);

  bb_cond = bb_true = bb_false = bb_join = NULL;
  rr = ri = tr = ti = NULL;
  if (!TREE_CONSTANT (compare))
    {
      edge e;
      gimple stmt;
      tree cond, tmp;

      tmp = create_tmp_var (boolean_type_node, NULL);
      stmt = gimple_build_assign (tmp, compare);
      if (gimple_in_ssa_p (cfun))
        {
          tmp = make_ssa_name (tmp, stmt);
          gimple_assign_set_lhs (stmt, tmp);
        }

      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      cond = fold_build2 (EQ_EXPR, boolean_type_node, tmp, boolean_true_node);
      stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      /* Split the original block, and create the TRUE and FALSE blocks.  */
      e = split_block (gsi_bb (*gsi), stmt);
      bb_cond = e->src;
      bb_join = e->dest;
      bb_true = create_empty_bb (bb_cond);
      bb_false = create_empty_bb (bb_true);

      /* Wire the blocks together.  */
      e->flags = EDGE_TRUE_VALUE;
      redirect_edge_succ (e, bb_true);
      make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE);
      make_edge (bb_true, bb_join, EDGE_FALLTHRU);
      make_edge (bb_false, bb_join, EDGE_FALLTHRU);

      /* Update dominance info.  Note that bb_join's data was
         updated by split_block.  */
      if (dom_info_available_p (CDI_DOMINATORS))
        {
          set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond);
          set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond);
        }

      rr = make_rename_temp (inner_type, NULL);
      ri = make_rename_temp (inner_type, NULL);
    }

  /* In the TRUE branch, we compute
        ratio = br/bi;
        div = (br * ratio) + bi;
        tr = (ar * ratio) + ai;
        ti = (ai * ratio) - ar;
        tr = tr / div;
        ti = ti / div;  */
  if (bb_true || integer_nonzerop (compare))
    {
      if (bb_true)
        {
          *gsi = gsi_last_bb (bb_true);
          gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
        }

      ratio = gimplify_build2 (gsi, code, inner_type, br, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ai);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, ar);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

      if (bb_true)
        {
          stmt = gimple_build_assign (rr, tr);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          stmt = gimple_build_assign (ri, ti);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          gsi_remove (gsi, true);
        }
    }

  /* In the FALSE branch, we compute
        ratio = bi/br;
        div = (bi * ratio) + br;
        tr = (ai * ratio) + ar;
        ti = ai - (ar * ratio);
        tr = tr / div;
        ti = ti / div;  */
  if (bb_false || integer_zerop (compare))
    {
      if (bb_false)
        {
          *gsi = gsi_last_bb (bb_false);
          gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
        }

      ratio = gimplify_build2 (gsi, code, inner_type, bi, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ar);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, t1);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

      if (bb_false)
        {
          stmt = gimple_build_assign (rr, tr);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          stmt = gimple_build_assign (ri, ti);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          gsi_remove (gsi, true);
        }
    }

  if (bb_join)
    *gsi = gsi_start_bb (bb_join);
  else
    rr = tr, ri = ti;

  update_complex_assignment (gsi, rr, ri);
}

/* Expand complex division to scalars.  */

static void
expand_complex_division (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code,
                         complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_REAL, ONLY_IMAG):
      rr = ai;
      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
      break;

    case PAIR (ONLY_REAL, VARYING):
    case PAIR (ONLY_IMAG, VARYING):
    case PAIR (VARYING, VARYING):
      switch (flag_complex_method)
        {
        case 0:
          /* straightforward implementation of complex divide acceptable.  */
          expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code);
          break;

        case 2:
          if (SCALAR_FLOAT_TYPE_P (inner_type))
            {
              expand_complex_libcall (gsi, ar, ai, br, bi, code);
              break;
            }
          /* FALLTHRU */

        case 1:
          /* wide ranges of inputs must work for complex divide.  */
          expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code);
          break;

        default:
          gcc_unreachable ();
        }
      return;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}

/* Expand complex negation to scalars:
        -a = (-ar) + i(-ai)
*/

static void
expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai)
{
  tree rr, ri;

  rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ar);
  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);

  update_complex_assignment (gsi, rr, ri);
}

/* Expand complex conjugate to scalars:
        ~a = (ar) + i(-ai)
*/

static void
expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type,
                          tree ar, tree ai)
{
  tree ri;

  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);

  update_complex_assignment (gsi, ar, ri);
}

/* Expand complex comparison (EQ or NE only).  */

static void
expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
                           tree br, tree bi, enum tree_code code)
{
  tree cr, ci, cc, type;
  gimple stmt;

  cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
  ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
  cc = gimplify_build2 (gsi,
                        (code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
                        boolean_type_node, cr, ci);
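  /* I.e. a == b becomes (ar == br) && (ai == bi), while a != b becomes
     (ar != br) || (ai != bi).  */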

  stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_RETURN:
      type = TREE_TYPE (gimple_return_retval (stmt));
      gimple_return_set_retval (stmt, fold_convert (type, cc));
      break;

    case GIMPLE_ASSIGN:
      type = TREE_TYPE (gimple_assign_lhs (stmt));
      gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc));
      stmt = gsi_stmt (*gsi);
      break;

    case GIMPLE_COND:
      gimple_cond_set_code (stmt, EQ_EXPR);
      gimple_cond_set_lhs (stmt, cc);
      gimple_cond_set_rhs (stmt, boolean_true_node);
      break;

    default:
      gcc_unreachable ();
    }

  update_stmt (stmt);
}


/* Process one statement.  If we identify a complex operation, expand it.  */

static void
expand_complex_operations_1 (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree type, inner_type, lhs;
  tree ac, ar, ai, bc, br, bi;
  complex_lattice_t al, bl;
  enum tree_code code;

  lhs = gimple_get_lhs (stmt);
  if (!lhs && gimple_code (stmt) != GIMPLE_COND)
    return;

  type = TREE_TYPE (gimple_op (stmt, 0));
  code = gimple_expr_code (stmt);

  /* Initial filter for operations we handle.  */
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
    case NEGATE_EXPR:
    case CONJ_EXPR:
      if (TREE_CODE (type) != COMPLEX_TYPE)
        return;
      inner_type = TREE_TYPE (type);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      /* Note, both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR
         subcode, so we need to access the operands using gimple_op.  */
      inner_type = TREE_TYPE (gimple_op (stmt, 1));
      if (TREE_CODE (inner_type) != COMPLEX_TYPE)
        return;
      break;

    default:
      {
        tree rhs;

        /* GIMPLE_COND may also fallthru here, but we do not need to
           do anything with it.  */
        if (gimple_code (stmt) == GIMPLE_COND)
          return;

        if (TREE_CODE (type) == COMPLEX_TYPE)
          expand_complex_move (gsi, type);
        else if (is_gimple_assign (stmt)
                 && (gimple_assign_rhs_code (stmt) == REALPART_EXPR
                     || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
                 && TREE_CODE (lhs) == SSA_NAME)
          {
            rhs = gimple_assign_rhs1 (stmt);
            rhs = extract_component (gsi, TREE_OPERAND (rhs, 0),
                                     gimple_assign_rhs_code (stmt)
                                       == IMAGPART_EXPR,
                                     false);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            stmt = gsi_stmt (*gsi);
            update_stmt (stmt);
          }
      }
      return;
    }

  /* Extract the components of the two complex values.  Take care to
     handle the common case of the same value used twice specially.  */
  if (is_gimple_assign (stmt))
    {
      ac = gimple_assign_rhs1 (stmt);
      bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
    }
  /* GIMPLE_CALL can not get here.  */
  else
    {
      ac = gimple_cond_lhs (stmt);
      bc = gimple_cond_rhs (stmt);
    }

  ar = extract_component (gsi, ac, false, true);
  ai = extract_component (gsi, ac, true, true);

  if (ac == bc)
    br = ar, bi = ai;
  else if (bc)
    {
      br = extract_component (gsi, bc, 0, true);
      bi = extract_component (gsi, bc, 1, true);
    }
  else
    br = bi = NULL_TREE;

  if (gimple_in_ssa_p (cfun))
    {
      al = find_lattice_value (ac);
      if (al == UNINITIALIZED)
        al = VARYING;

      if (TREE_CODE_CLASS (code) == tcc_unary)
        bl = UNINITIALIZED;
      else if (ac == bc)
        bl = al;
      else
        {
          bl = find_lattice_value (bc);
          if (bl == UNINITIALIZED)
            bl = VARYING;
        }
    }
  else
    al = bl = VARYING;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case MULT_EXPR:
      expand_complex_multiplication (gsi, inner_type, ar, ai, br, bi, al, bl);
      break;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
      expand_complex_division (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case NEGATE_EXPR:
      expand_complex_negation (gsi, inner_type, ar, ai);
      break;

    case CONJ_EXPR:
      expand_complex_conjugate (gsi, inner_type, ar, ai);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      expand_complex_comparison (gsi, ar, ai, br, bi, code);
      break;

    default:
      gcc_unreachable ();
    }
}


/* Entry point for complex operation lowering during optimization.  */

static unsigned int
tree_lower_complex (void)
{
  int old_last_basic_block;
  gimple_stmt_iterator gsi;
  basic_block bb;

  if (!init_dont_simulate_again ())
    return 0;

  complex_lattice_values = VEC_alloc (complex_lattice_t, heap, num_ssa_names);
  VEC_safe_grow_cleared (complex_lattice_t, heap,
                         complex_lattice_values, num_ssa_names);

  init_parameter_lattice_values ();
  ssa_propagate (complex_visit_stmt, complex_visit_phi);

  complex_variable_components = htab_create (10, int_tree_map_hash,
                                             int_tree_map_eq, free);

  complex_ssa_name_components = VEC_alloc (tree, heap, 2 * num_ssa_names);
  VEC_safe_grow_cleared (tree, heap, complex_ssa_name_components,
                         2 * num_ssa_names);
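  /* The vector is indexed pairwise: slot 2*version holds the SSA name for
     the real component, slot 2*version + 1 the one for the imaginary
     component, matching ssa_name_index in get/set_component_ssa_name.  */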

  update_parameter_components ();

  /* ??? Ideally we'd traverse the blocks in breadth-first order.  */
  old_last_basic_block = last_basic_block;
  FOR_EACH_BB (bb)
    {
      if (bb->index >= old_last_basic_block)
        continue;

      update_phi_components (bb);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        expand_complex_operations_1 (&gsi);
    }

  gsi_commit_edge_inserts ();

  htab_delete (complex_variable_components);
  VEC_free (tree, heap, complex_ssa_name_components);
  VEC_free (complex_lattice_t, heap, complex_lattice_values);
  return 0;
}

struct gimple_opt_pass pass_lower_complex =
{
 {
  GIMPLE_PASS,
  "cplxlower",				/* name */
  0,					/* gate */
  tree_lower_complex,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_ggc_collect
    | TODO_update_ssa
    | TODO_verify_stmts		 	/* todo_flags_finish */
 }
};


/* Entry point for complex operation lowering without optimization.  */

static unsigned int
tree_lower_complex_O0 (void)
{
  int old_last_basic_block = last_basic_block;
  gimple_stmt_iterator gsi;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      if (bb->index >= old_last_basic_block)
        continue;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        expand_complex_operations_1 (&gsi);
    }
  return 0;
}

static bool
gate_no_optimization (void)
{
  /* With errors, normal optimization passes are not run.  If we don't
     lower complex operations at all, rtl expansion will abort.  */
  return optimize == 0 || sorrycount || errorcount;
}

struct gimple_opt_pass pass_lower_complex_O0 =
{
 {
  GIMPLE_PASS,
  "cplxlower0",				/* name */
  gate_no_optimization,			/* gate */
  tree_lower_complex_O0,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect
    | TODO_verify_stmts,		/* todo_flags_finish */
 }
};