/* Combining of if-expressions on trees.
   Copyright (C) 2007, 2008 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "timevar.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"

/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent logical and or logical or of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent logical and or logical or of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   either

     1. two single bit tests X & (1 << Yn) (for logical and)

     2. two bit tests X & Yn (for logical or)

     3. two comparisons X OPn Y (for logical or)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */
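
/* For illustration only (the variable names below are made up), the three
   cases roughly correspond to the source-level rewrites

     1.  if (x & (1 << a))  if (x & (1 << b))  ...
         -> a single test of the form (x & T) == T
            with T = (1 << a) | (1 << b)

     2.  if (x & a) ... else if (x & b) ...
         -> a test of the form (x & (a | b)) != 0

     3.  if (x > y) ... else if (x == y) ...
         -> a test of the form x >= y  */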

/* Recognize an if-then-else CFG pattern starting to match with the
   COND_BB basic-block containing the COND_EXPR.  The recognized
   then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
   *THEN_BB and/or *ELSE_BB are already set, they are required to
   match the then and else basic-blocks to make the pattern match.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_if_then_else (basic_block cond_bb,
                        basic_block *then_bb, basic_block *else_bb)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2)
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);
  if (!(t->flags & EDGE_TRUE_VALUE))
    {
      edge tmp = t;
      t = e;
      e = tmp;
    }
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  if (!*then_bb)
    *then_bb = t->dest;
  if (!*else_bb)
    *else_bb = e->dest;

  return true;
}

/* Verify if the basic block BB does not have side-effects.  Return
   true in this case, else false.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_has_volatile_ops (stmt)
          || !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
        return false;
    }

  return true;
}

/* Verify if all PHI node arguments in DEST for edges from BB1 or
   BB2 to DEST are the same.  This makes the CFG merge point
   free from side-effects.  Return true in this case, else false.  */

static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gimple_stmt_iterator gsi;
  gimple phi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi_stmt (gsi);
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
                            PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
        return false;
    }

  return true;
}

/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */
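
/* For instance (purely illustrative names): if CANDIDATE is tmp_2 defined
   by tmp_2 = (int) x_1 and the cast does not widen (tmp_2 is not wider
   than x_1), then x_1 is preferred over tmp_2.  */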

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_cast_p (def_stmt))
        {
          if (TYPE_PRECISION (TREE_TYPE (candidate))
              <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
            return gimple_assign_rhs1 (def_stmt);
        }
    }

  return candidate;
}

/* Helpers for recognize_single_bit_test defined mainly for source code
   formatting.  */

static int
operand_precision (tree t)
{
  return TYPE_PRECISION (TREE_TYPE (t));
}

static bool
integral_operand_p (tree t)
{
  return INTEGRAL_TYPE_P (TREE_TYPE (t));
}

/* Recognize a single bit test pattern in GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.  */
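
/* For instance (purely illustrative): for if ((x >> b) & 1) this sets
   *NAME to x and *BIT to b; for if (x & 8) it sets *NAME to x and
   *BIT to 3.  */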

static bool
recognize_single_bit_test (gimple cond, tree *name, tree *bit)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != NE_EXPR
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
       D.1985_5 = state_3(D) >> control1_4(D);
       D.1986_6 = (int) D.1985_5;
       D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
         find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
             && (gimple_assign_ssa_name_copy_p (stmt)
                 || (gimple_assign_cast_p (stmt)
                     && integral_operand_p (gimple_assign_lhs (stmt))
                     && integral_operand_p (gimple_assign_rhs1 (stmt))
                     && (operand_precision (gimple_assign_lhs (stmt))
                         <= operand_precision (gimple_assign_rhs1 (stmt))))))
        {
          stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
        }

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
        {
          /* op0 & (1 << op1)  */
          *bit = gimple_assign_rhs2 (stmt);
          *name = gimple_assign_rhs1 (stmt);
        }
      else
        {
          /* t & 1  */
          *bit = integer_zero_node;
          *name = get_name_for_bit_test (orig_name);
        }

      return true;
    }

  /* Another form is
       D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
                            tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
       D.1986_6 = 1 << control1_4(D)
       D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
         specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs2 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs1 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }
    }

  return false;
}

/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */
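
/* For instance (purely illustrative): for if (x & 0xf0) this sets
   *NAME to x and *BITS to 0xf0.  */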

static bool
recognize_bits_test (gimple cond, tree *name, tree *bits)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != NE_EXPR
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}

/* If-convert on an and-pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   Returns true if the edges to the common else basic-block were merged.  */
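
/* For instance (purely illustrative),
     if (x & (1 << a))  if (x & (1 << b))  ...
   is rewritten so that the inner condition becomes (x & T) == T
   with T = (1 << a) | (1 << b) and the outer condition becomes true.  */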

static bool
ifcombine_ifandif (basic_block inner_cond_bb, basic_block outer_cond_bb)
{
  gimple_stmt_iterator gsi;
  gimple inner_cond, outer_cond;
  tree name1, name2, bit1, bit2;

  inner_cond = last_stmt (inner_cond_bb);
  if (!inner_cond
      || gimple_code (inner_cond) != GIMPLE_COND)
    return false;

  outer_cond = last_stmt (outer_cond_bb);
  if (!outer_cond
      || gimple_code (outer_cond) != GIMPLE_COND)
    return false;

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & ((1 << bit1) | (1 << bit2)) == ((1 << bit1) | (1 << bit2)).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1)
      && recognize_single_bit_test (outer_cond, &name2, &bit2)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                        build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
                                     true, GSI_SAME_STMT);
      t = fold_build2 (EQ_EXPR, boolean_type_node, t2, t);
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond, boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing double bit test to ");
          print_generic_expr (dump_file, name1, 0);
          fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
          print_generic_expr (dump_file, bit1, 0);
          fprintf (dump_file, ") | (1 << ");
          print_generic_expr (dump_file, bit2, 0);
          fprintf (dump_file, ")\n");
        }

      return true;
    }

  return false;
}

/* If-convert on an or-pattern with a common then block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   Returns true if the edges leading to the common then basic-block
   were merged.  */
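
/* For instance (purely illustrative),
     if (x & 3) goto then; else if (x & 12) goto then;
   is rewritten so that the inner condition becomes (x & 15) != 0
   and the outer condition becomes false.  */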

static bool
ifcombine_iforif (basic_block inner_cond_bb, basic_block outer_cond_bb)
{
  gimple inner_cond, outer_cond;
  tree name1, name2, bits1, bits2;

  inner_cond = last_stmt (inner_cond_bb);
  if (!inner_cond
      || gimple_code (inner_cond) != GIMPLE_COND)
    return false;

  outer_cond = last_stmt (outer_cond_bb);
  if (!outer_cond
      || gimple_code (outer_cond) != GIMPLE_COND)
    return false;

  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  if (recognize_bits_test (inner_cond, &name1, &bits1)
      && recognize_bits_test (outer_cond, &name2, &bits2))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
        ;
      else if (bits1 == bits2)
        {
          t = name2;
          name2 = bits2;
          bits2 = t;
          t = name1;
          name1 = bits1;
          bits1 = t;
        }
      else if (name1 == bits2)
        {
          t = name2;
          name2 = bits2;
          bits2 = t;
        }
      else if (bits1 == name2)
        {
          t = name1;
          name1 = bits1;
          bits1 = t;
        }
      else
        return false;

      /* As we strip non-widening conversions in finding a common
         name that is tested, make sure to end up with an integral
         type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
          >= TYPE_PRECISION (TREE_TYPE (bits2)))
        {
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          name1 = fold_convert (TREE_TYPE (bits1), name1);
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          bits2 = fold_convert (TREE_TYPE (bits1), bits2);
        }
      else
        {
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          name1 = fold_convert (TREE_TYPE (bits2), name1);
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          bits1 = fold_convert (TREE_TYPE (bits2), bits1);
        }

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t = fold_build2 (NE_EXPR, boolean_type_node, t,
                       build_int_cst (TREE_TYPE (t), 0));
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond, boolean_false_node);
      update_stmt (outer_cond);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing bits or bits test to ");
          print_generic_expr (dump_file, name1, 0);
          fprintf (dump_file, " & T != 0\nwith temporary T = ");
          print_generic_expr (dump_file, bits1, 0);
          fprintf (dump_file, " | ");
          print_generic_expr (dump_file, bits2, 0);
          fprintf (dump_file, "\n");
        }

      return true;
    }

  /* See if we have two comparisons that we can merge into one.
     This happens for C++ operator overloading where for example
     GE_EXPR is implemented as GT_EXPR || EQ_EXPR.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
           && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison
           && operand_equal_p (gimple_cond_lhs (inner_cond),
                               gimple_cond_lhs (outer_cond), 0)
           && operand_equal_p (gimple_cond_rhs (inner_cond),
                               gimple_cond_rhs (outer_cond), 0))
    {
      enum tree_code code1 = gimple_cond_code (inner_cond);
      enum tree_code code2 = gimple_cond_code (outer_cond);
      enum tree_code code;
      tree t;

#define CHK(a,b) ((code1 == a ## _EXPR && code2 == b ## _EXPR) \
                  || (code2 == a ## _EXPR && code1 == b ## _EXPR))
      /* Merge the two condition codes if possible.  */
      if (code1 == code2)
        code = code1;
      else if (CHK (EQ, LT))
        code = LE_EXPR;
      else if (CHK (EQ, GT))
        code = GE_EXPR;
      else if (CHK (LT, LE))
        code = LE_EXPR;
      else if (CHK (GT, GE))
        code = GE_EXPR;
      else if (INTEGRAL_TYPE_P (TREE_TYPE (gimple_cond_lhs (inner_cond)))
               || flag_unsafe_math_optimizations)
        {
          if (CHK (LT, GT))
            code = NE_EXPR;
          else if (CHK (LT, NE))
            code = NE_EXPR;
          else if (CHK (GT, NE))
            code = NE_EXPR;
          else
            return false;
        }
      /* We could check for combinations leading to trivial true/false.  */
      else
        return false;
#undef CHK

      /* Do it.  */
      t = fold_build2 (code, boolean_type_node, gimple_cond_lhs (outer_cond),
                       gimple_cond_rhs (outer_cond));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond, boolean_false_node);
      update_stmt (outer_cond);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing two comparisons to ");
          print_generic_expr (dump_file, t, 0);
          fprintf (dump_file, "\n");
        }

      return true;
    }

  return false;
}

/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with INNER_COND_BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block whose entry edges we can merge.  That is:
       if (a || b)
         ;
     and
       if (a && b)
         ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      /* The && form is characterized by a common else_bb with
         the two edges leading to it mergeable.  The latter is
         guaranteed by matching PHI arguments in the else_bb and
         the inner cond_bb having no side-effects.  */
      if (recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
          && same_phi_args_p (outer_cond_bb, inner_cond_bb, else_bb)
          && bb_no_side_effects_p (inner_cond_bb))
        {
          /* We have
               <outer_cond_bb>
                 if (q) goto inner_cond_bb; else goto else_bb;
               <inner_cond_bb>
                 if (p) goto ...; else goto else_bb;
               ...
               <else_bb>
                 ...
           */
          return ifcombine_ifandif (inner_cond_bb, outer_cond_bb);
        }

      /* The || form is characterized by a common then_bb with the
         two edges leading to it mergeable.  The latter is guaranteed
         by matching PHI arguments in the then_bb and the inner cond_bb
         having no side-effects.  */
      if (recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
          && same_phi_args_p (outer_cond_bb, inner_cond_bb, then_bb)
          && bb_no_side_effects_p (inner_cond_bb))
        {
          /* We have
               <outer_cond_bb>
                 if (q) goto then_bb; else goto inner_cond_bb;
               <inner_cond_bb>
                 if (p) goto then_bb; else goto ...;
               <then_bb>
                 ...
           */
          return ifcombine_iforif (inner_cond_bb, outer_cond_bb);
        }
    }

  return false;
}

/* Main entry for the tree if-combine pass.  */

static unsigned int
tree_ssa_ifcombine (void)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = blocks_in_phiopt_order ();

  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; ++i)
    {
      basic_block bb = bbs[i];
      gimple stmt = last_stmt (bb);

      if (stmt
          && gimple_code (stmt) == GIMPLE_COND)
        cfg_changed |= tree_ssa_ifcombine_bb (bb);
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

static bool
gate_ifcombine (void)
{
  return 1;
}

struct gimple_opt_pass pass_tree_ifcombine =
{
 {
  GIMPLE_PASS,
  "ifcombine",                  /* name */
  gate_ifcombine,               /* gate */
  tree_ssa_ifcombine,           /* execute */
  NULL,                         /* sub */
  NULL,                         /* next */
  0,                            /* static_pass_number */
  TV_TREE_IFCOMBINE,            /* tv_id */
  PROP_cfg | PROP_ssa,          /* properties_required */
  0,                            /* properties_provided */
  0,                            /* properties_destroyed */
  0,                            /* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa             /* todo_flags_finish */
 }
};