/* Combining of if-expressions on trees.
   Copyright (C) 2007-2013 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
/* rtl.h is needed only because the ARM back end requires it for
   BRANCH_COST.  */
#include "rtl.h"
#include "tm_p.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-pass.h"

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
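
/* For illustration (hypothetical example): when the macro above is
   true, the two-comparison case in ifcombine_ifandif may turn

     if (a < b)
       if (c < d)
         foo ();

   into something like

     _1 = a < b;
     _2 = c < d;
     _3 = _1 & _2;
     if (_3 != 0)
       foo ();

   i.e. it trades a conditional branch for a bitwise AND, which is
   intended to be profitable on targets where branches are relatively
   expensive.  */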

/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent a logical AND or logical OR of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent these combinations.
   Transformations are done if the COND_EXPR conditions match
   one of

     1. two single bit tests X & (1 << Yn) (for logical AND)

     2. two bit tests X & Yn (for logical OR)

     3. two comparisons X OPn Y (for logical OR)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */
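
/* As a rough source-level sketch (hypothetical example), case 1 above
   turns

     if (x & (1 << a))
       if (x & (1 << b))
         foo ();

   into the equivalent of

     tmp = (1 << a) | (1 << b);
     if ((x & tmp) == tmp)
       foo ();

   so that a single conditional branch remains.  */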

/* Recognize an if-then-else CFG pattern starting to match with the
   COND_BB basic-block containing the COND_EXPR.  The recognized
   then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
   *THEN_BB and/or *ELSE_BB are already set, they are required to
   match the then and else basic-blocks to make the pattern match.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_if_then_else (basic_block cond_bb,
			basic_block *then_bb, basic_block *else_bb)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2)
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);
  if (!(t->flags & EDGE_TRUE_VALUE))
    {
      edge tmp = t;
      t = e;
      e = tmp;
    }
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  if (!*then_bb)
    *then_bb = t->dest;
  if (!*else_bb)
    *else_bb = e->dest;

  return true;
}
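
/* For example, the shape recognized above is a block ending in a
   GIMPLE_COND with exactly two successor edges, one marked
   EDGE_TRUE_VALUE and one marked EDGE_FALSE_VALUE:

     <cond_bb>
       if (cond) goto <then_bb>; else goto <else_bb>;

   If the caller passed in known *THEN_BB or *ELSE_BB values, the edge
   destinations must additionally match them.  */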

/* Verify that basic block BB has no side effects.  Return true if so,
   false otherwise.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_has_side_effects (stmt)
	  || gimple_vuse (stmt))
	return false;
    }

  return true;
}
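
/* For illustration (hypothetical GIMPLE): any statement with side
   effects or a virtual operand disqualifies the block.  A block such
   as

     tmp_1 = *p_2;
     if (tmp_1 != 0)

   is rejected because the load has a VUSE, while a block containing
   only arithmetic on SSA names ahead of the GIMPLE_COND is accepted.  */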

/* Verify if all PHI node arguments in DEST for edges from BB1 or
   BB2 to DEST are the same.  This makes the CFG merge point
   free from side-effects.  Return true in this case, else false.  */

static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gimple_stmt_iterator gsi;
  gimple phi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi_stmt (gsi);
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
			    PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
	return false;
    }

  return true;
}
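
/* For example (hypothetical GIMPLE), with edges BB1->DEST and
   BB2->DEST, a PHI in DEST of the form

     x_3 = PHI <x_1(bb1), x_1(bb2), x_2(other)>

   is fine because both incoming arguments are x_1, whereas

     x_3 = PHI <x_1(bb1), x_2(bb2)>

   would make the merge point depend on which edge is taken and thus
   blocks the combination.  */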

/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
	{
	  if (TYPE_PRECISION (TREE_TYPE (candidate))
	      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
	    return gimple_assign_rhs1 (def_stmt);
	}
    }

  return candidate;
}
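
/* For example (hypothetical GIMPLE): if CANDIDATE is _2 in

     _2 = (unsigned char) x_1;
     _3 = _2 & 4;

   and _2 has that single use, the conversion is non-widening, so x_1
   is returned and the bit test can be expressed on the wider name.  */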

/* Recognize a single bit test pattern in GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_single_bit_test (gimple cond, tree *name, tree *bit, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
	 find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
	     && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
		      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt)))))
		 || gimple_assign_ssa_name_copy_p (stmt)))
	stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
	{
	  /* op0 & (1 << op1) */
	  *bit = gimple_assign_rhs2 (stmt);
	  *name = gimple_assign_rhs1 (stmt);
	}
      else
	{
	  /* t & 1 */
	  *bit = integer_zero_node;
	  *name = get_name_for_bit_test (orig_name);
	}

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
			    tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
	 specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs2 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs1 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}
    }

  return false;
}
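
/* For example (hypothetical GIMPLE), with INV false the NE_EXPR form

     _1 = x_2 & 4;
     if (_1 != 0)

   is decomposed into *NAME = x_2 and *BIT = 2; with INV true the
   corresponding EQ_EXPR form (a test that the bit is clear) is
   matched instead.  */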

/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The GIMPLE_COND computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_bits_test (gimple cond, tree *name, tree *bits, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}
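
/* For example (hypothetical GIMPLE):

     _1 = flags_2 & 6;
     if (_1 != 0)

   is decomposed into *NAME = flags_2 and *BITS = 6, i.e. a test
   whether any of a set of bits is set.  */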

/* If-convert on an and-pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   INNER_INV, OUTER_INV and RESULT_INV indicate whether the conditions
   are inverted.
   Returns true if the edges to the common else basic-block were merged.  */
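
/* As a rough sketch (hypothetical GIMPLE) of the first case handled
   below, the double single-bit test

     <outer_cond_bb>
       _1 = x_5 & 1;
       if (_1 != 0) goto <inner_cond_bb>; else goto <else_bb>;
     <inner_cond_bb>
       _2 = x_5 & 4;
       if (_2 != 0) goto <then_bb>; else goto <else_bb>;

   becomes

     <outer_cond_bb>
       if (1 != 0) goto <inner_cond_bb>; else goto <else_bb>;
     <inner_cond_bb>
       _3 = x_5 & 5;
       if (_3 == 5) goto <then_bb>; else goto <else_bb>;

   with the now-trivial outer condition left for CFG cleanup to
   remove.  */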

static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
		   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple inner_cond, outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  inner_cond = last_stmt (inner_cond_bb);
  if (!inner_cond
      || gimple_code (inner_cond) != GIMPLE_COND)
    return false;

  outer_cond = last_stmt (outer_cond_bb);
  if (!outer_cond
      || gimple_code (outer_cond) != GIMPLE_COND)
    return false;

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
		       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
			build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
				     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
		       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing double bit test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
	  print_generic_expr (dump_file, bit1, 0);
	  fprintf (dump_file, ") | (1 << ");
	  print_generic_expr (dump_file, bit2, 0);
	  fprintf (dump_file, ")\n");
	}

      return true;
    }

  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
	   && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
	;
      else if (bits1 == bits2)
	{
	  t = name2;
	  name2 = bits2;
	  bits2 = t;
	  t = name1;
	  name1 = bits1;
	  bits1 = t;
	}
      else if (name1 == bits2)
	{
	  t = name2;
	  name2 = bits2;
	  bits2 = t;
	}
      else if (bits1 == name2)
	{
	  t = name1;
	  name1 = bits1;
	  bits1 = t;
	}
      else
	return false;

      /* As we strip non-widening conversions in finding a common
	 name that is tested, make sure to end up with an integral
	 type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
	  >= TYPE_PRECISION (TREE_TYPE (bits2)))
	{
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  name1 = fold_convert (TREE_TYPE (bits1), name1);
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  bits2 = fold_convert (TREE_TYPE (bits1), bits2);
	}
      else
	{
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  name1 = fold_convert (TREE_TYPE (bits2), name1);
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  bits1 = fold_convert (TREE_TYPE (bits2), bits1);
	}

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
		       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing bits or bits test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T != 0\nwith temporary T = ");
	  print_generic_expr (dump_file, bits1, 0);
	  fprintf (dump_file, " | ");
	  print_generic_expr (dump_file, bits2, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
	   && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  */
      if (inner_inv)
	inner_cond_code = invert_tree_comparison (inner_cond_code,
	  HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (inner_cond)))));
      if (inner_cond_code == ERROR_MARK)
	return false;
      if (outer_inv)
	outer_cond_code = invert_tree_comparison (outer_cond_code,
	  HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (outer_cond)))));
      if (outer_cond_code == ERROR_MARK)
	return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
					    gimple_cond_lhs (inner_cond),
					    gimple_cond_rhs (inner_cond),
					    outer_cond_code,
					    gimple_cond_lhs (outer_cond),
					    gimple_cond_rhs (outer_cond))))
	{
	  tree t1, t2;
	  gimple_stmt_iterator gsi;
	  if (!LOGICAL_OP_NON_SHORT_CIRCUIT)
	    return false;
	  /* Only do this optimization if the inner bb contains only the
	     conditional.  */
	  if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
	    return false;
	  t1 = fold_build2_loc (gimple_location (inner_cond),
				inner_cond_code,
				boolean_type_node,
				gimple_cond_lhs (inner_cond),
				gimple_cond_rhs (inner_cond));
	  t2 = fold_build2_loc (gimple_location (outer_cond),
				outer_cond_code,
				boolean_type_node,
				gimple_cond_lhs (outer_cond),
				gimple_cond_rhs (outer_cond));
	  t = fold_build2_loc (gimple_location (inner_cond),
			       TRUTH_AND_EXPR, boolean_type_node, t1, t2);
	  if (result_inv)
	    {
	      t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
	      result_inv = false;
	    }
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
					  GSI_SAME_STMT);
	}
      if (result_inv)
	t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing two comparisons to ");
	  print_generic_expr (dump_file, t, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  return false;
}

/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with INNER_COND_BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block whose entry edges we can merge.  That is:
       if (a || b)
	 ;
     and
       if (a && b)
	 ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      /* The && form is characterized by a common else_bb with
	 the two edges leading to it mergeable.  The latter is
	 guaranteed by matching PHI arguments in the else_bb and
	 the inner cond_bb having no side-effects.  */
      if (recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
	  && same_phi_args_p (outer_cond_bb, inner_cond_bb, else_bb)
	  && bb_no_side_effects_p (inner_cond_bb))
	{
	  /* We have
	       <outer_cond_bb>
		 if (q) goto inner_cond_bb; else goto else_bb;
	       <inner_cond_bb>
		 if (p) goto ...; else goto else_bb;
	       ...
	       <else_bb>
		 ...
	   */
	  return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
				    false);
	}

      /* And a version where the outer condition is negated.  */
      if (recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
	  && same_phi_args_p (outer_cond_bb, inner_cond_bb, else_bb)
	  && bb_no_side_effects_p (inner_cond_bb))
	{
	  /* We have
	       <outer_cond_bb>
		 if (q) goto else_bb; else goto inner_cond_bb;
	       <inner_cond_bb>
		 if (p) goto ...; else goto else_bb;
	       ...
	       <else_bb>
		 ...
	   */
	  return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
				    false);
	}

      /* The || form is characterized by a common then_bb with the
	 two edges leading to it mergeable.  The latter is guaranteed
	 by matching PHI arguments in the then_bb and the inner cond_bb
	 having no side-effects.  */
      if (recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
	  && same_phi_args_p (outer_cond_bb, inner_cond_bb, then_bb)
	  && bb_no_side_effects_p (inner_cond_bb))
	{
	  /* We have
	       <outer_cond_bb>
		 if (q) goto then_bb; else goto inner_cond_bb;
	       <inner_cond_bb>
		 if (p) goto then_bb; else goto ...;
	       <then_bb>
		 ...
	   */
	  return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
				    true);
	}

      /* And a version where the outer condition is negated.  */
      if (recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
	  && same_phi_args_p (outer_cond_bb, inner_cond_bb, then_bb)
	  && bb_no_side_effects_p (inner_cond_bb))
	{
	  /* We have
	       <outer_cond_bb>
		 if (q) goto inner_cond_bb; else goto then_bb;
	       <inner_cond_bb>
		 if (p) goto then_bb; else goto ...;
	       <then_bb>
		 ...
	   */
	  return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
				    true);
	}
    }

  return false;
}

/* Main entry for the tree if-combine pass.  */

static unsigned int
tree_ssa_ifcombine (void)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPRs we may be able to optimize.

     We walk the blocks in an order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is the opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple stmt = last_stmt (bb);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_COND)
	cfg_changed |= tree_ssa_ifcombine_bb (bb);
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

static bool
gate_ifcombine (void)
{
  return true;
}

namespace {

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ifcombine (); }
  unsigned int execute () { return tree_ssa_ifcombine (); }

}; // class pass_tree_ifcombine

} // anon namespace

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}