rtlanal.c (subreg_get_info): Exit early for simple and common cases.
[gcc.git] / gcc / tree-ssa-ifcombine.c
1 /* Combining of if-expressions on trees.
2 Copyright (C) 2007-2015 Free Software Foundation, Inc.
3 Contributed by Richard Guenther <rguenther@suse.de>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 /* rtl is needed only because arm back-end requires it for
26 BRANCH_COST. */
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hash-set.h"
30 #include "machmode.h"
31 #include "vec.h"
32 #include "double-int.h"
33 #include "input.h"
34 #include "alias.h"
35 #include "symtab.h"
36 #include "wide-int.h"
37 #include "inchash.h"
38 #include "tree.h"
39 #include "fold-const.h"
40 #include "stor-layout.h"
41 #include "predict.h"
42 #include "hard-reg-set.h"
43 #include "input.h"
44 #include "function.h"
45 #include "dominance.h"
46 #include "cfg.h"
47 #include "cfganal.h"
48 #include "basic-block.h"
49 #include "tree-pretty-print.h"
50 #include "tree-ssa-alias.h"
51 #include "internal-fn.h"
52 #include "gimple-fold.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimple-iterator.h"
57 #include "gimplify-me.h"
58 #include "gimple-ssa.h"
59 #include "tree-cfg.h"
60 #include "tree-phinodes.h"
61 #include "ssa-iterators.h"
62 #include "tree-pass.h"
63
64 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
65 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
66 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
67 false) >= 2)
68 #endif
69
70 /* This pass combines COND_EXPRs to simplify control flow. It
71 currently recognizes bit tests and comparisons in chains that
72 represent logical and or logical or of two COND_EXPRs.
73
     It does so by walking basic blocks in an approximate reverse
75 post-dominator order and trying to match CFG patterns that
76 represent logical and or logical or of two COND_EXPRs.
77 Transformations are done if the COND_EXPR conditions match
78 either
79
80 1. two single bit tests X & (1 << Yn) (for logical and)
81
82 2. two bit tests X & Yn (for logical or)
83
84 3. two comparisons X OPn Y (for logical or)
85
86 To simplify this pass, removing basic blocks and dead code
87 is left to CFG cleanup and DCE. */
88
89
90 /* Recognize a if-then-else CFG pattern starting to match with the
91 COND_BB basic-block containing the COND_EXPR. The recognized
   then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
93 *THEN_BB and/or *ELSE_BB are already set, they are required to
94 match the then and else basic-blocks to make the pattern match.
95 Returns true if the pattern matched, false otherwise. */
96
97 static bool
98 recognize_if_then_else (basic_block cond_bb,
99 basic_block *then_bb, basic_block *else_bb)
100 {
101 edge t, e;
102
103 if (EDGE_COUNT (cond_bb->succs) != 2)
104 return false;
105
106 /* Find the then/else edges. */
107 t = EDGE_SUCC (cond_bb, 0);
108 e = EDGE_SUCC (cond_bb, 1);
109 if (!(t->flags & EDGE_TRUE_VALUE))
110 {
111 edge tmp = t;
112 t = e;
113 e = tmp;
114 }
115 if (!(t->flags & EDGE_TRUE_VALUE)
116 || !(e->flags & EDGE_FALSE_VALUE))
117 return false;
118
119 /* Check if the edge destinations point to the required block. */
120 if (*then_bb
121 && t->dest != *then_bb)
122 return false;
123 if (*else_bb
124 && e->dest != *else_bb)
125 return false;
126
127 if (!*then_bb)
128 *then_bb = t->dest;
129 if (!*else_bb)
130 *else_bb = e->dest;
131
132 return true;
133 }
134
135 /* Verify if the basic block BB does not have side-effects. Return
136 true in this case, else false. */
137
138 static bool
139 bb_no_side_effects_p (basic_block bb)
140 {
141 gimple_stmt_iterator gsi;
142
143 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
144 {
145 gimple stmt = gsi_stmt (gsi);
146
147 if (is_gimple_debug (stmt))
148 continue;
149
150 if (gimple_has_side_effects (stmt)
151 || gimple_could_trap_p (stmt)
152 || gimple_vuse (stmt))
153 return false;
154 }
155
156 return true;
157 }
158
159 /* Return true if BB is an empty forwarder block to TO_BB. */
160
161 static bool
162 forwarder_block_to (basic_block bb, basic_block to_bb)
163 {
164 return empty_block_p (bb)
165 && single_succ_p (bb)
166 && single_succ (bb) == to_bb;
167 }
168
169 /* Verify if all PHI node arguments in DEST for edges from BB1 or
170 BB2 to DEST are the same. This makes the CFG merge point
171 free from side-effects. Return true in this case, else false. */
172
173 static bool
174 same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
175 {
176 edge e1 = find_edge (bb1, dest);
177 edge e2 = find_edge (bb2, dest);
178 gphi_iterator gsi;
179 gphi *phi;
180
181 for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
182 {
183 phi = gsi.phi ();
184 if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
185 PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
186 return false;
187 }
188
189 return true;
190 }
191
192 /* Return the best representative SSA name for CANDIDATE which is used
193 in a bit test. */
194
195 static tree
196 get_name_for_bit_test (tree candidate)
197 {
198 /* Skip single-use names in favor of using the name from a
199 non-widening conversion definition. */
200 if (TREE_CODE (candidate) == SSA_NAME
201 && has_single_use (candidate))
202 {
203 gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
204 if (is_gimple_assign (def_stmt)
205 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
206 {
207 if (TYPE_PRECISION (TREE_TYPE (candidate))
208 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
209 return gimple_assign_rhs1 (def_stmt);
210 }
211 }
212
213 return candidate;
214 }
215
216 /* Recognize a single bit test pattern in GIMPLE_COND and its defining
217 statements. Store the name being tested in *NAME and the bit
218 in *BIT. The GIMPLE_COND computes *NAME & (1 << *BIT).
219 Returns true if the pattern matched, false otherwise. */
220
static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  The
     condition must have the shape NAME != 0 (or NAME == 0 when INV
     is true) with NAME an SSA name.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
	 find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      /* Only non-widening conversions of SSA names are looked
	 through; a widening conversion could not change which bit
	 ends up in bit position zero, but narrowing could.  */
      while (is_gimple_assign (stmt)
	     && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
		      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
		 || gimple_assign_ssa_name_copy_p (stmt)))
	stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
	{
	  /* op0 & (1 << op1) */
	  *bit = gimple_assign_rhs2 (stmt);
	  *name = gimple_assign_rhs1 (stmt);
	}
      else
	{
	  /* t & 1 -- bit zero of t is tested.  */
	  *bit = integer_zero_node;
	  *name = get_name_for_bit_test (orig_name);
	}

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      /* The tested bit position is the base-2 logarithm of the
	 power-of-two mask.  */
      *bit = build_int_cst (integer_type_node,
			    tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
	 specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs2 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs1 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}
    }

  return false;
}
324
325 /* Recognize a bit test pattern in a GIMPLE_COND and its defining
326 statements. Store the name being tested in *NAME and the bits
327 in *BITS. The COND_EXPR computes *NAME & *BITS.
328 Returns true if the pattern matched, false otherwise. */
329
330 static bool
331 recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
332 {
333 gimple stmt;
334
335 /* Get at the definition of the result of the bit test. */
336 if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
337 || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
338 || !integer_zerop (gimple_cond_rhs (cond)))
339 return false;
340 stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
341 if (!is_gimple_assign (stmt)
342 || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
343 return false;
344
345 *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
346 *bits = gimple_assign_rhs2 (stmt);
347
348 return true;
349 }
350
351 /* If-convert on a and pattern with a common else block. The inner
352 if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
353 inner_inv, outer_inv and result_inv indicate whether the conditions
354 are inverted.
355 Returns true if the edges to the common else basic-block were merged. */
356
static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
		   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple inner_stmt, outer_stmt;
  gcond *inner_cond, *outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  /* Both blocks must end in a GIMPLE_COND.  */
  inner_stmt = last_stmt (inner_cond_bb);
  if (!inner_stmt
      || gimple_code (inner_stmt) != GIMPLE_COND)
    return false;
  inner_cond = as_a <gcond *> (inner_stmt);

  outer_stmt = last_stmt (outer_cond_bb);
  if (!outer_stmt
      || gimple_code (outer_stmt) != GIMPLE_COND)
    return false;
  outer_cond = as_a <gcond *> (outer_stmt);

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  Build the combined mask (1 << bit1) | (1 << bit2),
	 gimplifying it before the inner condition ...  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
		       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
			build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      /* ... then build name & mask == mask (!= when RESULT_INV).  */
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
				     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
		       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  Replace the outer
	 condition with a constant so the outer test disappears.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing double bit test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
	  print_generic_expr (dump_file, bit1, 0);
	  fprintf (dump_file, ") | (1 << ");
	  print_generic_expr (dump_file, bit2, 0);
	  fprintf (dump_file, ")\n");
	}

      return true;
    }

  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
	   && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  The recognizers
	 may have swapped operands, so try all four pairings.  */
      if (name1 == name2)
	;
      else if (bits1 == bits2)
	{
	  t = name2;
	  name2 = bits2;
	  bits2 = t;
	  t = name1;
	  name1 = bits1;
	  bits1 = t;
	}
      else if (name1 == bits2)
	{
	  t = name2;
	  name2 = bits2;
	  bits2 = t;
	}
      else if (bits1 == name2)
	{
	  t = name1;
	  name1 = bits1;
	  bits1 = t;
	}
      else
	return false;

      /* As we strip non-widening conversions in finding a common
	 name that is tested make sure to end up with an integral
	 type for building the bit operations.  Convert everything
	 to the unsigned variant of the wider of the two types.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
	  >= TYPE_PRECISION (TREE_TYPE (bits2)))
	{
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  name1 = fold_convert (TREE_TYPE (bits1), name1);
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  bits2 = fold_convert (TREE_TYPE (bits1), bits2);
	}
      else
	{
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  name1 = fold_convert (TREE_TYPE (bits2), name1);
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  bits1 = fold_convert (TREE_TYPE (bits2), bits1);
	}

      /* Do it.  Build name & (bits1 | bits2) == 0 (!= when
	 RESULT_INV), gimplifying before the inner condition.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
		       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing bits or bits test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T != 0\nwith temporary T = ");
	  print_generic_expr (dump_file, bits1, 0);
	  fprintf (dump_file, " | ");
	  print_generic_expr (dump_file, bits2, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
	   && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible --
	 invert_tree_comparison returns ERROR_MARK when the inversion
	 is not valid, e.g. for floating point with NaNs).  */
      if (inner_inv)
	inner_cond_code = invert_tree_comparison (inner_cond_code,
	  HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
	return false;
      if (outer_inv)
	outer_cond_code = invert_tree_comparison (outer_cond_code,
	  HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
	return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
					    gimple_cond_lhs (inner_cond),
					    gimple_cond_rhs (inner_cond),
					    outer_cond_code,
					    gimple_cond_lhs (outer_cond),
					    gimple_cond_rhs (outer_cond))))
	{
	  /* The two comparisons did not fold to one; emit an explicit
	     TRUTH_AND_EXPR of both instead, when the target prefers
	     non-short-circuit logical ops.  */
	  tree t1, t2;
	  gimple_stmt_iterator gsi;
	  if (!LOGICAL_OP_NON_SHORT_CIRCUIT)
	    return false;
	  /* Only do this optimization if the inner bb contains only the conditional.  */
	  if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
	    return false;
	  t1 = fold_build2_loc (gimple_location (inner_cond),
				inner_cond_code,
				boolean_type_node,
				gimple_cond_lhs (inner_cond),
				gimple_cond_rhs (inner_cond));
	  t2 = fold_build2_loc (gimple_location (outer_cond),
				outer_cond_code,
				boolean_type_node,
				gimple_cond_lhs (outer_cond),
				gimple_cond_rhs (outer_cond));
	  t = fold_build2_loc (gimple_location (inner_cond),
			       TRUTH_AND_EXPR, boolean_type_node, t1, t2);
	  /* Fold the inversion in here so it is not applied a second
	     time below.  */
	  if (result_inv)
	    {
	      t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
	      result_inv = false;
	    }
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
					  GSI_SAME_STMT);
	}
      if (result_inv)
	t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing two comparisons to ");
	  print_generic_expr (dump_file, t, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  return false;
}
598
599 /* Helper function for tree_ssa_ifcombine_bb. Recognize a CFG pattern and
600 dispatch to the appropriate if-conversion helper for a particular
601 set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
602 PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB. */
603
static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
			 basic_block then_bb, basic_block else_bb,
			 basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto else_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
				false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto else_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
				false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto then_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
				true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto then_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
				true);
    }

  return false;
}
691
692 /* Recognize a CFG pattern and dispatch to the appropriate
693 if-conversion helper. We start with BB as the innermost
694 worker basic-block. Returns true if a transformation was done. */
695
static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  /* INNER_COND_BB itself must be an if-then-else.  */
  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block which entry edges we can merge.  That is:
       if (a || b)
	 ;
     and
       if (a && b)
	 ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
				   then_bb, else_bb, inner_cond_bb))
	return true;

      if (forwarder_block_to (else_bb, then_bb))
	{
	  /* Other possibilities for the && form, if else_bb is
	     empty forwarder block to then_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, else_bb))
	    return true;
	}
      else if (forwarder_block_to (then_bb, else_bb))
	{
	  /* Other possibilities for the || form, if then_bb is
	     empty forwarder block to else_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, then_bb))
	    return true;
	}
    }

  return false;
}
748
/* Main entry for the tree if-combining pass.  */
750
namespace {

/* Pass descriptor for the ifcombine pass.  */

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine

/* Run the ifcombine pass over FUN.  Returns TODO_cleanup_cfg when
   any conditions were combined, zero otherwise.  */

unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPR we may be able to optimize.

     We walk the blocks in order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple stmt = last_stmt (bb);

      /* Only blocks ending in a condition are candidates.  */
      if (stmt
	  && gimple_code (stmt) == GIMPLE_COND)
	cfg_changed |= tree_ssa_ifcombine_bb (bb);
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace
812
813 gimple_opt_pass *
814 make_pass_tree_ifcombine (gcc::context *ctxt)
815 {
816 return new pass_tree_ifcombine (ctxt);
817 }