1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "optabs.h"
39 #include "toplev.h"
40 #include "tm_p.h"
41 #include "cfgloop.h"
42 #include "target.h"
43 #include "timevar.h"
44 #include "tree-pass.h"
45 #include "df.h"
46 #include "vec.h"
47 #include "vecprim.h"
48 #include "dbgcnt.h"
49
50 #ifndef HAVE_conditional_execution
51 #define HAVE_conditional_execution 0
52 #endif
53 #ifndef HAVE_conditional_move
54 #define HAVE_conditional_move 0
55 #endif
56 #ifndef HAVE_incscc
57 #define HAVE_incscc 0
58 #endif
59 #ifndef HAVE_decscc
60 #define HAVE_decscc 0
61 #endif
62 #ifndef HAVE_trap
63 #define HAVE_trap 0
64 #endif
65
66 #ifndef MAX_CONDITIONAL_EXECUTE
67 #define MAX_CONDITIONAL_EXECUTE \
68 (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
69 + 1)
70 #endif
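/* For example, a BRANCH_COST of 2 gives a default budget of
   2 + 1 = 3 insns per arm; cond_exec_process_if_block below
   doubles this budget when both a THEN and an ELSE arm are
   present.  */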
71
72 #define IFCVT_MULTIPLE_DUMPS 1
73
74 #define NULL_BLOCK ((basic_block) NULL)
75
76 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at. */
77 static int num_possible_if_blocks;
78
79 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
80 execution. */
81 static int num_updated_if_blocks;
82
83 /* # of changes made. */
84 static int num_true_changes;
85
86 /* Whether conditional execution changes were made. */
87 static int cond_exec_changed_p;
88
89 /* Forward references. */
90 static int count_bb_insns (const_basic_block);
91 static bool cheap_bb_rtx_cost_p (const_basic_block, int);
92 static rtx first_active_insn (basic_block);
93 static rtx last_active_insn (basic_block, int);
94 static basic_block block_fallthru (basic_block);
95 static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
96 static rtx cond_exec_get_condition (rtx);
97 static rtx noce_get_condition (rtx, rtx *, bool);
98 static int noce_operand_ok (const_rtx);
99 static void merge_if_block (ce_if_block_t *);
100 static int find_cond_trap (basic_block, edge, edge);
101 static basic_block find_if_header (basic_block, int);
102 static int block_jumps_and_fallthru_p (basic_block, basic_block);
103 static int noce_find_if_block (basic_block, edge, edge, int);
104 static int cond_exec_find_if_block (ce_if_block_t *);
105 static int find_if_case_1 (basic_block, edge, edge);
106 static int find_if_case_2 (basic_block, edge, edge);
107 static int find_memory (rtx *, void *);
108 static int dead_or_predicable (basic_block, basic_block, basic_block,
109 basic_block, int);
110 static void noce_emit_move_insn (rtx, rtx);
111 static rtx block_has_only_trap (basic_block);
112 \f
113 /* Count the number of non-jump active insns in BB. */
114
115 static int
116 count_bb_insns (const_basic_block bb)
117 {
118 int count = 0;
119 rtx insn = BB_HEAD (bb);
120
121 while (1)
122 {
123 if (CALL_P (insn) || NONJUMP_INSN_P (insn))
124 count++;
125
126 if (insn == BB_END (bb))
127 break;
128 insn = NEXT_INSN (insn);
129 }
130
131 return count;
132 }
133
134 /* Determine whether the total insn_rtx_cost on non-jump insns in
135 basic block BB is less than MAX_COST. This function returns
136 false if the cost of any instruction could not be estimated. */
137
138 static bool
139 cheap_bb_rtx_cost_p (const_basic_block bb, int max_cost)
140 {
141 int count = 0;
142 rtx insn = BB_HEAD (bb);
143 bool speed = optimize_bb_for_speed_p (bb);
144
145 while (1)
146 {
147 if (NONJUMP_INSN_P (insn))
148 {
149 int cost = insn_rtx_cost (PATTERN (insn), speed);
150 if (cost == 0)
151 return false;
152
153 /* If this instruction is the load or set of a "stack" register,
154 such as a floating point register on x87, then the cost of
155 speculatively executing this insn may need to include
156 the additional cost of popping its result off of the
157 register stack. Unfortunately, correctly recognizing and
158 accounting for this additional overhead is tricky, so for
159 now we simply prohibit such speculative execution. */
160 #ifdef STACK_REGS
161 {
162 rtx set = single_set (insn);
163 if (set && STACK_REG_P (SET_DEST (set)))
164 return false;
165 }
166 #endif
167
168 count += cost;
169 if (count >= max_cost)
170 return false;
171 }
172 else if (CALL_P (insn))
173 return false;
174
175 if (insn == BB_END (bb))
176 break;
177 insn = NEXT_INSN (insn);
178 }
179
180 return true;
181 }
182
183 /* Return the first non-jump active insn in the basic block. */
184
185 static rtx
186 first_active_insn (basic_block bb)
187 {
188 rtx insn = BB_HEAD (bb);
189
190 if (LABEL_P (insn))
191 {
192 if (insn == BB_END (bb))
193 return NULL_RTX;
194 insn = NEXT_INSN (insn);
195 }
196
197 while (NOTE_P (insn))
198 {
199 if (insn == BB_END (bb))
200 return NULL_RTX;
201 insn = NEXT_INSN (insn);
202 }
203
204 if (JUMP_P (insn))
205 return NULL_RTX;
206
207 return insn;
208 }
209
210 /* Return the last active non-jump insn in the basic block. */
211
212 static rtx
213 last_active_insn (basic_block bb, int skip_use_p)
214 {
215 rtx insn = BB_END (bb);
216 rtx head = BB_HEAD (bb);
217
218 while (NOTE_P (insn)
219 || JUMP_P (insn)
220 || (skip_use_p
221 && NONJUMP_INSN_P (insn)
222 && GET_CODE (PATTERN (insn)) == USE))
223 {
224 if (insn == head)
225 return NULL_RTX;
226 insn = PREV_INSN (insn);
227 }
228
229 if (LABEL_P (insn))
230 return NULL_RTX;
231
232 return insn;
233 }
234
235 /* Return the basic block reached by falling through the basic block BB. */
236
237 static basic_block
238 block_fallthru (basic_block bb)
239 {
240 edge e;
241 edge_iterator ei;
242
243 FOR_EACH_EDGE (e, ei, bb->succs)
244 if (e->flags & EDGE_FALLTHRU)
245 break;
246
247 return (e) ? e->dest : NULL_BLOCK;
248 }
249 \f
250 /* Go through a bunch of insns, converting them to conditional
251 execution format if possible. Return TRUE if all of the non-note
252 insns were processed. */
253
254 static int
255 cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
256 /* if block information */rtx start,
257 /* first insn to look at */rtx end,
258 /* last insn to look at */rtx test,
259 /* conditional execution test */rtx prob_val,
260 /* probability of branch taken. */int mod_ok)
261 {
262 int must_be_last = FALSE;
263 rtx insn;
264 rtx xtest;
265 rtx pattern;
266
267 if (!start || !end)
268 return FALSE;
269
270 for (insn = start; ; insn = NEXT_INSN (insn))
271 {
272 if (NOTE_P (insn))
273 goto insn_done;
274
275 gcc_assert (NONJUMP_INSN_P (insn) || CALL_P (insn));
276
277 /* Remove USE insns that get in the way. */
278 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
279 {
280 /* ??? Ug. Actually unlinking the thing is problematic,
281 given what we'd have to coordinate with our callers. */
282 SET_INSN_DELETED (insn);
283 goto insn_done;
284 }
285
286 /* Last insn wasn't last? */
287 if (must_be_last)
288 return FALSE;
289
290 if (modified_in_p (test, insn))
291 {
292 if (!mod_ok)
293 return FALSE;
294 must_be_last = TRUE;
295 }
296
297 /* Now build the conditional form of the instruction. */
298 pattern = PATTERN (insn);
299 xtest = copy_rtx (test);
300
301 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
302 two conditions. */
303 if (GET_CODE (pattern) == COND_EXEC)
304 {
305 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
306 return FALSE;
307
308 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
309 COND_EXEC_TEST (pattern));
310 pattern = COND_EXEC_CODE (pattern);
311 }
312
313 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
314
315 /* If the machine needs to modify the insn being conditionally executed,
316 say for example to force a constant integer operand into a temp
317 register, do so here. */
318 #ifdef IFCVT_MODIFY_INSN
319 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
320 if (! pattern)
321 return FALSE;
322 #endif
323
324 validate_change (insn, &PATTERN (insn), pattern, 1);
325
326 if (CALL_P (insn) && prob_val)
327 validate_change (insn, &REG_NOTES (insn),
328 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
329 REG_NOTES (insn)), 1);
330
331 insn_done:
332 if (insn == end)
333 break;
334 }
335
336 return TRUE;
337 }
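/* An illustrative example of the rewrite performed above; the mode,
   register numbers and condition are hypothetical:

       (set (reg:SI 0) (reg:SI 1))

   predicated on (ne (reg:CC cc) (const_int 0)) becomes

       (cond_exec (ne (reg:CC cc) (const_int 0))
                  (set (reg:SI 0) (reg:SI 1)))

   If the insn was already a COND_EXEC, the two tests are ANDed
   together instead.  */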
338
339 /* Return the condition for a jump. Do not do any special processing. */
340
341 static rtx
342 cond_exec_get_condition (rtx jump)
343 {
344 rtx test_if, cond;
345
346 if (any_condjump_p (jump))
347 test_if = SET_SRC (pc_set (jump));
348 else
349 return NULL_RTX;
350 cond = XEXP (test_if, 0);
351
352 /* If this branches to JUMP_LABEL when the condition is false,
353 reverse the condition. */
354 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
355 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
356 {
357 enum rtx_code rev = reversed_comparison_code (cond, jump);
358 if (rev == UNKNOWN)
359 return NULL_RTX;
360
361 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
362 XEXP (cond, 1));
363 }
364
365 return cond;
366 }
367
368 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
369 to conditional execution. Return TRUE if we were successful at
370 converting the block. */
371
372 static int
373 cond_exec_process_if_block (ce_if_block_t * ce_info,
374 /* if block information */int do_multiple_p)
375 {
376 basic_block test_bb = ce_info->test_bb; /* last test block */
377 basic_block then_bb = ce_info->then_bb; /* THEN */
378 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
379 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
380 rtx then_start; /* first insn in THEN block */
381 rtx then_end; /* last insn + 1 in THEN block */
382 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
383 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
384 int max; /* max # of insns to convert. */
385 int then_mod_ok; /* whether conditional mods are ok in THEN */
386 rtx true_expr; /* test for else block insns */
387 rtx false_expr; /* test for then block insns */
388 rtx true_prob_val; /* probability of else block */
389 rtx false_prob_val; /* probability of then block */
390 int n_insns;
391 enum rtx_code false_code;
392
393 /* If test is made up of && or || elements, and we've failed at handling
394 all of them together, just use the last test if it is the special case of
395 && elements without an ELSE block. */
396 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
397 {
398 if (else_bb || ! ce_info->and_and_p)
399 return FALSE;
400
401 ce_info->test_bb = test_bb = ce_info->last_test_bb;
402 ce_info->num_multiple_test_blocks = 0;
403 ce_info->num_and_and_blocks = 0;
404 ce_info->num_or_or_blocks = 0;
405 }
406
407 /* Find the conditional jump to the ELSE or JOIN part, and isolate
408 the test. */
409 test_expr = cond_exec_get_condition (BB_END (test_bb));
410 if (! test_expr)
411 return FALSE;
412
413 /* If the conditional jump is more than just a conditional jump,
414 then we cannot do conditional execution conversion on this block. */
415 if (! onlyjump_p (BB_END (test_bb)))
416 return FALSE;
417
418 /* Collect the bounds of where we're to search, skipping any labels, jumps
419 and notes at the beginning and end of the block. Then count the total
420 number of insns and see if it is small enough to convert. */
421 then_start = first_active_insn (then_bb);
422 then_end = last_active_insn (then_bb, TRUE);
423 n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
424 max = MAX_CONDITIONAL_EXECUTE;
425
426 if (else_bb)
427 {
428 max *= 2;
429 else_start = first_active_insn (else_bb);
430 else_end = last_active_insn (else_bb, TRUE);
431 n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
432 }
433
434 if (n_insns > max)
435 return FALSE;
436
437 /* Map test_expr/test_jump into the appropriate MD tests to use on
438 the conditionally executed code. */
439
440 true_expr = test_expr;
441
442 false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
443 if (false_code != UNKNOWN)
444 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
445 XEXP (true_expr, 0), XEXP (true_expr, 1));
446 else
447 false_expr = NULL_RTX;
448
449 #ifdef IFCVT_MODIFY_TESTS
450 /* If the machine description needs to modify the tests, such as setting a
451 conditional execution register from a comparison, it can do so here. */
452 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
453
454 /* See if the conversion failed. */
455 if (!true_expr || !false_expr)
456 goto fail;
457 #endif
458
459 true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
460 if (true_prob_val)
461 {
462 true_prob_val = XEXP (true_prob_val, 0);
463 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
464 }
465 else
466 false_prob_val = NULL_RTX;
467
468 /* If we have && or || tests, do them here. These tests are in the adjacent
469 blocks after the first block containing the test. */
470 if (ce_info->num_multiple_test_blocks > 0)
471 {
472 basic_block bb = test_bb;
473 basic_block last_test_bb = ce_info->last_test_bb;
474
475 if (! false_expr)
476 goto fail;
477
478 do
479 {
480 rtx start, end;
481 rtx t, f;
482 enum rtx_code f_code;
483
484 bb = block_fallthru (bb);
485 start = first_active_insn (bb);
486 end = last_active_insn (bb, TRUE);
487 if (start
488 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
489 false_prob_val, FALSE))
490 goto fail;
491
492 /* If the conditional jump is more than just a conditional jump, then
493 we cannot do conditional execution conversion on this block. */
494 if (! onlyjump_p (BB_END (bb)))
495 goto fail;
496
497 /* Find the conditional jump and isolate the test. */
498 t = cond_exec_get_condition (BB_END (bb));
499 if (! t)
500 goto fail;
501
502 f_code = reversed_comparison_code (t, BB_END (bb));
503 if (f_code == UNKNOWN)
504 goto fail;
505
506 f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
507 if (ce_info->and_and_p)
508 {
509 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
510 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
511 }
512 else
513 {
514 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
515 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
516 }
517
518 /* If the machine description needs to modify the tests, such as
519 setting a conditional execution register from a comparison, it can
520 do so here. */
521 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
522 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
523
524 /* See if the conversion failed. */
525 if (!t || !f)
526 goto fail;
527 #endif
528
529 true_expr = t;
530 false_expr = f;
531 }
532 while (bb != last_test_bb);
533 }
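/* At the source level the net effect for "if (a && b) THEN" (operands
   hypothetical) is that the THEN insns execute only under the combined
   condition (A & B), while the insns computing each later test are
   themselves predicated on the earlier tests having passed; the
   accumulated TRUE_EXPR and FALSE_EXPR built above carry those
   combined conditions.  */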
534
535 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
536 in the THEN block. */
537 then_mod_ok = (else_bb == NULL_BLOCK);
538
539 /* Go through the THEN and ELSE blocks converting the insns if possible
540 to conditional execution. */
541
542 if (then_end
543 && (! false_expr
544 || ! cond_exec_process_insns (ce_info, then_start, then_end,
545 false_expr, false_prob_val,
546 then_mod_ok)))
547 goto fail;
548
549 if (else_bb && else_end
550 && ! cond_exec_process_insns (ce_info, else_start, else_end,
551 true_expr, true_prob_val, TRUE))
552 goto fail;
553
554 /* If we cannot apply the changes, fail. Do not go through the normal fail
555 processing, since apply_change_group will call cancel_changes. */
556 if (! apply_change_group ())
557 {
558 #ifdef IFCVT_MODIFY_CANCEL
559 /* Cancel any machine dependent changes. */
560 IFCVT_MODIFY_CANCEL (ce_info);
561 #endif
562 return FALSE;
563 }
564
565 #ifdef IFCVT_MODIFY_FINAL
566 /* Do any machine dependent final modifications. */
567 IFCVT_MODIFY_FINAL (ce_info);
568 #endif
569
570 /* Conversion succeeded. */
571 if (dump_file)
572 fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
573 n_insns, (n_insns == 1) ? " was" : "s were");
574
575 /* Merge the blocks! */
576 merge_if_block (ce_info);
577 cond_exec_changed_p = TRUE;
578 return TRUE;
579
580 fail:
581 #ifdef IFCVT_MODIFY_CANCEL
582 /* Cancel any machine dependent changes. */
583 IFCVT_MODIFY_CANCEL (ce_info);
584 #endif
585
586 cancel_changes (0);
587 return FALSE;
588 }
589 \f
590 /* Used by noce_process_if_block to communicate with its subroutines.
591
592 The subroutines know that A and B may be evaluated freely. They
593 know that X is a register. They should insert new instructions
594 before cond_earliest. */
595
596 struct noce_if_info
597 {
598 /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block. */
599 basic_block test_bb, then_bb, else_bb, join_bb;
600
601 /* The jump that ends TEST_BB. */
602 rtx jump;
603
604 /* The jump condition. */
605 rtx cond;
606
607 /* New insns should be inserted before this one. */
608 rtx cond_earliest;
609
610 /* Insns in the THEN and ELSE block. There is always just this
611 one insn in those blocks. The insns are single_set insns.
612 If there was no ELSE block, INSN_B is the last insn before
613 COND_EARLIEST, or NULL_RTX. In the former case, the insn
614 operands are still valid, as if INSN_B was moved down below
615 the jump. */
616 rtx insn_a, insn_b;
617
618 /* The SET_SRC of INSN_A and INSN_B. */
619 rtx a, b;
620
621 /* The SET_DEST of INSN_A. */
622 rtx x;
623
624 /* True if this if block is not canonical. In the canonical form of
625 if blocks, the THEN_BB is the block reached via the fallthru edge
626 from TEST_BB. For the noce transformations, we allow the symmetric
627 form as well. */
628 bool then_else_reversed;
629
630 /* Estimated cost of the particular branch instruction. */
631 int branch_cost;
632 };
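/* For the canonical block

       if (test) x = a; else x = b;

   INSN_A is the single_set insn of the THEN arm and INSN_B the one of
   the ELSE arm (when present); A and B are their SET_SRCs and X the
   common SET_DEST.  Note that COND is the condition of the jump that
   ends TEST_BB, i.e. the condition under which the ELSE arm is
   reached, so the transforms below treat B as the value selected when
   COND holds.  */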
633
634 static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
635 static int noce_try_move (struct noce_if_info *);
636 static int noce_try_store_flag (struct noce_if_info *);
637 static int noce_try_addcc (struct noce_if_info *);
638 static int noce_try_store_flag_constants (struct noce_if_info *);
639 static int noce_try_store_flag_mask (struct noce_if_info *);
640 static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
641 rtx, rtx, rtx);
642 static int noce_try_cmove (struct noce_if_info *);
643 static int noce_try_cmove_arith (struct noce_if_info *);
644 static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
645 static int noce_try_minmax (struct noce_if_info *);
646 static int noce_try_abs (struct noce_if_info *);
647 static int noce_try_sign_mask (struct noce_if_info *);
648
649 /* Helper function for noce_try_store_flag*. */
650
651 static rtx
652 noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
653 int normalize)
654 {
655 rtx cond = if_info->cond;
656 int cond_complex;
657 enum rtx_code code;
658
659 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
660 || ! general_operand (XEXP (cond, 1), VOIDmode));
661
662 /* If earliest == jump, or when the condition is complex, try to
663 build the store_flag insn directly. */
664
665 if (cond_complex)
666 {
667 rtx set = pc_set (if_info->jump);
668 cond = XEXP (SET_SRC (set), 0);
669 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
670 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump))
671 reversep = !reversep;
672 if (if_info->then_else_reversed)
673 reversep = !reversep;
674 }
675
676 if (reversep)
677 code = reversed_comparison_code (cond, if_info->jump);
678 else
679 code = GET_CODE (cond);
680
681 if ((if_info->cond_earliest == if_info->jump || cond_complex)
682 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
683 {
684 rtx tmp;
685
686 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
687 XEXP (cond, 1));
688 tmp = gen_rtx_SET (VOIDmode, x, tmp);
689
690 start_sequence ();
691 tmp = emit_insn (tmp);
692
693 if (recog_memoized (tmp) >= 0)
694 {
695 tmp = get_insns ();
696 end_sequence ();
697 emit_insn (tmp);
698
699 if_info->cond_earliest = if_info->jump;
700
701 return x;
702 }
703
704 end_sequence ();
705 }
706
707 /* Don't even try if the comparison operands or the mode of X are weird. */
708 if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
709 return NULL_RTX;
710
711 return emit_store_flag (x, code, XEXP (cond, 0),
712 XEXP (cond, 1), VOIDmode,
713 (code == LTU || code == LEU
714 || code == GEU || code == GTU), normalize);
715 }
716
717 /* Emit an instruction to move an rtx, possibly into STRICT_LOW_PART.
718 X is the destination/target and Y is the value to copy. */
719
720 static void
721 noce_emit_move_insn (rtx x, rtx y)
722 {
723 enum machine_mode outmode;
724 rtx outer, inner;
725 int bitpos;
726
727 if (GET_CODE (x) != STRICT_LOW_PART)
728 {
729 rtx seq, insn, target;
730 optab ot;
731
732 start_sequence ();
733 /* Check that the SET_SRC is reasonable before calling emit_move_insn,
734 otherwise construct a suitable SET pattern ourselves. */
735 insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
736 ? emit_move_insn (x, y)
737 : emit_insn (gen_rtx_SET (VOIDmode, x, y));
738 seq = get_insns ();
739 end_sequence ();
740
741 if (recog_memoized (insn) <= 0)
742 {
743 if (GET_CODE (x) == ZERO_EXTRACT)
744 {
745 rtx op = XEXP (x, 0);
746 unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
747 unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));
748
749 /* store_bit_field expects START to be relative to
750 BYTES_BIG_ENDIAN and adjusts this value for machines with
751 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN. In order to be able to
752 invoke store_bit_field again it is necessary to have the START
753 value from the first call. */
754 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
755 {
756 if (MEM_P (op))
757 start = BITS_PER_UNIT - start - size;
758 else
759 {
760 gcc_assert (REG_P (op));
761 start = BITS_PER_WORD - start - size;
762 }
763 }
764
765 gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
766 store_bit_field (op, size, start, GET_MODE (x), y);
767 return;
768 }
769
770 switch (GET_RTX_CLASS (GET_CODE (y)))
771 {
772 case RTX_UNARY:
773 ot = code_to_optab[GET_CODE (y)];
774 if (ot)
775 {
776 start_sequence ();
777 target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
778 if (target != NULL_RTX)
779 {
780 if (target != x)
781 emit_move_insn (x, target);
782 seq = get_insns ();
783 }
784 end_sequence ();
785 }
786 break;
787
788 case RTX_BIN_ARITH:
789 case RTX_COMM_ARITH:
790 ot = code_to_optab[GET_CODE (y)];
791 if (ot)
792 {
793 start_sequence ();
794 target = expand_binop (GET_MODE (y), ot,
795 XEXP (y, 0), XEXP (y, 1),
796 x, 0, OPTAB_DIRECT);
797 if (target != NULL_RTX)
798 {
799 if (target != x)
800 emit_move_insn (x, target);
801 seq = get_insns ();
802 }
803 end_sequence ();
804 }
805 break;
806
807 default:
808 break;
809 }
810 }
811
812 emit_insn (seq);
813 return;
814 }
815
816 outer = XEXP (x, 0);
817 inner = XEXP (outer, 0);
818 outmode = GET_MODE (outer);
819 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
820 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y);
821 }
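/* Sketch of the STRICT_LOW_PART path above, with hypothetical modes:
   for x = (strict_low_part (subreg:HI (reg:SI r) 0)), OUTER is the
   SUBREG, INNER is (reg:SI r), and store_bit_field writes Y into the
   16 bits of R selected by the subreg byte offset, leaving the other
   bits of R unchanged.  */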
822
823 /* Return the sequence of instructions generated by if conversion. This
824 function calls end_sequence() to end the current stream, and ensures
825 that the instructions are unshared, recognizable non-jump insns.
826 On failure, this function returns NULL_RTX. */
827
828 static rtx
829 end_ifcvt_sequence (struct noce_if_info *if_info)
830 {
831 rtx insn;
832 rtx seq = get_insns ();
833
834 set_used_flags (if_info->x);
835 set_used_flags (if_info->cond);
836 unshare_all_rtl_in_chain (seq);
837 end_sequence ();
838
839 /* Make sure that all of the instructions emitted are recognizable,
840 and that we haven't introduced a new jump instruction.
841 As an exercise for the reader, build a general mechanism that
842 allows proper placement of required clobbers. */
843 for (insn = seq; insn; insn = NEXT_INSN (insn))
844 if (JUMP_P (insn)
845 || recog_memoized (insn) == -1)
846 return NULL_RTX;
847
848 return seq;
849 }
850
851 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
852 "if (a == b) x = a; else x = b" into "x = b". */
853
854 static int
855 noce_try_move (struct noce_if_info *if_info)
856 {
857 rtx cond = if_info->cond;
858 enum rtx_code code = GET_CODE (cond);
859 rtx y, seq;
860
861 if (code != NE && code != EQ)
862 return FALSE;
863
864 /* This optimization isn't valid if either A or B could be a NaN
865 or a signed zero. */
866 if (HONOR_NANS (GET_MODE (if_info->x))
867 || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
868 return FALSE;
869
870 /* Check whether the operands of the comparison are A and B in
871 either order. */
872 if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
873 && rtx_equal_p (if_info->b, XEXP (cond, 1)))
874 || (rtx_equal_p (if_info->a, XEXP (cond, 1))
875 && rtx_equal_p (if_info->b, XEXP (cond, 0))))
876 {
877 y = (code == EQ) ? if_info->a : if_info->b;
878
879 /* Avoid generating the move if the source is the destination. */
880 if (! rtx_equal_p (if_info->x, y))
881 {
882 start_sequence ();
883 noce_emit_move_insn (if_info->x, y);
884 seq = end_ifcvt_sequence (if_info);
885 if (!seq)
886 return FALSE;
887
888 emit_insn_before_setloc (seq, if_info->jump,
889 INSN_LOCATOR (if_info->insn_a));
890 }
891 return TRUE;
892 }
893 return FALSE;
894 }
895
896 /* Convert "if (test) x = 1; else x = 0".
897
898 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
899 tried in noce_try_store_flag_constants after noce_try_cmove has had
900 a go at the conversion. */
901
902 static int
903 noce_try_store_flag (struct noce_if_info *if_info)
904 {
905 int reversep;
906 rtx target, seq;
907
908 if (GET_CODE (if_info->b) == CONST_INT
909 && INTVAL (if_info->b) == STORE_FLAG_VALUE
910 && if_info->a == const0_rtx)
911 reversep = 0;
912 else if (if_info->b == const0_rtx
913 && GET_CODE (if_info->a) == CONST_INT
914 && INTVAL (if_info->a) == STORE_FLAG_VALUE
915 && (reversed_comparison_code (if_info->cond, if_info->jump)
916 != UNKNOWN))
917 reversep = 1;
918 else
919 return FALSE;
920
921 start_sequence ();
922
923 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
924 if (target)
925 {
926 if (target != if_info->x)
927 noce_emit_move_insn (if_info->x, target);
928
929 seq = end_ifcvt_sequence (if_info);
930 if (! seq)
931 return FALSE;
932
933 emit_insn_before_setloc (seq, if_info->jump,
934 INSN_LOCATOR (if_info->insn_a));
935 return TRUE;
936 }
937 else
938 {
939 end_sequence ();
940 return FALSE;
941 }
942 }
943
944 /* Convert "if (test) x = a; else x = b", for A and B constant. */
945
946 static int
947 noce_try_store_flag_constants (struct noce_if_info *if_info)
948 {
949 rtx target, seq;
950 int reversep;
951 HOST_WIDE_INT itrue, ifalse, diff, tmp;
952 int normalize, can_reverse;
953 enum machine_mode mode;
954
955 if (GET_CODE (if_info->a) == CONST_INT
956 && GET_CODE (if_info->b) == CONST_INT)
957 {
958 mode = GET_MODE (if_info->x);
959 ifalse = INTVAL (if_info->a);
960 itrue = INTVAL (if_info->b);
961
962 /* Make sure we can represent the difference between the two values, i.e. check that ITRUE - IFALSE does not overflow, by testing signs rather than trusting the subtraction. */
963 if ((itrue - ifalse > 0)
964 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
965 return FALSE;
966
967 diff = trunc_int_for_mode (itrue - ifalse, mode);
968
969 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
970 != UNKNOWN);
971
972 reversep = 0;
973 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
974 normalize = 0;
975 else if (ifalse == 0 && exact_log2 (itrue) >= 0
976 && (STORE_FLAG_VALUE == 1
977 || if_info->branch_cost >= 2))
978 normalize = 1;
979 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
980 && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
981 normalize = 1, reversep = 1;
982 else if (itrue == -1
983 && (STORE_FLAG_VALUE == -1
984 || if_info->branch_cost >= 2))
985 normalize = -1;
986 else if (ifalse == -1 && can_reverse
987 && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
988 normalize = -1, reversep = 1;
989 else if ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
990 || if_info->branch_cost >= 3)
991 normalize = -1;
992 else
993 return FALSE;
994
995 if (reversep)
996 {
997 tmp = itrue; itrue = ifalse; ifalse = tmp;
998 diff = trunc_int_for_mode (-diff, mode);
999 }
1000
1001 start_sequence ();
1002 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
1003 if (! target)
1004 {
1005 end_sequence ();
1006 return FALSE;
1007 }
1008
1009 /* if (test) x = 3; else x = 4;
1010 => x = 3 + (test == 0); */
1011 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
1012 {
1013 target = expand_simple_binop (mode,
1014 (diff == STORE_FLAG_VALUE
1015 ? PLUS : MINUS),
1016 GEN_INT (ifalse), target, if_info->x, 0,
1017 OPTAB_WIDEN);
1018 }
1019
1020 /* if (test) x = 8; else x = 0;
1021 => x = (test != 0) << 3; */
1022 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
1023 {
1024 target = expand_simple_binop (mode, ASHIFT,
1025 target, GEN_INT (tmp), if_info->x, 0,
1026 OPTAB_WIDEN);
1027 }
1028
1029 /* if (test) x = -1; else x = b;
1030 => x = -(test != 0) | b; */
1031 else if (itrue == -1)
1032 {
1033 target = expand_simple_binop (mode, IOR,
1034 target, GEN_INT (ifalse), if_info->x, 0,
1035 OPTAB_WIDEN);
1036 }
1037
1038 /* if (test) x = a; else x = b;
1039 => x = (-(test != 0) & (b - a)) + a; */
1040 else
1041 {
1042 target = expand_simple_binop (mode, AND,
1043 target, GEN_INT (diff), if_info->x, 0,
1044 OPTAB_WIDEN);
1045 if (target)
1046 target = expand_simple_binop (mode, PLUS,
1047 target, GEN_INT (ifalse),
1048 if_info->x, 0, OPTAB_WIDEN);
1049 }
1050
1051 if (! target)
1052 {
1053 end_sequence ();
1054 return FALSE;
1055 }
1056
1057 if (target != if_info->x)
1058 noce_emit_move_insn (if_info->x, target);
1059
1060 seq = end_ifcvt_sequence (if_info);
1061 if (!seq)
1062 return FALSE;
1063
1064 emit_insn_before_setloc (seq, if_info->jump,
1065 INSN_LOCATOR (if_info->insn_a));
1066 return TRUE;
1067 }
1068
1069 return FALSE;
1070 }
1071
1072 /* Convert "if (test) foo++" into "foo += (test != 0)", and
1073 similarly for "foo--". */
1074
1075 static int
1076 noce_try_addcc (struct noce_if_info *if_info)
1077 {
1078 rtx target, seq;
1079 int subtract, normalize;
1080
1081 if (GET_CODE (if_info->a) == PLUS
1082 && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
1083 && (reversed_comparison_code (if_info->cond, if_info->jump)
1084 != UNKNOWN))
1085 {
1086 rtx cond = if_info->cond;
1087 enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
1088
1089 /* First try to use addcc pattern. */
1090 if (general_operand (XEXP (cond, 0), VOIDmode)
1091 && general_operand (XEXP (cond, 1), VOIDmode))
1092 {
1093 start_sequence ();
1094 target = emit_conditional_add (if_info->x, code,
1095 XEXP (cond, 0),
1096 XEXP (cond, 1),
1097 VOIDmode,
1098 if_info->b,
1099 XEXP (if_info->a, 1),
1100 GET_MODE (if_info->x),
1101 (code == LTU || code == GEU
1102 || code == LEU || code == GTU));
1103 if (target)
1104 {
1105 if (target != if_info->x)
1106 noce_emit_move_insn (if_info->x, target);
1107
1108 seq = end_ifcvt_sequence (if_info);
1109 if (!seq)
1110 return FALSE;
1111
1112 emit_insn_before_setloc (seq, if_info->jump,
1113 INSN_LOCATOR (if_info->insn_a));
1114 return TRUE;
1115 }
1116 end_sequence ();
1117 }
1118
1119 /* If that fails, construct conditional increment or decrement using
1120 setcc. */
1121 if (if_info->branch_cost >= 2
1122 && (XEXP (if_info->a, 1) == const1_rtx
1123 || XEXP (if_info->a, 1) == constm1_rtx))
1124 {
1125 start_sequence ();
1126 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1127 subtract = 0, normalize = 0;
1128 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1129 subtract = 1, normalize = 0;
1130 else
1131 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
1132
1133
1134 target = noce_emit_store_flag (if_info,
1135 gen_reg_rtx (GET_MODE (if_info->x)),
1136 1, normalize);
1137
1138 if (target)
1139 target = expand_simple_binop (GET_MODE (if_info->x),
1140 subtract ? MINUS : PLUS,
1141 if_info->b, target, if_info->x,
1142 0, OPTAB_WIDEN);
1143 if (target)
1144 {
1145 if (target != if_info->x)
1146 noce_emit_move_insn (if_info->x, target);
1147
1148 seq = end_ifcvt_sequence (if_info);
1149 if (!seq)
1150 return FALSE;
1151
1152 emit_insn_before_setloc (seq, if_info->jump,
1153 INSN_LOCATOR (if_info->insn_a));
1154 return TRUE;
1155 }
1156 end_sequence ();
1157 }
1158 }
1159
1160 return FALSE;
1161 }
1162
1163 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
1164
1165 static int
1166 noce_try_store_flag_mask (struct noce_if_info *if_info)
1167 {
1168 rtx target, seq;
1169 int reversep;
1170
1171 reversep = 0;
1172 if ((if_info->branch_cost >= 2
1173 || STORE_FLAG_VALUE == -1)
1174 && ((if_info->a == const0_rtx
1175 && rtx_equal_p (if_info->b, if_info->x))
1176 || ((reversep = (reversed_comparison_code (if_info->cond,
1177 if_info->jump)
1178 != UNKNOWN))
1179 && if_info->b == const0_rtx
1180 && rtx_equal_p (if_info->a, if_info->x))))
1181 {
1182 start_sequence ();
1183 target = noce_emit_store_flag (if_info,
1184 gen_reg_rtx (GET_MODE (if_info->x)),
1185 reversep, -1);
1186 if (target)
1187 target = expand_simple_binop (GET_MODE (if_info->x), AND,
1188 if_info->x,
1189 target, if_info->x, 0,
1190 OPTAB_WIDEN);
1191
1192 if (target)
1193 {
1194 if (target != if_info->x)
1195 noce_emit_move_insn (if_info->x, target);
1196
1197 seq = end_ifcvt_sequence (if_info);
1198 if (!seq)
1199 return FALSE;
1200
1201 emit_insn_before_setloc (seq, if_info->jump,
1202 INSN_LOCATOR (if_info->insn_a));
1203 return TRUE;
1204 }
1205
1206 end_sequence ();
1207 }
1208
1209 return FALSE;
1210 }
1211
1212 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
1213
1214 static rtx
1215 noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
1216 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
1217 {
1218 /* If earliest == jump, try to build the cmove insn directly.
1219 This is helpful when combine has created some complex condition
1220 (like for alpha's cmovlbs) that we can't hope to regenerate
1221 through the normal interface. */
1222
1223 if (if_info->cond_earliest == if_info->jump)
1224 {
1225 rtx tmp;
1226
1227 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1228 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1229 tmp = gen_rtx_SET (VOIDmode, x, tmp);
1230
1231 start_sequence ();
1232 tmp = emit_insn (tmp);
1233
1234 if (recog_memoized (tmp) >= 0)
1235 {
1236 tmp = get_insns ();
1237 end_sequence ();
1238 emit_insn (tmp);
1239
1240 return x;
1241 }
1242
1243 end_sequence ();
1244 }
1245
1246 /* Don't even try if the comparison operands are weird. */
1247 if (! general_operand (cmp_a, GET_MODE (cmp_a))
1248 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1249 return NULL_RTX;
1250
1251 #if HAVE_conditional_move
1252 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1253 vtrue, vfalse, GET_MODE (x),
1254 (code == LTU || code == GEU
1255 || code == LEU || code == GTU));
1256 #else
1257 /* We'll never get here, as noce_process_if_block doesn't call the
1258 functions involved. Ifdef code, however, should be discouraged
1259 because it leads to typos in the code not selected. However,
1260 emit_conditional_move won't exist either. */
1261 return NULL_RTX;
1262 #endif
1263 }
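/* The direct attempt above emits, schematically,

       (set x (if_then_else (code cmp_a cmp_b) vtrue vfalse))

   and keeps it only if recog accepts it; this can succeed for
   target-specific conditions that emit_conditional_move cannot
   rebuild.  */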
1264
1265 /* Try only simple constants and registers here. More complex cases
1266 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1267 has had a go at it. */
1268
1269 static int
1270 noce_try_cmove (struct noce_if_info *if_info)
1271 {
1272 enum rtx_code code;
1273 rtx target, seq;
1274
1275 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1276 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1277 {
1278 start_sequence ();
1279
1280 code = GET_CODE (if_info->cond);
1281 target = noce_emit_cmove (if_info, if_info->x, code,
1282 XEXP (if_info->cond, 0),
1283 XEXP (if_info->cond, 1),
1284 if_info->a, if_info->b);
1285
1286 if (target)
1287 {
1288 if (target != if_info->x)
1289 noce_emit_move_insn (if_info->x, target);
1290
1291 seq = end_ifcvt_sequence (if_info);
1292 if (!seq)
1293 return FALSE;
1294
1295 emit_insn_before_setloc (seq, if_info->jump,
1296 INSN_LOCATOR (if_info->insn_a));
1297 return TRUE;
1298 }
1299 else
1300 {
1301 end_sequence ();
1302 return FALSE;
1303 }
1304 }
1305
1306 return FALSE;
1307 }
1308
1309 /* Try more complex cases involving conditional_move. */
1310
1311 static int
1312 noce_try_cmove_arith (struct noce_if_info *if_info)
1313 {
1314 rtx a = if_info->a;
1315 rtx b = if_info->b;
1316 rtx x = if_info->x;
1317 rtx orig_a, orig_b;
1318 rtx insn_a, insn_b;
1319 rtx tmp, target;
1320 int is_mem = 0;
1321 int insn_cost;
1322 enum rtx_code code;
1323
1324 /* A conditional move from two memory sources is equivalent to a
1325 conditional move on their addresses followed by a load. Don't do this
1326 early because it'll screw alias analysis. Note that we've
1327 already checked for no side effects. */
1328 /* ??? FIXME: Magic number 5. */
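/* As a sketch with hypothetical operands: "if (test) x = *p; else
   x = *q" becomes "tmp = test ? p : q; x = *tmp", i.e. one
   conditional move on the addresses plus a single load.  */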
1329 if (cse_not_expected
1330 && MEM_P (a) && MEM_P (b)
1331 && if_info->branch_cost >= 5)
1332 {
1333 a = XEXP (a, 0);
1334 b = XEXP (b, 0);
1335 x = gen_reg_rtx (Pmode);
1336 is_mem = 1;
1337 }
1338
1339 /* ??? We could handle this if we knew that a load from A or B could
1340 not fault. This is also true if we've already loaded
1341 from the address along the path from ENTRY. */
1342 else if (may_trap_p (a) || may_trap_p (b))
1343 return FALSE;
1344
1345 /* if (test) x = a + b; else x = c - d;
1346 => y = a + b;
1347 x = c - d;
1348 if (test)
1349 x = y;
1350 */
1351
1352 code = GET_CODE (if_info->cond);
1353 insn_a = if_info->insn_a;
1354 insn_b = if_info->insn_b;
1355
1356 /* Total insn_rtx_cost should be smaller than branch cost. Exit
1357 if insn_rtx_cost can't be estimated. */
1358 if (insn_a)
1359 {
1360 insn_cost = insn_rtx_cost (PATTERN (insn_a),
1361 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a)));
1362 if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
1363 return FALSE;
1364 }
1365 else
1366 insn_cost = 0;
1367
1368 if (insn_b)
1369 {
1370 insn_cost += insn_rtx_cost (PATTERN (insn_b),
1371 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b)));
1372 if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
1373 return FALSE;
1374 }
1375
1376 /* Possibly rearrange operands to make things come out more naturally. */
1377 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1378 {
1379 int reversep = 0;
1380 if (rtx_equal_p (b, x))
1381 reversep = 1;
1382 else if (general_operand (b, GET_MODE (b)))
1383 reversep = 1;
1384
1385 if (reversep)
1386 {
1387 code = reversed_comparison_code (if_info->cond, if_info->jump);
1388 tmp = a, a = b, b = tmp;
1389 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1390 }
1391 }
1392
1393 start_sequence ();
1394
1395 orig_a = a;
1396 orig_b = b;
1397
1398 /* If either operand is complex, load it into a register first.
1399 The best way to do this is to copy the original insn. In this
1400 way we preserve any clobbers etc that the insn may have had.
1401 This is of course not possible in the IS_MEM case. */
1402 if (! general_operand (a, GET_MODE (a)))
1403 {
1404 rtx set;
1405
1406 if (is_mem)
1407 {
1408 tmp = gen_reg_rtx (GET_MODE (a));
1409 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1410 }
1411 else if (! insn_a)
1412 goto end_seq_and_fail;
1413 else
1414 {
1415 a = gen_reg_rtx (GET_MODE (a));
1416 tmp = copy_rtx (insn_a);
1417 set = single_set (tmp);
1418 SET_DEST (set) = a;
1419 tmp = emit_insn (PATTERN (tmp));
1420 }
1421 if (recog_memoized (tmp) < 0)
1422 goto end_seq_and_fail;
1423 }
1424 if (! general_operand (b, GET_MODE (b)))
1425 {
1426 rtx set, last;
1427
1428 if (is_mem)
1429 {
1430 tmp = gen_reg_rtx (GET_MODE (b));
1431 tmp = gen_rtx_SET (VOIDmode, tmp, b);
1432 }
1433 else if (! insn_b)
1434 goto end_seq_and_fail;
1435 else
1436 {
1437 b = gen_reg_rtx (GET_MODE (b));
1438 tmp = copy_rtx (insn_b);
1439 set = single_set (tmp);
1440 SET_DEST (set) = b;
1441 tmp = PATTERN (tmp);
1442 }
1443
1444 /* If the insn to set up A clobbers any registers B depends on, try to
1445 swap the insn that sets up A with the one that sets up B. If even
1446 that doesn't help, punt. */
1447 last = get_last_insn ();
1448 if (last && modified_in_p (orig_b, last))
1449 {
1450 tmp = emit_insn_before (tmp, get_insns ());
1451 if (modified_in_p (orig_a, tmp))
1452 goto end_seq_and_fail;
1453 }
1454 else
1455 tmp = emit_insn (tmp);
1456
1457 if (recog_memoized (tmp) < 0)
1458 goto end_seq_and_fail;
1459 }
1460
1461 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1462 XEXP (if_info->cond, 1), a, b);
1463
1464 if (! target)
1465 goto end_seq_and_fail;
1466
1467 /* If we're handling the memory case from above, emit the load now. */
1468 if (is_mem)
1469 {
1470 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1471
1472 /* Copy over flags as appropriate. */
1473 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1474 MEM_VOLATILE_P (tmp) = 1;
1475 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1476 MEM_IN_STRUCT_P (tmp) = 1;
1477 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1478 MEM_SCALAR_P (tmp) = 1;
1479 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1480 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1481 set_mem_align (tmp,
1482 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1483
1484 noce_emit_move_insn (if_info->x, tmp);
1485 }
1486 else if (target != x)
1487 noce_emit_move_insn (x, target);
1488
1489 tmp = end_ifcvt_sequence (if_info);
1490 if (!tmp)
1491 return FALSE;
1492
1493 emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1494 return TRUE;
1495
1496 end_seq_and_fail:
1497 end_sequence ();
1498 return FALSE;
1499 }
1500
1501 /* For most cases, the simplified condition we found is the best
1502 choice, but this is not the case for the min/max/abs transforms.
1503 For these we wish to know that it is A or B in the condition. */
1504
1505 static rtx
1506 noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
1507 rtx *earliest)
1508 {
1509 rtx cond, set, insn;
1510 int reverse;
1511
1512 /* If target is already mentioned in the known condition, return it. */
1513 if (reg_mentioned_p (target, if_info->cond))
1514 {
1515 *earliest = if_info->cond_earliest;
1516 return if_info->cond;
1517 }
1518
1519 set = pc_set (if_info->jump);
1520 cond = XEXP (SET_SRC (set), 0);
1521 reverse
1522 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1523 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1524 if (if_info->then_else_reversed)
1525 reverse = !reverse;
1526
1527 /* If we're looking for a constant, try to make the conditional
1528 have that constant in it. There are two reasons why it may
1529 not have the constant we want:
1530
1531 1. GCC may have needed to put the constant in a register, because
1532 the target can't compare directly against that constant. For
1533 this case, we look for a SET immediately before the comparison
1534 that puts a constant in that register.
1535
1536 2. GCC may have canonicalized the conditional, for example
1537 replacing "if x < 4" with "if x <= 3". We can undo that (or
1538 make equivalent types of changes) to get the constants we need
1539 if they're off by one in the right direction. */
1540
1541 if (GET_CODE (target) == CONST_INT)
1542 {
1543 enum rtx_code code = GET_CODE (if_info->cond);
1544 rtx op_a = XEXP (if_info->cond, 0);
1545 rtx op_b = XEXP (if_info->cond, 1);
1546 rtx prev_insn;
1547
1548 /* First, look to see if we put a constant in a register. */
1549 prev_insn = prev_nonnote_insn (if_info->cond_earliest);
1550 if (prev_insn
1551 && BLOCK_NUM (prev_insn) == BLOCK_NUM (if_info->cond_earliest)
1552 && INSN_P (prev_insn)
1553 && GET_CODE (PATTERN (prev_insn)) == SET)
1554 {
1555 rtx src = find_reg_equal_equiv_note (prev_insn);
1556 if (!src)
1557 src = SET_SRC (PATTERN (prev_insn));
1558 if (GET_CODE (src) == CONST_INT)
1559 {
1560 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1561 op_a = src;
1562 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1563 op_b = src;
1564
1565 if (GET_CODE (op_a) == CONST_INT)
1566 {
1567 rtx tmp = op_a;
1568 op_a = op_b;
1569 op_b = tmp;
1570 code = swap_condition (code);
1571 }
1572 }
1573 }
1574
1575 /* Now, look to see if we can get the right constant by
1576 adjusting the conditional. */
1577 if (GET_CODE (op_b) == CONST_INT)
1578 {
1579 HOST_WIDE_INT desired_val = INTVAL (target);
1580 HOST_WIDE_INT actual_val = INTVAL (op_b);
1581
1582 switch (code)
1583 {
1584 case LT:
1585 if (actual_val == desired_val + 1)
1586 {
1587 code = LE;
1588 op_b = GEN_INT (desired_val);
1589 }
1590 break;
1591 case LE:
1592 if (actual_val == desired_val - 1)
1593 {
1594 code = LT;
1595 op_b = GEN_INT (desired_val);
1596 }
1597 break;
1598 case GT:
1599 if (actual_val == desired_val - 1)
1600 {
1601 code = GE;
1602 op_b = GEN_INT (desired_val);
1603 }
1604 break;
1605 case GE:
1606 if (actual_val == desired_val + 1)
1607 {
1608 code = GT;
1609 op_b = GEN_INT (desired_val);
1610 }
1611 break;
1612 default:
1613 break;
1614 }
1615 }
1616
1617 /* If we made any changes, generate a new conditional that is
1618 equivalent to what we started with, but has the right
1619 constants in it. */
1620 if (code != GET_CODE (if_info->cond)
1621 || op_a != XEXP (if_info->cond, 0)
1622 || op_b != XEXP (if_info->cond, 1))
1623 {
1624 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1625 *earliest = if_info->cond_earliest;
1626 return cond;
1627 }
1628 }
1629
1630 cond = canonicalize_condition (if_info->jump, cond, reverse,
1631 earliest, target, false, true);
1632 if (! cond || ! reg_mentioned_p (target, cond))
1633 return NULL;
1634
1635 /* We almost certainly searched back to a different place.
1636 Need to re-verify correct lifetimes. */
1637
1638 /* X may not be mentioned in the range (cond_earliest, jump]. */
1639 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1640 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1641 return NULL;
1642
1643 /* A and B may not be modified in the range [cond_earliest, jump). */
1644 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1645 if (INSN_P (insn)
1646 && (modified_in_p (if_info->a, insn)
1647 || modified_in_p (if_info->b, insn)))
1648 return NULL;
1649
1650 return cond;
1651 }
1652
1653 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1654
1655 static int
1656 noce_try_minmax (struct noce_if_info *if_info)
1657 {
1658 rtx cond, earliest, target, seq;
1659 enum rtx_code code, op;
1660 int unsignedp;
1661
1662 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1663 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1664 to get the target to tell us... */
1665 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1666 || HONOR_NANS (GET_MODE (if_info->x)))
1667 return FALSE;
1668
1669 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1670 if (!cond)
1671 return FALSE;
1672
1673 /* Verify the condition is of the form we expect, and canonicalize
1674 the comparison code. */
1675 code = GET_CODE (cond);
1676 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1677 {
1678 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1679 return FALSE;
1680 }
1681 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1682 {
1683 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1684 return FALSE;
1685 code = swap_condition (code);
1686 }
1687 else
1688 return FALSE;
1689
1690 /* Determine what sort of operation this is. Note that the code is for
1691 a taken branch, so the code->operation mapping appears backwards. */
1692 switch (code)
1693 {
1694 case LT:
1695 case LE:
1696 case UNLT:
1697 case UNLE:
1698 op = SMAX;
1699 unsignedp = 0;
1700 break;
1701 case GT:
1702 case GE:
1703 case UNGT:
1704 case UNGE:
1705 op = SMIN;
1706 unsignedp = 0;
1707 break;
1708 case LTU:
1709 case LEU:
1710 op = UMAX;
1711 unsignedp = 1;
1712 break;
1713 case GTU:
1714 case GEU:
1715 op = UMIN;
1716 unsignedp = 1;
1717 break;
1718 default:
1719 return FALSE;
1720 }
1721
1722 start_sequence ();
1723
1724 target = expand_simple_binop (GET_MODE (if_info->x), op,
1725 if_info->a, if_info->b,
1726 if_info->x, unsignedp, OPTAB_WIDEN);
1727 if (! target)
1728 {
1729 end_sequence ();
1730 return FALSE;
1731 }
1732 if (target != if_info->x)
1733 noce_emit_move_insn (if_info->x, target);
1734
1735 seq = end_ifcvt_sequence (if_info);
1736 if (!seq)
1737 return FALSE;
1738
1739 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1740 if_info->cond = cond;
1741 if_info->cond_earliest = earliest;
1742
1743 return TRUE;
1744 }
1745
1746 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1747
1748 static int
1749 noce_try_abs (struct noce_if_info *if_info)
1750 {
1751 rtx cond, earliest, target, seq, a, b, c;
1752 int negate;
1753
1754 /* Reject modes with signed zeros. */
1755 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
1756 return FALSE;
1757
1758 /* Recognize A and B as constituting an ABS or NABS. The canonical
1759 form is a branch around the negation, taken when the object is the
1760 first operand of a comparison against 0 that evaluates to true. */
1761 a = if_info->a;
1762 b = if_info->b;
1763 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1764 negate = 0;
1765 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1766 {
1767 c = a; a = b; b = c;
1768 negate = 1;
1769 }
1770 else
1771 return FALSE;
1772
1773 cond = noce_get_alt_condition (if_info, b, &earliest);
1774 if (!cond)
1775 return FALSE;
1776
1777 /* Verify the condition is of the form we expect. */
1778 if (rtx_equal_p (XEXP (cond, 0), b))
1779 c = XEXP (cond, 1);
1780 else if (rtx_equal_p (XEXP (cond, 1), b))
1781 {
1782 c = XEXP (cond, 0);
1783 negate = !negate;
1784 }
1785 else
1786 return FALSE;
1787
1788 /* Verify that C is zero. Search one step backward for a
1789 REG_EQUAL note or a simple source if necessary. */
1790 if (REG_P (c))
1791 {
1792 rtx set, insn = prev_nonnote_insn (earliest);
1793 if (insn
1794 && BLOCK_NUM (insn) == BLOCK_NUM (earliest)
1795 && (set = single_set (insn))
1796 && rtx_equal_p (SET_DEST (set), c))
1797 {
1798 rtx note = find_reg_equal_equiv_note (insn);
1799 if (note)
1800 c = XEXP (note, 0);
1801 else
1802 c = SET_SRC (set);
1803 }
1804 else
1805 return FALSE;
1806 }
1807 if (MEM_P (c)
1808 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1809 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1810 c = get_pool_constant (XEXP (c, 0));
1811
1812 /* Work around funny ideas get_condition has wrt canonicalization.
1813 Note that these rtx constants are known to be CONST_INT, and
1814 therefore imply integer comparisons. */
1815 if (c == constm1_rtx && GET_CODE (cond) == GT)
1816 ;
1817 else if (c == const1_rtx && GET_CODE (cond) == LT)
1818 ;
1819 else if (c != CONST0_RTX (GET_MODE (b)))
1820 return FALSE;
1821
1822 /* Determine what sort of operation this is. */
1823 switch (GET_CODE (cond))
1824 {
1825 case LT:
1826 case LE:
1827 case UNLT:
1828 case UNLE:
1829 negate = !negate;
1830 break;
1831 case GT:
1832 case GE:
1833 case UNGT:
1834 case UNGE:
1835 break;
1836 default:
1837 return FALSE;
1838 }
1839
1840 start_sequence ();
1841
1842 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
1843
1844 /* ??? It's a quandary whether cmove would be better here, especially
1845 for integers. Perhaps combine will clean things up. */
1846 if (target && negate)
1847 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1848
1849 if (! target)
1850 {
1851 end_sequence ();
1852 return FALSE;
1853 }
1854
1855 if (target != if_info->x)
1856 noce_emit_move_insn (if_info->x, target);
1857
1858 seq = end_ifcvt_sequence (if_info);
1859 if (!seq)
1860 return FALSE;
1861
1862 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1863 if_info->cond = cond;
1864 if_info->cond_earliest = earliest;
1865
1866 return TRUE;
1867 }
1868
1869 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
1870
1871 static int
1872 noce_try_sign_mask (struct noce_if_info *if_info)
1873 {
1874 rtx cond, t, m, c, seq;
1875 enum machine_mode mode;
1876 enum rtx_code code;
1877 bool t_unconditional;
1878
1879 cond = if_info->cond;
1880 code = GET_CODE (cond);
1881 m = XEXP (cond, 0);
1882 c = XEXP (cond, 1);
1883
1884 t = NULL_RTX;
1885 if (if_info->a == const0_rtx)
1886 {
1887 if ((code == LT && c == const0_rtx)
1888 || (code == LE && c == constm1_rtx))
1889 t = if_info->b;
1890 }
1891 else if (if_info->b == const0_rtx)
1892 {
1893 if ((code == GE && c == const0_rtx)
1894 || (code == GT && c == constm1_rtx))
1895 t = if_info->a;
1896 }
1897
1898 if (! t || side_effects_p (t))
1899 return FALSE;
1900
1901 /* We currently don't handle different modes. */
1902 mode = GET_MODE (t);
1903 if (GET_MODE (m) != mode)
1904 return FALSE;
1905
1906 /* This is only profitable if T is unconditionally executed/evaluated in the
1907 original insn sequence or T is cheap. The former happens if B is the
1908 non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
1909 INSN_B which can happen for e.g. conditional stores to memory. For the
1910 cost computation use the block TEST_BB where the evaluation will end up
1911 after the transformation. */
1912 t_unconditional =
1913 (t == if_info->b
1914 && (if_info->insn_b == NULL_RTX
1915 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
1916 if (!(t_unconditional
1917 || (rtx_cost (t, SET, optimize_bb_for_speed_p (if_info->test_bb))
1918 < COSTS_N_INSNS (2))))
1919 return FALSE;
1920
1921 start_sequence ();
1922 /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
1923 "(signed) m >> 31" directly. This benefits targets with specialized
1924 insns to obtain the signmask, but still uses ashr_optab otherwise. */
1925 m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
1926 t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
1927 : NULL_RTX;
1928
1929 if (!t)
1930 {
1931 end_sequence ();
1932 return FALSE;
1933 }
1934
1935 noce_emit_move_insn (if_info->x, t);
1936
1937 seq = end_ifcvt_sequence (if_info);
1938 if (!seq)
1939 return FALSE;
1940
1941 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1942 return TRUE;
1943 }
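/* Concretely, for a 32-bit M the sequence above computes
   x = ((signed) m >> 31) & b (via emit_store_flag rather than an
   explicit shift), since the arithmetic shift yields the all-ones
   mask exactly when m < 0.  */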
1944
1945
1946 /* Optimize away "if (x & C) x |= C" and similar bit manipulation
1947 transformations. */
1948
1949 static int
1950 noce_try_bitop (struct noce_if_info *if_info)
1951 {
1952 rtx cond, x, a, result, seq;
1953 enum machine_mode mode;
1954 enum rtx_code code;
1955 int bitnum;
1956
1957 x = if_info->x;
1958 cond = if_info->cond;
1959 code = GET_CODE (cond);
1960
1961 /* Check for no else condition. */
1962 if (! rtx_equal_p (x, if_info->b))
1963 return FALSE;
1964
1965 /* Check for a suitable condition. */
1966 if (code != NE && code != EQ)
1967 return FALSE;
1968 if (XEXP (cond, 1) != const0_rtx)
1969 return FALSE;
1970 cond = XEXP (cond, 0);
1971
1972 /* ??? We could also handle AND here. */
1973 if (GET_CODE (cond) == ZERO_EXTRACT)
1974 {
1975 if (XEXP (cond, 1) != const1_rtx
1976 || GET_CODE (XEXP (cond, 2)) != CONST_INT
1977 || ! rtx_equal_p (x, XEXP (cond, 0)))
1978 return FALSE;
1979 bitnum = INTVAL (XEXP (cond, 2));
1980 mode = GET_MODE (x);
1981 if (BITS_BIG_ENDIAN)
1982 bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
1983 if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
1984 return FALSE;
1985 }
1986 else
1987 return FALSE;
1988
1989 a = if_info->a;
1990 if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
1991 {
1992 /* Check for "if (X & C) x = x op C". */
1993 if (! rtx_equal_p (x, XEXP (a, 0))
1994 || GET_CODE (XEXP (a, 1)) != CONST_INT
1995 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
1996 != (unsigned HOST_WIDE_INT) 1 << bitnum)
1997 return FALSE;
1998
1999 /* if ((x & C) == 0) x |= C; is transformed to x |= C. */
2000 /* if ((x & C) != 0) x |= C; is transformed to nothing. */
2001 if (GET_CODE (a) == IOR)
2002 result = (code == NE) ? a : NULL_RTX;
2003 else if (code == NE)
2004 {
2005 /* if ((x & C) == 0) x ^= C; is transformed to x |= C. */
2006 result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
2007 result = simplify_gen_binary (IOR, mode, x, result);
2008 }
2009 else
2010 {
2011 /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C. */
2012 result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
2013 result = simplify_gen_binary (AND, mode, x, result);
2014 }
2015 }
2016 else if (GET_CODE (a) == AND)
2017 {
2018 /* Check for "if (X & C) x &= ~C". */
2019 if (! rtx_equal_p (x, XEXP (a, 0))
2020 || GET_CODE (XEXP (a, 1)) != CONST_INT
2021 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2022 != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
2023 return FALSE;
2024
2025 /* if ((x & C) == 0) x &= ~C; is transformed to nothing. */
2026 /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C. */
2027 result = (code == EQ) ? a : NULL_RTX;
2028 }
2029 else
2030 return FALSE;
2031
2032 if (result)
2033 {
2034 start_sequence ();
2035 noce_emit_move_insn (x, result);
2036 seq = end_ifcvt_sequence (if_info);
2037 if (!seq)
2038 return FALSE;
2039
2040 emit_insn_before_setloc (seq, if_info->jump,
2041 INSN_LOCATOR (if_info->insn_a));
2042 }
2043 return TRUE;
2044 }
2045
2046
2047 /* Similar to get_condition, only the resulting condition must be
2048 valid at JUMP, instead of at EARLIEST.
2049
2050 If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
2051 THEN block of the caller, and we have to reverse the condition. */
2052
2053 static rtx
2054 noce_get_condition (rtx jump, rtx *earliest, bool then_else_reversed)
2055 {
2056 rtx cond, set, tmp;
2057 bool reverse;
2058
2059 if (! any_condjump_p (jump))
2060 return NULL_RTX;
2061
2062 set = pc_set (jump);
2063
2064 /* If this branches to JUMP_LABEL when the condition is false,
2065 reverse the condition. */
2066 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
2067 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
2068
2069 /* We may have to reverse because the caller's if block is not canonical,
2070 i.e. the THEN block isn't the fallthrough block for the TEST block
2071 (see find_if_header). */
2072 if (then_else_reversed)
2073 reverse = !reverse;
2074
2075 /* If the condition variable is a register and is MODE_INT, accept it. */
2076
2077 cond = XEXP (SET_SRC (set), 0);
2078 tmp = XEXP (cond, 0);
2079 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
2080 {
2081 *earliest = jump;
2082
2083 if (reverse)
2084 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2085 GET_MODE (cond), tmp, XEXP (cond, 1));
2086 return cond;
2087 }
2088
2089 /* Otherwise, fall back on canonicalize_condition to do the dirty
2090 work of manipulating MODE_CC values and COMPARE rtx codes. */
2091 return canonicalize_condition (jump, cond, reverse, earliest,
2092 NULL_RTX, false, true);
2093 }
2094
2095 /* Return true if OP is ok for if-then-else processing. */
2096
2097 static int
2098 noce_operand_ok (const_rtx op)
2099 {
2100 /* We special-case memories, so handle any of them with
2101 no address side effects. */
2102 if (MEM_P (op))
2103 return ! side_effects_p (XEXP (op, 0));
2104
2105 if (side_effects_p (op))
2106 return FALSE;
2107
2108 return ! may_trap_p (op);
2109 }
2110
2111 /* Return true if a write into MEM may trap or fault. */
2112
2113 static bool
2114 noce_mem_write_may_trap_or_fault_p (const_rtx mem)
2115 {
2116 rtx addr;
2117
2118 if (MEM_READONLY_P (mem))
2119 return true;
2120
2121 if (may_trap_or_fault_p (mem))
2122 return true;
2123
2124 addr = XEXP (mem, 0);
2125
2126 /* Call the target hook to avoid the effects of -fpic etc. */
2127 addr = targetm.delegitimize_address (addr);
2128
2129 while (addr)
2130 switch (GET_CODE (addr))
2131 {
2132 case CONST:
2133 case PRE_DEC:
2134 case PRE_INC:
2135 case POST_DEC:
2136 case POST_INC:
2137 case POST_MODIFY:
2138 addr = XEXP (addr, 0);
2139 break;
2140 case LO_SUM:
2141 case PRE_MODIFY:
2142 addr = XEXP (addr, 1);
2143 break;
2144 case PLUS:
2145 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
2146 addr = XEXP (addr, 0);
2147 else
2148 return false;
2149 break;
2150 case LABEL_REF:
2151 return true;
2152 case SYMBOL_REF:
2153 if (SYMBOL_REF_DECL (addr)
2154 && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
2155 return true;
2156 return false;
2157 default:
2158 return false;
2159 }
2160
2161 return false;
2162 }
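/* An illustrative sketch of the address walk above (examples mine).  After
   may_trap_or_fault_p has vetted the access itself, the loop peels CONST,
   auto-increment/decrement and constant-offset PLUS wrappers:

     (plus (reg) (const_int 8))        -> keep walking on (reg)
     (symbol_ref "v") in .rodata       -> true: a write would fault
     (label_ref L)                     -> true: code is not writable
     anything else, e.g. a bare (reg)  -> false: not known to fault  */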
2163
2164 /* Return whether we can use store speculation for MEM. TOP_BB is the
2165 basic block above the conditional block where we are considering
2166 doing the speculative store. We look for whether MEM is set
2167 unconditionally later in the function. */
2168
2169 static bool
2170 noce_can_store_speculate_p (basic_block top_bb, const_rtx mem)
2171 {
2172 basic_block dominator;
2173
2174 for (dominator = get_immediate_dominator (CDI_POST_DOMINATORS, top_bb);
2175 dominator != NULL;
2176 dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
2177 {
2178 rtx insn;
2179
2180 FOR_BB_INSNS (dominator, insn)
2181 {
2182 /* If we see something that might be a memory barrier, we
2183 have to stop looking. Even if the MEM is set later in
2184 the function, we still don't want to set it
2185 unconditionally before the barrier. */
2186 if (INSN_P (insn)
2187 && (volatile_insn_p (PATTERN (insn))
2188 || (CALL_P (insn) && (!RTL_CONST_CALL_P (insn)))))
2189 return false;
2190
2191 if (memory_modified_in_insn_p (mem, insn))
2192 return true;
2193 if (modified_in_p (XEXP (mem, 0), insn))
2194 return false;
2195
2196 }
2197 }
2198
2199 return false;
2200 }
2201
2202 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2203 it without using conditional execution. Return TRUE if we were successful
2204 at converting the block. */
2205
2206 static int
2207 noce_process_if_block (struct noce_if_info *if_info)
2208 {
2209 basic_block test_bb = if_info->test_bb; /* test block */
2210 basic_block then_bb = if_info->then_bb; /* THEN */
2211 basic_block else_bb = if_info->else_bb; /* ELSE or NULL */
2212 basic_block join_bb = if_info->join_bb; /* JOIN */
2213 rtx jump = if_info->jump;
2214 rtx cond = if_info->cond;
2215 rtx insn_a, insn_b;
2216 rtx set_a, set_b;
2217 rtx orig_x, x, a, b;
2218
2219 /* We're looking for patterns of the form
2220
2221 (1) if (...) x = a; else x = b;
2222 (2) x = b; if (...) x = a;
2223 (3) if (...) x = a; // as if with an initial x = x.
2224
2225 The later patterns require jumps to be more expensive.
2226
2227 ??? For future expansion, look for multiple X in such patterns. */
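  /* For instance (an illustrative sketch), on a conditional-move target
     form (1)

       if (c) x = a; else x = b;

     can collapse to the single assignment "x = c ? a : b", while form (2)

       x = b; if (c) x = a;

     keeps the existing unconditional set of B and only replaces the
     conditional part.  */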
2228
2229 /* Look for one of the potential sets. */
2230 insn_a = first_active_insn (then_bb);
2231 if (! insn_a
2232 || insn_a != last_active_insn (then_bb, FALSE)
2233 || (set_a = single_set (insn_a)) == NULL_RTX)
2234 return FALSE;
2235
2236 x = SET_DEST (set_a);
2237 a = SET_SRC (set_a);
2238
2239 /* Look for the other potential set. Make sure we've got equivalent
2240 destinations. */
2241 /* ??? This is overconservative. Storing to two different mems is
2242 as easy as conditionally computing the address. Storing to a
2243 single mem merely requires a scratch memory to use as one of the
2244 destination addresses; often the memory immediately below the
2245 stack pointer is available for this. */
2246 set_b = NULL_RTX;
2247 if (else_bb)
2248 {
2249 insn_b = first_active_insn (else_bb);
2250 if (! insn_b
2251 || insn_b != last_active_insn (else_bb, FALSE)
2252 || (set_b = single_set (insn_b)) == NULL_RTX
2253 || ! rtx_equal_p (x, SET_DEST (set_b)))
2254 return FALSE;
2255 }
2256 else
2257 {
2258 insn_b = prev_nonnote_insn (if_info->cond_earliest);
2259 /* We're going to be moving the evaluation of B down from above
2260 COND_EARLIEST to JUMP. Make sure the relevant data is still
2261 intact. */
2262 if (! insn_b
2263 || BLOCK_NUM (insn_b) != BLOCK_NUM (if_info->cond_earliest)
2264 || !NONJUMP_INSN_P (insn_b)
2265 || (set_b = single_set (insn_b)) == NULL_RTX
2266 || ! rtx_equal_p (x, SET_DEST (set_b))
2267 || ! noce_operand_ok (SET_SRC (set_b))
2268 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
2269 || modified_between_p (SET_SRC (set_b),
2270 PREV_INSN (if_info->cond_earliest), jump)
2271 /* Likewise with X. In particular this can happen when
2272 noce_get_condition looks farther back in the instruction
2273 stream than one might expect. */
2274 || reg_overlap_mentioned_p (x, cond)
2275 || reg_overlap_mentioned_p (x, a)
2276 || modified_between_p (x, PREV_INSN (if_info->cond_earliest), jump))
2277 insn_b = set_b = NULL_RTX;
2278 }
2279
2280 /* If x has side effects then only the if-then-else form is safe to
2281 convert. But even in that case we would need to restore any notes
2282 (such as REG_INC) at the end. That can be tricky if
2283 noce_emit_move_insn expands to more than one insn, so disable the
2284 optimization entirely for now if there are side effects. */
2285 if (side_effects_p (x))
2286 return FALSE;
2287
2288 b = (set_b ? SET_SRC (set_b) : x);
2289
2290 /* Only operate on register destinations, and even then avoid extending
2291 the lifetime of hard registers on small register class machines. */
2292 orig_x = x;
2293 if (!REG_P (x)
2294 || (SMALL_REGISTER_CLASSES
2295 && REGNO (x) < FIRST_PSEUDO_REGISTER))
2296 {
2297 if (GET_MODE (x) == BLKmode)
2298 return FALSE;
2299
2300 if (GET_CODE (x) == ZERO_EXTRACT
2301 && (GET_CODE (XEXP (x, 1)) != CONST_INT
2302 || GET_CODE (XEXP (x, 2)) != CONST_INT))
2303 return FALSE;
2304
2305 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
2306 ? XEXP (x, 0) : x));
2307 }
2308
2309 /* Don't operate on sources that may trap or are volatile. */
2310 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
2311 return FALSE;
2312
2313 retry:
2314 /* Set up the info block for our subroutines. */
2315 if_info->insn_a = insn_a;
2316 if_info->insn_b = insn_b;
2317 if_info->x = x;
2318 if_info->a = a;
2319 if_info->b = b;
2320
2321 /* Try optimizations in some approximation of a useful order. */
2322 /* ??? Should first look to see if X is live incoming at all. If it
2323 isn't, we don't need anything but an unconditional set. */
2324
2325 /* Look and see if A and B are really the same. Avoid creating silly
2326 cmove constructs that no one will fix up later. */
2327 if (rtx_equal_p (a, b))
2328 {
2329 /* If we have an INSN_B, we don't have to create any new rtl. Just
2330 move the instruction that we already have. If we don't have an
2331 INSN_B, that means that A == X, and we've got a noop move. In
2332 that case don't do anything and let the code below delete INSN_A. */
2333 if (insn_b && else_bb)
2334 {
2335 rtx note;
2336
2337 if (else_bb && insn_b == BB_END (else_bb))
2338 BB_END (else_bb) = PREV_INSN (insn_b);
2339 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
2340
2341 /* If there was a REG_EQUAL note, delete it since it may have been
2342 true due to this insn being after a jump. */
2343 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
2344 remove_note (insn_b, note);
2345
2346 insn_b = NULL_RTX;
2347 }
2348 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
2349 x must be executed twice. */
2350 else if (insn_b && side_effects_p (orig_x))
2351 return FALSE;
2352
2353 x = orig_x;
2354 goto success;
2355 }
2356
2357 if (!set_b && MEM_P (orig_x))
2358 {
2359 /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
2360 for optimizations if writing to x may trap or fault,
2361 i.e. it's a memory other than a static var or a stack slot,
2362 is misaligned on strict-alignment machines or is read-only. If
2363 x is a read-only memory, then the program is valid only if we
2364 avoid the store into it. If there are stores on both the
2365 THEN and ELSE arms, then we can go ahead with the conversion;
2366 either the program is broken, or the condition is always
2367 false such that the other memory is selected. */
2368 if (noce_mem_write_may_trap_or_fault_p (orig_x))
2369 return FALSE;
2370
2371 /* Avoid store speculation: given "if (...) x = a" where x is a
2372 MEM, we only want to do the store if x is always set
2373 somewhere in the function. This avoids cases like
2374 if (pthread_mutex_trylock(mutex))
2375 ++global_variable;
2376 where we only want global_variable to be changed if the mutex
2377 is held. FIXME: This should ideally be expressed directly in
2378 RTL somehow. */
2379 if (!noce_can_store_speculate_p (test_bb, orig_x))
2380 return FALSE;
2381 }
2382
2383 if (noce_try_move (if_info))
2384 goto success;
2385 if (noce_try_store_flag (if_info))
2386 goto success;
2387 if (noce_try_bitop (if_info))
2388 goto success;
2389 if (noce_try_minmax (if_info))
2390 goto success;
2391 if (noce_try_abs (if_info))
2392 goto success;
2393 if (HAVE_conditional_move
2394 && noce_try_cmove (if_info))
2395 goto success;
2396 if (! HAVE_conditional_execution)
2397 {
2398 if (noce_try_store_flag_constants (if_info))
2399 goto success;
2400 if (noce_try_addcc (if_info))
2401 goto success;
2402 if (noce_try_store_flag_mask (if_info))
2403 goto success;
2404 if (HAVE_conditional_move
2405 && noce_try_cmove_arith (if_info))
2406 goto success;
2407 if (noce_try_sign_mask (if_info))
2408 goto success;
2409 }
2410
2411 if (!else_bb && set_b)
2412 {
2413 insn_b = set_b = NULL_RTX;
2414 b = orig_x;
2415 goto retry;
2416 }
2417
2418 return FALSE;
2419
2420 success:
2421
2422 /* If we used a temporary, fix it up now. */
2423 if (orig_x != x)
2424 {
2425 rtx seq;
2426
2427 start_sequence ();
2428 noce_emit_move_insn (orig_x, x);
2429 seq = get_insns ();
2430 set_used_flags (orig_x);
2431 unshare_all_rtl_in_chain (seq);
2432 end_sequence ();
2433
2434 emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATOR (insn_a));
2435 }
2436
2437 /* The original THEN and ELSE blocks may now be removed. The test block
2438 must now jump to the join block. If the test block and the join block
2439 can be merged, do so. */
2440 if (else_bb)
2441 {
2442 delete_basic_block (else_bb);
2443 num_true_changes++;
2444 }
2445 else
2446 remove_edge (find_edge (test_bb, join_bb));
2447
2448 remove_edge (find_edge (then_bb, join_bb));
2449 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
2450 delete_basic_block (then_bb);
2451 num_true_changes++;
2452
2453 if (can_merge_blocks_p (test_bb, join_bb))
2454 {
2455 merge_blocks (test_bb, join_bb);
2456 num_true_changes++;
2457 }
2458
2459 num_updated_if_blocks++;
2460 return TRUE;
2461 }
2462
2463 /* Check whether a block is suitable for conditional move conversion.
2464 Every insn must be a simple set of a register to a constant or a
2465 register. For each assignment, store the value in the array VALS,
2466 indexed by register number, then store the register number in
2467 REGS. COND is the condition we will test. */
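/* For example (an illustrative sketch; register numbers are made up), a
   THEN block containing only

     r10 <- (const_int 1)
     r11 <- r12

   passes the checks below, leaving VALS[10] = (const_int 1),
   VALS[11] = r12 and REGS = {10, 11}.  A block that stored to memory, or
   that read r10 after setting it, would be rejected.  */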
2468
2469 static int
2470 check_cond_move_block (basic_block bb, rtx *vals, VEC (int, heap) **regs, rtx cond)
2471 {
2472 rtx insn;
2473
2474 /* We can only handle simple jumps at the end of the basic block.
2475 It is almost impossible to update the CFG otherwise. */
2476 insn = BB_END (bb);
2477 if (JUMP_P (insn) && !onlyjump_p (insn))
2478 return FALSE;
2479
2480 FOR_BB_INSNS (bb, insn)
2481 {
2482 rtx set, dest, src;
2483
2484 if (!INSN_P (insn) || JUMP_P (insn))
2485 continue;
2486 set = single_set (insn);
2487 if (!set)
2488 return FALSE;
2489
2490 dest = SET_DEST (set);
2491 src = SET_SRC (set);
2492 if (!REG_P (dest)
2493 || (SMALL_REGISTER_CLASSES && HARD_REGISTER_P (dest)))
2494 return FALSE;
2495
2496 if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
2497 return FALSE;
2498
2499 if (side_effects_p (src) || side_effects_p (dest))
2500 return FALSE;
2501
2502 if (may_trap_p (src) || may_trap_p (dest))
2503 return FALSE;
2504
2505 /* Don't try to handle this if the source register was
2506 modified earlier in the block. */
2507 if ((REG_P (src)
2508 && vals[REGNO (src)] != NULL)
2509 || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
2510 && vals[REGNO (SUBREG_REG (src))] != NULL))
2511 return FALSE;
2512
2513 /* Don't try to handle this if the destination register was
2514 modified earlier in the block. */
2515 if (vals[REGNO (dest)] != NULL)
2516 return FALSE;
2517
2518 /* Don't try to handle this if the condition uses the
2519 destination register. */
2520 if (reg_overlap_mentioned_p (dest, cond))
2521 return FALSE;
2522
2523 /* Don't try to handle this if the source register is modified
2524 later in the block. */
2525 if (!CONSTANT_P (src)
2526 && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
2527 return FALSE;
2528
2529 vals[REGNO (dest)] = src;
2530
2531 VEC_safe_push (int, heap, *regs, REGNO (dest));
2532 }
2533
2534 return TRUE;
2535 }
2536
2537 /* Given a basic block BB suitable for conditional move conversion,
2538 a condition COND, and arrays THEN_VALS and ELSE_VALS containing the
2539 register values depending on COND, emit the insns in the block as
2540 conditional moves. If ELSE_BLOCK is true, THEN_BB was already
2541 processed. The caller has started a sequence for the conversion.
2542 Return true if successful, false if something goes wrong. */
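/* Sketch of the resulting code (illustrative only): for

     if (c) { x = 1; y = 2; } else { x = 3; }

   the THEN pass emits "x = c ? 1 : 3" and "y = c ? 2 : y", and the ELSE
   pass skips X because it was already handled; a register set on only one
   arm simply keeps its old value on the other arm.  */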
2543
2544 static bool
2545 cond_move_convert_if_block (struct noce_if_info *if_infop,
2546 basic_block bb, rtx cond,
2547 rtx *then_vals, rtx *else_vals,
2548 bool else_block_p)
2549 {
2550 enum rtx_code code;
2551 rtx insn, cond_arg0, cond_arg1;
2552
2553 code = GET_CODE (cond);
2554 cond_arg0 = XEXP (cond, 0);
2555 cond_arg1 = XEXP (cond, 1);
2556
2557 FOR_BB_INSNS (bb, insn)
2558 {
2559 rtx set, target, dest, t, e;
2560 unsigned int regno;
2561
2562 if (!INSN_P (insn) || JUMP_P (insn))
2563 continue;
2564 set = single_set (insn);
2565 gcc_assert (set && REG_P (SET_DEST (set)));
2566
2567 dest = SET_DEST (set);
2568 regno = REGNO (dest);
2569
2570 t = then_vals[regno];
2571 e = else_vals[regno];
2572
2573 if (else_block_p)
2574 {
2575 /* If this register was set in the then block, we already
2576 handled this case there. */
2577 if (t)
2578 continue;
2579 t = dest;
2580 gcc_assert (e);
2581 }
2582 else
2583 {
2584 gcc_assert (t);
2585 if (!e)
2586 e = dest;
2587 }
2588
2589 target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
2590 t, e);
2591 if (!target)
2592 return false;
2593
2594 if (target != dest)
2595 noce_emit_move_insn (dest, target);
2596 }
2597
2598 return true;
2599 }
2600
2601 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2602 it using only conditional moves. Return TRUE if we were successful at
2603 converting the block. */
2604
2605 static int
2606 cond_move_process_if_block (struct noce_if_info *if_info)
2607 {
2608 basic_block test_bb = if_info->test_bb;
2609 basic_block then_bb = if_info->then_bb;
2610 basic_block else_bb = if_info->else_bb;
2611 basic_block join_bb = if_info->join_bb;
2612 rtx jump = if_info->jump;
2613 rtx cond = if_info->cond;
2614 rtx seq, loc_insn;
2615 int max_reg, size, c, reg;
2616 rtx *then_vals;
2617 rtx *else_vals;
2618 VEC (int, heap) *then_regs = NULL;
2619 VEC (int, heap) *else_regs = NULL;
2620 unsigned int i;
2621
2622 /* Build a mapping for each block to the value used for each
2623 register. */
2624 max_reg = max_reg_num ();
2625 size = (max_reg + 1) * sizeof (rtx);
2626 then_vals = (rtx *) alloca (size);
2627 else_vals = (rtx *) alloca (size);
2628 memset (then_vals, 0, size);
2629 memset (else_vals, 0, size);
2630
2631 /* Make sure the blocks are suitable. */
2632 if (!check_cond_move_block (then_bb, then_vals, &then_regs, cond)
2633 || (else_bb && !check_cond_move_block (else_bb, else_vals, &else_regs, cond)))
2634 {
2635 VEC_free (int, heap, then_regs);
2636 VEC_free (int, heap, else_regs);
2637 return FALSE;
2638 }
2639
2640 /* Make sure the blocks can be used together. If the same register
2641 is set in both blocks, and is not set to a constant in both
2642 cases, then both blocks must set it to the same register. We
2643 have already verified that if it is set to a register, that the
2644 source register does not change after the assignment. Also count
2645 the number of registers set in only one of the blocks. */
2646 c = 0;
2647 for (i = 0; VEC_iterate (int, then_regs, i, reg); i++)
2648 {
2649 if (!then_vals[reg] && !else_vals[reg])
2650 continue;
2651
2652 if (!else_vals[reg])
2653 ++c;
2654 else
2655 {
2656 if (!CONSTANT_P (then_vals[reg])
2657 && !CONSTANT_P (else_vals[reg])
2658 && !rtx_equal_p (then_vals[reg], else_vals[reg]))
2659 {
2660 VEC_free (int, heap, then_regs);
2661 VEC_free (int, heap, else_regs);
2662 return FALSE;
2663 }
2664 }
2665 }
2666
2667 /* Finish off c for MAX_CONDITIONAL_EXECUTE. */
2668 for (i = 0; VEC_iterate (int, else_regs, i, reg); ++i)
2669 if (!then_vals[reg])
2670 ++c;
2671
2672 /* Make sure it is reasonable to convert this block. What matters
2673 is the number of assignments currently made in only one of the
2674 branches, since if we convert we are going to always execute
2675 them. */
2676 if (c > MAX_CONDITIONAL_EXECUTE)
2677 {
2678 VEC_free (int, heap, then_regs);
2679 VEC_free (int, heap, else_regs);
2680 return FALSE;
2681 }
2682
2683 /* Try to emit the conditional moves. First do the then block,
2684 then do anything left in the else block. */
2685 start_sequence ();
2686 if (!cond_move_convert_if_block (if_info, then_bb, cond,
2687 then_vals, else_vals, false)
2688 || (else_bb
2689 && !cond_move_convert_if_block (if_info, else_bb, cond,
2690 then_vals, else_vals, true)))
2691 {
2692 end_sequence ();
2693 VEC_free (int, heap, then_regs);
2694 VEC_free (int, heap, else_regs);
2695 return FALSE;
2696 }
2697 seq = end_ifcvt_sequence (if_info);
2698 if (!seq)
2699 {
2700 VEC_free (int, heap, then_regs);
2701 VEC_free (int, heap, else_regs);
2702 return FALSE;
2703 }
2704
2705 loc_insn = first_active_insn (then_bb);
2706 if (!loc_insn)
2707 {
2708 loc_insn = first_active_insn (else_bb);
2709 gcc_assert (loc_insn);
2710 }
2711 emit_insn_before_setloc (seq, jump, INSN_LOCATOR (loc_insn));
2712
2713 if (else_bb)
2714 {
2715 delete_basic_block (else_bb);
2716 num_true_changes++;
2717 }
2718 else
2719 remove_edge (find_edge (test_bb, join_bb));
2720
2721 remove_edge (find_edge (then_bb, join_bb));
2722 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
2723 delete_basic_block (then_bb);
2724 num_true_changes++;
2725
2726 if (can_merge_blocks_p (test_bb, join_bb))
2727 {
2728 merge_blocks (test_bb, join_bb);
2729 num_true_changes++;
2730 }
2731
2732 num_updated_if_blocks++;
2733
2734 VEC_free (int, heap, then_regs);
2735 VEC_free (int, heap, else_regs);
2736 return TRUE;
2737 }
2738
2739 \f
2740 /* Determine if a given basic block heads a simple IF-THEN-JOIN or an
2741 IF-THEN-ELSE-JOIN block.
2742
2743 If so, we'll try to convert the insns to not require the branch,
2744 using only transformations that do not require conditional execution.
2745
2746 Return TRUE if we were successful at converting the block. */
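/* The three shapes recognized below, sketched:

     IF-THEN-ELSE-JOIN      IF-THEN-JOIN       IF-ELSE-JOIN
          test                  test               test
          /  \                  /  \               /  \
       THEN  ELSE            THEN   \             /   ELSE
          \  /                  \   |             |   /
          join                  join              join

   In the last shape the roles of the two arms are swapped and the jump
   condition is reversed, so the rest of the code sees a canonical
   IF-THEN-JOIN.  */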
2747
2748 static int
2749 noce_find_if_block (basic_block test_bb,
2750 edge then_edge, edge else_edge,
2751 int pass)
2752 {
2753 basic_block then_bb, else_bb, join_bb;
2754 bool then_else_reversed = false;
2755 rtx jump, cond;
2756 rtx cond_earliest;
2757 struct noce_if_info if_info;
2758
2759 /* We should only ever get here before reload. */
2760 gcc_assert (!reload_completed);
2761
2762 /* Recognize an IF-THEN-ELSE-JOIN block. */
2763 if (single_pred_p (then_edge->dest)
2764 && single_succ_p (then_edge->dest)
2765 && single_pred_p (else_edge->dest)
2766 && single_succ_p (else_edge->dest)
2767 && single_succ (then_edge->dest) == single_succ (else_edge->dest))
2768 {
2769 then_bb = then_edge->dest;
2770 else_bb = else_edge->dest;
2771 join_bb = single_succ (then_bb);
2772 }
2773 /* Recognize an IF-THEN-JOIN block. */
2774 else if (single_pred_p (then_edge->dest)
2775 && single_succ_p (then_edge->dest)
2776 && single_succ (then_edge->dest) == else_edge->dest)
2777 {
2778 then_bb = then_edge->dest;
2779 else_bb = NULL_BLOCK;
2780 join_bb = else_edge->dest;
2781 }
2782 /* Recognize an IF-ELSE-JOIN block. We can have those because the order
2783 of basic blocks in cfglayout mode does not matter, so the fallthrough
2784 edge can go to any basic block (and not just to bb->next_bb, like in
2785 cfgrtl mode). */
2786 else if (single_pred_p (else_edge->dest)
2787 && single_succ_p (else_edge->dest)
2788 && single_succ (else_edge->dest) == then_edge->dest)
2789 {
2790 /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
2791 To make this work, we have to invert the THEN and ELSE blocks
2792 and reverse the jump condition. */
2793 then_bb = else_edge->dest;
2794 else_bb = NULL_BLOCK;
2795 join_bb = single_succ (then_bb);
2796 then_else_reversed = true;
2797 }
2798 else
2799 /* Not a form we can handle. */
2800 return FALSE;
2801
2802 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
2803 if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
2804 return FALSE;
2805 if (else_bb
2806 && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
2807 return FALSE;
2808
2809 num_possible_if_blocks++;
2810
2811 if (dump_file)
2812 {
2813 fprintf (dump_file,
2814 "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
2815 (else_bb) ? "-ELSE" : "",
2816 pass, test_bb->index, then_bb->index);
2817
2818 if (else_bb)
2819 fprintf (dump_file, ", else %d", else_bb->index);
2820
2821 fprintf (dump_file, ", join %d\n", join_bb->index);
2822 }
2823
2824 /* If the conditional jump is more than just a conditional
2825 jump, then we cannot do if-conversion on this block. */
2826 jump = BB_END (test_bb);
2827 if (! onlyjump_p (jump))
2828 return FALSE;
2829
2830 /* If this is not a standard conditional jump, we can't parse it. */
2831 cond = noce_get_condition (jump,
2832 &cond_earliest,
2833 then_else_reversed);
2834 if (!cond)
2835 return FALSE;
2836
2837 /* We must be comparing objects whose modes imply the size. */
2838 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2839 return FALSE;
2840
2841 /* Initialize an IF_INFO struct to pass around. */
2842 memset (&if_info, 0, sizeof if_info);
2843 if_info.test_bb = test_bb;
2844 if_info.then_bb = then_bb;
2845 if_info.else_bb = else_bb;
2846 if_info.join_bb = join_bb;
2847 if_info.cond = cond;
2848 if_info.cond_earliest = cond_earliest;
2849 if_info.jump = jump;
2850 if_info.then_else_reversed = then_else_reversed;
2851 if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
2852 predictable_edge_p (then_edge));
2853
2854 /* Do the real work. */
2855
2856 if (noce_process_if_block (&if_info))
2857 return TRUE;
2858
2859 if (HAVE_conditional_move
2860 && cond_move_process_if_block (&if_info))
2861 return TRUE;
2862
2863 return FALSE;
2864 }
2865 \f
2866
2867 /* Merge the blocks and mark for local life update. */
2868
2869 static void
2870 merge_if_block (struct ce_if_block * ce_info)
2871 {
2872 basic_block test_bb = ce_info->test_bb; /* last test block */
2873 basic_block then_bb = ce_info->then_bb; /* THEN */
2874 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
2875 basic_block join_bb = ce_info->join_bb; /* join block */
2876 basic_block combo_bb;
2877
2878 /* All block merging is done into the lower block numbers. */
2879
2880 combo_bb = test_bb;
2881 df_set_bb_dirty (test_bb);
2882
2883 /* Merge any basic blocks to handle && and || subtests. Each of
2884 the blocks is on the fallthru path from the predecessor block. */
2885 if (ce_info->num_multiple_test_blocks > 0)
2886 {
2887 basic_block bb = test_bb;
2888 basic_block last_test_bb = ce_info->last_test_bb;
2889 basic_block fallthru = block_fallthru (bb);
2890
2891 do
2892 {
2893 bb = fallthru;
2894 fallthru = block_fallthru (bb);
2895 merge_blocks (combo_bb, bb);
2896 num_true_changes++;
2897 }
2898 while (bb != last_test_bb);
2899 }
2900
2901 /* Merge TEST block into THEN block. Normally the THEN block won't have a
2902 label, but it might if there were || tests. That label's count should be
2903 zero, and it normally should be removed. */
2904
2905 if (then_bb)
2906 {
2907 merge_blocks (combo_bb, then_bb);
2908 num_true_changes++;
2909 }
2910
2911 /* The ELSE block, if it existed, had a label. That label count
2912 will almost always be zero, but odd things can happen when labels
2913 get their addresses taken. */
2914 if (else_bb)
2915 {
2916 merge_blocks (combo_bb, else_bb);
2917 num_true_changes++;
2918 }
2919
2920 /* If there was no join block reported, that means it was not adjacent
2921 to the others, and so we cannot merge them. */
2922
2923 if (! join_bb)
2924 {
2925 rtx last = BB_END (combo_bb);
2926
2927 /* The outgoing edge for the current COMBO block should already
2928 be correct. Verify this. */
2929 if (EDGE_COUNT (combo_bb->succs) == 0)
2930 gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
2931 || (NONJUMP_INSN_P (last)
2932 && GET_CODE (PATTERN (last)) == TRAP_IF
2933 && (TRAP_CONDITION (PATTERN (last))
2934 == const_true_rtx)));
2935
2936 else
2937 /* There should still be something at the end of the THEN or ELSE
2938 blocks taking us to our final destination. */
2939 gcc_assert (JUMP_P (last)
2940 || (EDGE_SUCC (combo_bb, 0)->dest == EXIT_BLOCK_PTR
2941 && CALL_P (last)
2942 && SIBLING_CALL_P (last))
2943 || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
2944 && can_throw_internal (last)));
2945 }
2946
2947 /* The JOIN block may have had quite a number of other predecessors too.
2948 Since we've already merged the TEST, THEN and ELSE blocks, we should
2949 have only one remaining edge from our if-then-else diamond. If there
2950 is more than one remaining edge, it must come from elsewhere. There
2951 may be zero incoming edges if the THEN block didn't actually join
2952 back up (as with a call to a non-return function). */
2953 else if (EDGE_COUNT (join_bb->preds) < 2
2954 && join_bb != EXIT_BLOCK_PTR)
2955 {
2956 /* We can merge the JOIN cleanly and update the dataflow, then
2957 try again on this pass. */
2958 merge_blocks (combo_bb, join_bb);
2959 num_true_changes++;
2960 }
2961 else
2962 {
2963 /* We cannot merge the JOIN. */
2964
2965 /* The outgoing edge for the current COMBO block should already
2966 be correct. Verify this. */
2967 gcc_assert (single_succ_p (combo_bb)
2968 && single_succ (combo_bb) == join_bb);
2969
2970 /* Remove the jump and cruft from the end of the COMBO block. */
2971 if (join_bb != EXIT_BLOCK_PTR)
2972 tidy_fallthru_edge (single_succ_edge (combo_bb));
2973 }
2974
2975 num_updated_if_blocks++;
2976 }
2977 \f
2978 /* Find a block ending in a simple IF condition and try to transform it
2979 in some way. When converting a multi-block condition, put the new code
2980 in the first such block and delete the rest. Return a pointer to this
2981 first block if some transformation was done. Return NULL otherwise. */
2982
2983 static basic_block
2984 find_if_header (basic_block test_bb, int pass)
2985 {
2986 ce_if_block_t ce_info;
2987 edge then_edge;
2988 edge else_edge;
2989
2990 /* The kind of block we're looking for has exactly two successors. */
2991 if (EDGE_COUNT (test_bb->succs) != 2)
2992 return NULL;
2993
2994 then_edge = EDGE_SUCC (test_bb, 0);
2995 else_edge = EDGE_SUCC (test_bb, 1);
2996
2997 if (df_get_bb_dirty (then_edge->dest))
2998 return NULL;
2999 if (df_get_bb_dirty (else_edge->dest))
3000 return NULL;
3001
3002 /* Neither edge should be abnormal. */
3003 if ((then_edge->flags & EDGE_COMPLEX)
3004 || (else_edge->flags & EDGE_COMPLEX))
3005 return NULL;
3006
3007 /* Nor exit the loop. */
3008 if ((then_edge->flags & EDGE_LOOP_EXIT)
3009 || (else_edge->flags & EDGE_LOOP_EXIT))
3010 return NULL;
3011
3012 /* The THEN edge is canonically the one that falls through. */
3013 if (then_edge->flags & EDGE_FALLTHRU)
3014 ;
3015 else if (else_edge->flags & EDGE_FALLTHRU)
3016 {
3017 edge e = else_edge;
3018 else_edge = then_edge;
3019 then_edge = e;
3020 }
3021 else
3022 /* Otherwise this must be a multiway branch of some sort. */
3023 return NULL;
3024
3025 memset (&ce_info, '\0', sizeof (ce_info));
3026 ce_info.test_bb = test_bb;
3027 ce_info.then_bb = then_edge->dest;
3028 ce_info.else_bb = else_edge->dest;
3029 ce_info.pass = pass;
3030
3031 #ifdef IFCVT_INIT_EXTRA_FIELDS
3032 IFCVT_INIT_EXTRA_FIELDS (&ce_info);
3033 #endif
3034
3035 if (! reload_completed
3036 && noce_find_if_block (test_bb, then_edge, else_edge, pass))
3037 goto success;
3038
3039 if (HAVE_conditional_execution && reload_completed
3040 && cond_exec_find_if_block (&ce_info))
3041 goto success;
3042
3043 if (HAVE_trap
3044 && optab_handler (ctrap_optab, word_mode)->insn_code != CODE_FOR_nothing
3045 && find_cond_trap (test_bb, then_edge, else_edge))
3046 goto success;
3047
3048 if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
3049 && (! HAVE_conditional_execution || reload_completed))
3050 {
3051 if (find_if_case_1 (test_bb, then_edge, else_edge))
3052 goto success;
3053 if (find_if_case_2 (test_bb, then_edge, else_edge))
3054 goto success;
3055 }
3056
3057 return NULL;
3058
3059 success:
3060 if (dump_file)
3061 fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
3062 /* Set this so we continue looking. */
3063 cond_exec_changed_p = TRUE;
3064 return ce_info.test_bb;
3065 }
3066
3067 /* Check whether CUR_BB has two successor edges, one falling through to the
3068 next block and the other jumping to TARGET_BB, so we can tell whether the
3069 block is part of an && test or an || test. Return -1 if the block is not
3070 suitable, otherwise the number of non-note, non-jump, non-USE/CLOBBER insns. */
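/* For example (an illustrative sketch), for "if (a && b) x = 1;" the block
   testing A conditionally jumps to the ELSE/JOIN destination and otherwise
   falls through to the block testing B.  Calling this function on the
   A block with that destination as TARGET_BB returns its real insn count
   rather than -1, identifying it as part of an && chain.  */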
3071
3072 static int
3073 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
3074 {
3075 edge cur_edge;
3076 int fallthru_p = FALSE;
3077 int jump_p = FALSE;
3078 rtx insn;
3079 rtx end;
3080 int n_insns = 0;
3081 edge_iterator ei;
3082
3083 if (!cur_bb || !target_bb)
3084 return -1;
3085
3086 /* If no edges, obviously it doesn't jump or fallthru. */
3087 if (EDGE_COUNT (cur_bb->succs) == 0)
3088 return FALSE;
3089
3090 FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
3091 {
3092 if (cur_edge->flags & EDGE_COMPLEX)
3093 /* Anything complex isn't what we want. */
3094 return -1;
3095
3096 else if (cur_edge->flags & EDGE_FALLTHRU)
3097 fallthru_p = TRUE;
3098
3099 else if (cur_edge->dest == target_bb)
3100 jump_p = TRUE;
3101
3102 else
3103 return -1;
3104 }
3105
3106 if ((jump_p & fallthru_p) == 0)
3107 return -1;
3108
3109 /* Don't allow calls in the block, since this is used to group && and ||
3110 together for conditional execution support. ??? We should support
3111 conditional execution across calls for IA-64 some day, but
3112 for now it makes the code simpler. */
3113 end = BB_END (cur_bb);
3114 insn = BB_HEAD (cur_bb);
3115
3116 while (insn != NULL_RTX)
3117 {
3118 if (CALL_P (insn))
3119 return -1;
3120
3121 if (INSN_P (insn)
3122 && !JUMP_P (insn)
3123 && GET_CODE (PATTERN (insn)) != USE
3124 && GET_CODE (PATTERN (insn)) != CLOBBER)
3125 n_insns++;
3126
3127 if (insn == end)
3128 break;
3129
3130 insn = NEXT_INSN (insn);
3131 }
3132
3133 return n_insns;
3134 }
3135
3136 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
3137 block. If so, we'll try to convert the insns to not require the branch.
3138 Return TRUE if we were successful at converting the block. */
3139
3140 static int
3141 cond_exec_find_if_block (struct ce_if_block * ce_info)
3142 {
3143 basic_block test_bb = ce_info->test_bb;
3144 basic_block then_bb = ce_info->then_bb;
3145 basic_block else_bb = ce_info->else_bb;
3146 basic_block join_bb = NULL_BLOCK;
3147 edge cur_edge;
3148 basic_block next;
3149 edge_iterator ei;
3150
3151 ce_info->last_test_bb = test_bb;
3152
3153 /* We should only ever get here after reload,
3154 and only if we have conditional execution. */
3155 gcc_assert (HAVE_conditional_execution && reload_completed);
3156
3157 /* Discover if any fall through predecessors of the current test basic block
3158 were && tests (which jump to the else block) or || tests (which jump to
3159 the then block). */
3160 if (single_pred_p (test_bb)
3161 && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
3162 {
3163 basic_block bb = single_pred (test_bb);
3164 basic_block target_bb;
3165 int max_insns = MAX_CONDITIONAL_EXECUTE;
3166 int n_insns;
3167
3168 /* Determine if the preceding block is an && or || block. */
3169 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
3170 {
3171 ce_info->and_and_p = TRUE;
3172 target_bb = else_bb;
3173 }
3174 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
3175 {
3176 ce_info->and_and_p = FALSE;
3177 target_bb = then_bb;
3178 }
3179 else
3180 target_bb = NULL_BLOCK;
3181
3182 if (target_bb && n_insns <= max_insns)
3183 {
3184 int total_insns = 0;
3185 int blocks = 0;
3186
3187 ce_info->last_test_bb = test_bb;
3188
3189 /* Found at least one && or || block, look for more. */
3190 do
3191 {
3192 ce_info->test_bb = test_bb = bb;
3193 total_insns += n_insns;
3194 blocks++;
3195
3196 if (!single_pred_p (bb))
3197 break;
3198
3199 bb = single_pred (bb);
3200 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
3201 }
3202 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
3203
3204 ce_info->num_multiple_test_blocks = blocks;
3205 ce_info->num_multiple_test_insns = total_insns;
3206
3207 if (ce_info->and_and_p)
3208 ce_info->num_and_and_blocks = blocks;
3209 else
3210 ce_info->num_or_or_blocks = blocks;
3211 }
3212 }
3213
3214 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
3215 other than any || blocks which jump to the THEN block. */
3216 if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
3217 return FALSE;
3218
3219 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
3220 FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
3221 {
3222 if (cur_edge->flags & EDGE_COMPLEX)
3223 return FALSE;
3224 }
3225
3226 FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
3227 {
3228 if (cur_edge->flags & EDGE_COMPLEX)
3229 return FALSE;
3230 }
3231
3232 /* The THEN block of an IF-THEN combo must have zero or one successors. */
3233 if (EDGE_COUNT (then_bb->succs) > 0
3234 && (!single_succ_p (then_bb)
3235 || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3236 || (epilogue_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
3237 return FALSE;
3238
3239 /* If the THEN block has no successors, conditional execution can still
3240 make a conditional call. Don't do this unless the ELSE block has
3241 only one incoming edge -- the CFG manipulation is too ugly otherwise.
3242 Check for the last insn of the THEN block being an indirect jump, which
3243 is listed as not having any successors, but confuses the rest of the CE
3244 code processing. ??? We should fix this in the future. */
3245 if (EDGE_COUNT (then_bb->succs) == 0)
3246 {
3247 if (single_pred_p (else_bb))
3248 {
3249 rtx last_insn = BB_END (then_bb);
3250
3251 while (last_insn
3252 && NOTE_P (last_insn)
3253 && last_insn != BB_HEAD (then_bb))
3254 last_insn = PREV_INSN (last_insn);
3255
3256 if (last_insn
3257 && JUMP_P (last_insn)
3258 && ! simplejump_p (last_insn))
3259 return FALSE;
3260
3261 join_bb = else_bb;
3262 else_bb = NULL_BLOCK;
3263 }
3264 else
3265 return FALSE;
3266 }
3267
3268 /* If the THEN block's successor is the other edge out of the TEST block,
3269 then we have an IF-THEN combo without an ELSE. */
3270 else if (single_succ (then_bb) == else_bb)
3271 {
3272 join_bb = else_bb;
3273 else_bb = NULL_BLOCK;
3274 }
3275
3276 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
3277 has exactly one predecessor and one successor, and the outgoing edge
3278 is not complex, then we have an IF-THEN-ELSE combo. */
3279 else if (single_succ_p (else_bb)
3280 && single_succ (then_bb) == single_succ (else_bb)
3281 && single_pred_p (else_bb)
3282 && ! (single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3283 && ! (epilogue_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
3284 join_bb = single_succ (else_bb);
3285
3286 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
3287 else
3288 return FALSE;
3289
3290 num_possible_if_blocks++;
3291
3292 if (dump_file)
3293 {
3294 fprintf (dump_file,
3295 "\nIF-THEN%s block found, pass %d, start block %d "
3296 "[insn %d], then %d [%d]",
3297 (else_bb) ? "-ELSE" : "",
3298 ce_info->pass,
3299 test_bb->index,
3300 BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
3301 then_bb->index,
3302 BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
3303
3304 if (else_bb)
3305 fprintf (dump_file, ", else %d [%d]",
3306 else_bb->index,
3307 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
3308
3309 fprintf (dump_file, ", join %d [%d]",
3310 join_bb->index,
3311 BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
3312
3313 if (ce_info->num_multiple_test_blocks > 0)
3314 fprintf (dump_file, ", %d %s block%s last test %d [%d]",
3315 ce_info->num_multiple_test_blocks,
3316 (ce_info->and_and_p) ? "&&" : "||",
3317 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
3318 ce_info->last_test_bb->index,
3319 ((BB_HEAD (ce_info->last_test_bb))
3320 ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
3321 : -1));
3322
3323 fputc ('\n', dump_file);
3324 }
3325
3326 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
3327 first condition for free, since we've already asserted that there's a
3328 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
3329 we checked the FALLTHRU flag, those are already adjacent to the last IF
3330 block. */
3331 /* ??? As an enhancement, move the ELSE block. Have to deal with
3332 BLOCK notes, if by no other means than backing out the merge if they
3333 exist. Sticky enough I don't want to think about it now. */
3334 next = then_bb;
3335 if (else_bb && (next = next->next_bb) != else_bb)
3336 return FALSE;
3337 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
3338 {
3339 if (else_bb)
3340 join_bb = NULL;
3341 else
3342 return FALSE;
3343 }
3344
3345 /* Do the real work. */
3346
3347 ce_info->else_bb = else_bb;
3348 ce_info->join_bb = join_bb;
3349
3350 /* If we have && and || tests, try to first handle combining the && and ||
3351 tests into the conditional code, and if that fails, go back and handle
3352 it without the && and ||, which at present handles the && case if there
3353 was no ELSE block. */
3354 if (cond_exec_process_if_block (ce_info, TRUE))
3355 return TRUE;
3356
3357 if (ce_info->num_multiple_test_blocks)
3358 {
3359 cancel_changes (0);
3360
3361 if (cond_exec_process_if_block (ce_info, FALSE))
3362 return TRUE;
3363 }
3364
3365 return FALSE;
3366 }
3367
3368 /* Convert a branch over a trap, or a branch
3369 to a trap, into a conditional trap. */
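/* For example (a sketch; only on targets that provide a conditional trap
   pattern), the branch-over-trap sequence

     if (p == 0)
       __builtin_trap ();

   becomes a single trap insn conditional on "p == 0", after which the
   branch is deleted and the trap block removed.  */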
3370
3371 static int
3372 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
3373 {
3374 basic_block then_bb = then_edge->dest;
3375 basic_block else_bb = else_edge->dest;
3376 basic_block other_bb, trap_bb;
3377 rtx trap, jump, cond, cond_earliest, seq;
3378 enum rtx_code code;
3379
3380 /* Locate the block with the trap instruction. */
3381 /* ??? While we look for no successors, we really ought to allow
3382 EH successors. Need to fix merge_if_block for that to work. */
3383 if ((trap = block_has_only_trap (then_bb)) != NULL)
3384 trap_bb = then_bb, other_bb = else_bb;
3385 else if ((trap = block_has_only_trap (else_bb)) != NULL)
3386 trap_bb = else_bb, other_bb = then_bb;
3387 else
3388 return FALSE;
3389
3390 if (dump_file)
3391 {
3392 fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
3393 test_bb->index, trap_bb->index);
3394 }
3395
3396 /* If this is not a standard conditional jump, we can't parse it. */
3397 jump = BB_END (test_bb);
3398 cond = noce_get_condition (jump, &cond_earliest, false);
3399 if (! cond)
3400 return FALSE;
3401
3402 /* If the conditional jump is more than just a conditional jump, then
3403 we cannot do if-conversion on this block. */
3404 if (! onlyjump_p (jump))
3405 return FALSE;
3406
3407 /* We must be comparing objects whose modes imply the size. */
3408 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3409 return FALSE;
3410
3411 /* Reverse the comparison code, if necessary. */
3412 code = GET_CODE (cond);
3413 if (then_bb == trap_bb)
3414 {
3415 code = reversed_comparison_code (cond, jump);
3416 if (code == UNKNOWN)
3417 return FALSE;
3418 }
3419
3420 /* Attempt to generate the conditional trap. */
3421 seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
3422 copy_rtx (XEXP (cond, 1)),
3423 TRAP_CODE (PATTERN (trap)));
3424 if (seq == NULL)
3425 return FALSE;
3426
3427 /* Emit the new insns before cond_earliest. */
3428 emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));
3429
3430 /* Delete the trap block if possible. */
3431 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
3432 df_set_bb_dirty (test_bb);
3433 df_set_bb_dirty (then_bb);
3434 df_set_bb_dirty (else_bb);
3435
3436 if (EDGE_COUNT (trap_bb->preds) == 0)
3437 {
3438 delete_basic_block (trap_bb);
3439 num_true_changes++;
3440 }
3441
3442 /* Wire together the blocks again. */
3443 if (current_ir_type () == IR_RTL_CFGLAYOUT)
3444 single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
3445 else
3446 {
3447 rtx lab, newjump;
3448
3449 lab = JUMP_LABEL (jump);
3450 newjump = emit_jump_insn_after (gen_jump (lab), jump);
3451 LABEL_NUSES (lab) += 1;
3452 JUMP_LABEL (newjump) = lab;
3453 emit_barrier_after (newjump);
3454 }
3455 delete_insn (jump);
3456
3457 if (can_merge_blocks_p (test_bb, other_bb))
3458 {
3459 merge_blocks (test_bb, other_bb);
3460 num_true_changes++;
3461 }
3462
3463 num_updated_if_blocks++;
3464 return TRUE;
3465 }
3466
3467 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
3468 return it. */
3469
3470 static rtx
3471 block_has_only_trap (basic_block bb)
3472 {
3473 rtx trap;
3474
3475 /* We're not the exit block. */
3476 if (bb == EXIT_BLOCK_PTR)
3477 return NULL_RTX;
3478
3479 /* The block must have no successors. */
3480 if (EDGE_COUNT (bb->succs) > 0)
3481 return NULL_RTX;
3482
3483 /* The only instruction in the block must be the trap. */
3484 trap = first_active_insn (bb);
3485 if (! (trap == BB_END (bb)
3486 && GET_CODE (PATTERN (trap)) == TRAP_IF
3487 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
3488 return NULL_RTX;
3489
3490 return trap;
3491 }
3492
3493 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
3494 transformable, but not necessarily the other. There need be no
3495 JOIN block.
3496
3497 Return TRUE if we were successful at converting the block.
3498
3499 Cases we'd like to look at:
3500
3501 (1)
3502 if (test) goto over; // x not live
3503 x = a;
3504 goto label;
3505 over:
3506
3507 becomes
3508
3509 x = a;
3510 if (! test) goto label;
3511
3512 (2)
3513 if (test) goto E; // x not live
3514 x = big();
3515 goto L;
3516 E:
3517 x = b;
3518 goto M;
3519
3520 becomes
3521
3522 x = b;
3523 if (test) goto M;
3524 x = big();
3525 goto L;
3526
3527 (3) // This one's really only interesting for targets that can do
3528 // multiway branching, e.g. IA-64 BBB bundles. For other targets
3529 // it results in multiple branches on a cache line, which often
3530 // does not sit well with predictors.
3531
3532 if (test1) goto E; // predicted not taken
3533 x = a;
3534 if (test2) goto F;
3535 ...
3536 E:
3537 x = b;
3538 J:
3539
3540 becomes
3541
3542 x = a;
3543 if (test1) goto E;
3544 if (test2) goto F;
3545
3546 Notes:
3547
3548 (A) Don't do (2) if the branch is predicted against the block we're
3549 eliminating. Do it anyway if we can eliminate a branch; this requires
3550 that the sole successor of the eliminated block postdominate the other
3551 side of the if.
3552
3553 (B) With CE, on (3) we can steal from both sides of the if, creating
3554
3555 if (test1) x = a;
3556 if (!test1) x = b;
3557 if (test1) goto J;
3558 if (test2) goto F;
3559 ...
3560 J:
3561
3562 Again, this is most useful if J postdominates.
3563
3564 (C) CE substitutes for helpful life information.
3565
3566 (D) These heuristics need a lot of work. */
3567
3568 /* Tests for case 1 above. */
3569
3570 static int
3571 find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
3572 {
3573 basic_block then_bb = then_edge->dest;
3574 basic_block else_bb = else_edge->dest;
3575 basic_block new_bb;
3576 int then_bb_index;
3577
3578 /* If we are partitioning hot/cold basic blocks, we don't want to
3579 mess up unconditional or indirect jumps that cross between hot
3580 and cold sections.
3581
3582 Basic block partitioning may result in some jumps that appear to
3583 be optimizable (or blocks that appear to be mergeable), but which really
3584 must be left untouched (they are required to make it safely across
3585 partition boundaries). See the comments at the top of
3586 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
3587
3588 if ((BB_END (then_bb)
3589 && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
3590 || (BB_END (test_bb)
3591 && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
3592 || (BB_END (else_bb)
3593 && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
3594 NULL_RTX)))
3595 return FALSE;
3596
3597 /* THEN has one successor. */
3598 if (!single_succ_p (then_bb))
3599 return FALSE;
3600
3601 /* THEN does not fall through, but is not strange either. */
3602 if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
3603 return FALSE;
3604
3605 /* THEN has one predecessor. */
3606 if (!single_pred_p (then_bb))
3607 return FALSE;
3608
3609 /* THEN must do something. */
3610 if (forwarder_block_p (then_bb))
3611 return FALSE;
3612
3613 num_possible_if_blocks++;
3614 if (dump_file)
3615 fprintf (dump_file,
3616 "\nIF-CASE-1 found, start %d, then %d\n",
3617 test_bb->index, then_bb->index);
3618
3619 /* THEN is small. */
3620 if (! cheap_bb_rtx_cost_p (then_bb,
3621 COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
3622 predictable_edge_p (then_edge)))))
3623 return FALSE;
3624
3625 /* Registers set are dead, or are predicable. */
3626 if (! dead_or_predicable (test_bb, then_bb, else_bb,
3627 single_succ (then_bb), 1))
3628 return FALSE;
3629
3630 /* Conversion went ok, including moving the insns and fixing up the
3631 jump. Adjust the CFG to match. */
3632
3633 /* We can avoid creating a new basic block if then_bb is immediately
3634 followed by else_bb, i.e. deleting then_bb allows test_bb to fall
3635 thru to else_bb. */
3636
3637 if (then_bb->next_bb == else_bb
3638 && then_bb->prev_bb == test_bb
3639 && else_bb != EXIT_BLOCK_PTR)
3640 {
3641 redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
3642 new_bb = 0;
3643 }
3644 else
3645 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
3646 else_bb);
3647
3648 df_set_bb_dirty (test_bb);
3649 df_set_bb_dirty (else_bb);
3650
3651 then_bb_index = then_bb->index;
3652 delete_basic_block (then_bb);
3653
3654 /* Make the rest of the code believe that the newly created block is the
3655 block we removed. */
3656 if (new_bb)
3657 {
3658 df_bb_replace (then_bb_index, new_bb);
3659 /* Since the fallthru edge was redirected from test_bb to new_bb,
3660 we need to ensure that new_bb is in the same partition as
3661 test bb (you cannot fall through across section boundaries). */
3662 BB_COPY_PARTITION (new_bb, test_bb);
3663 }
3664
3665 num_true_changes++;
3666 num_updated_if_blocks++;
3667
3668 return TRUE;
3669 }
3670
3671 /* Test for case 2 above. */
3672
3673 static int
3674 find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
3675 {
3676 basic_block then_bb = then_edge->dest;
3677 basic_block else_bb = else_edge->dest;
3678 edge else_succ;
3679 rtx note;
3680
3681 /* If we are partitioning hot/cold basic blocks, we don't want to
3682 mess up unconditional or indirect jumps that cross between hot
3683 and cold sections.
3684
3685 Basic block partitioning may result in some jumps that appear to
3686 be optimizable (or blocks that appear to be mergeable), but which really
3687 must be left untouched (they are required to make it safely across
3688 partition boundaries). See the comments at the top of
3689 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
3690
3691 if ((BB_END (then_bb)
3692 && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
3693 || (BB_END (test_bb)
3694 && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
3695 || (BB_END (else_bb)
3696 && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
3697 NULL_RTX)))
3698 return FALSE;
3699
3700 /* ELSE has one successor. */
3701 if (!single_succ_p (else_bb))
3702 return FALSE;
3703 else
3704 else_succ = single_succ_edge (else_bb);
3705
3706 /* ELSE outgoing edge is not complex. */
3707 if (else_succ->flags & EDGE_COMPLEX)
3708 return FALSE;
3709
3710 /* ELSE has one predecessor. */
3711 if (!single_pred_p (else_bb))
3712 return FALSE;
3713
3714 /* THEN is not EXIT. */
3715 if (then_bb->index < NUM_FIXED_BLOCKS)
3716 return FALSE;
3717
3718 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
3719 note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
3720 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
3721 ;
3722 else if (else_succ->dest->index < NUM_FIXED_BLOCKS
3723 || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
3724 else_succ->dest))
3725 ;
3726 else
3727 return FALSE;
3728
3729 num_possible_if_blocks++;
3730 if (dump_file)
3731 fprintf (dump_file,
3732 "\nIF-CASE-2 found, start %d, else %d\n",
3733 test_bb->index, else_bb->index);
3734
3735 /* ELSE is small. */
3736 if (! cheap_bb_rtx_cost_p (else_bb,
3737 COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
3738 predictable_edge_p (else_edge)))))
3739 return FALSE;
3740
3741 /* Registers set are dead, or are predicable. */
3742 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
3743 return FALSE;
3744
3745 /* Conversion went ok, including moving the insns and fixing up the
3746 jump. Adjust the CFG to match. */
3747
3748 df_set_bb_dirty (test_bb);
3749 df_set_bb_dirty (then_bb);
3750 delete_basic_block (else_bb);
3751
3752 num_true_changes++;
3753 num_updated_if_blocks++;
3754
3755 /* ??? We may now fallthru from one of THEN's successors into a join
3756 block. Rerun cleanup_cfg? Examine things manually? Wait? */
3757
3758 return TRUE;
3759 }
3760
3761 /* A subroutine of dead_or_predicable called through for_each_rtx.
3762 Return 1 if a memory is found. */
3763
3764 static int
3765 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3766 {
3767 return MEM_P (*px);
3768 }
3769
3770 /* Used by the code above to perform the actual rtl transformations.
3771 Return TRUE if successful.
3772
3773 TEST_BB is the block containing the conditional branch. MERGE_BB
3774 is the block containing the code to manipulate. NEW_DEST is the
3775 label TEST_BB should be branching to after the conversion.
3776 REVERSEP is true if the sense of the branch should be reversed. */
3777
3778 static int
3779 dead_or_predicable (basic_block test_bb, basic_block merge_bb,
3780 basic_block other_bb, basic_block new_dest, int reversep)
3781 {
3782 rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX;
3783
3784 jump = BB_END (test_bb);
3785
3786 /* Find the extent of the real code in the merge block. */
3787 head = BB_HEAD (merge_bb);
3788 end = BB_END (merge_bb);
3789
3790 /* If merge_bb ends with a tablejump, predicating/moving insns
3791 into test_bb and then deleting merge_bb will result in the jumptable
3792 that follows merge_bb being removed along with merge_bb and then we
3793 get an unresolved reference to the jumptable. */
3794 if (tablejump_p (end, NULL, NULL))
3795 return FALSE;
3796
3797 if (LABEL_P (head))
3798 head = NEXT_INSN (head);
3799 if (NOTE_P (head))
3800 {
3801 if (head == end)
3802 {
3803 head = end = NULL_RTX;
3804 goto no_body;
3805 }
3806 head = NEXT_INSN (head);
3807 }
3808
3809 if (JUMP_P (end))
3810 {
3811 if (head == end)
3812 {
3813 head = end = NULL_RTX;
3814 goto no_body;
3815 }
3816 end = PREV_INSN (end);
3817 }
3818
3819 /* Disable handling dead code by conditional execution if the machine needs
3820 to do anything funny with the tests, etc. */
3821 #ifndef IFCVT_MODIFY_TESTS
3822 if (HAVE_conditional_execution)
3823 {
3824 /* In the conditional execution case, we have things easy. We know
3825 the condition is reversible. We don't have to check life info
3826 because we're going to conditionally execute the code anyway.
3827 All that's left is making sure the insns involved can actually
3828 be predicated. */
3829
3830 rtx cond, prob_val;
3831
3832 cond = cond_exec_get_condition (jump);
3833 if (! cond)
3834 return FALSE;
3835
3836 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3837 if (prob_val)
3838 prob_val = XEXP (prob_val, 0);
3839
      if (reversep)
        {
          enum rtx_code rev = reversed_comparison_code (cond, jump);
          if (rev == UNKNOWN)
            return FALSE;
          cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
                                 XEXP (cond, 1));
          if (prob_val)
            prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
        }

      if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
                                     prob_val, 0))
        goto cancel;

      earliest = jump;
    }
  else
#endif
    {
      /* In the non-conditional execution case, we have to verify that there
         are no trapping operations, no calls, no references to memory, and
         that any registers modified are dead at the branch site.  */

      rtx insn, cond, prev;
      bitmap merge_set, test_live, test_set;
      unsigned i, fail = 0;
      bitmap_iterator bi;

      /* Check for no calls or trapping operations.  */
      for (insn = head; ; insn = NEXT_INSN (insn))
        {
          if (CALL_P (insn))
            return FALSE;
          if (INSN_P (insn))
            {
              if (may_trap_p (PATTERN (insn)))
                return FALSE;

              /* ??? Even non-trapping memories such as stack frame
                 references must be avoided.  For stores, we collect
                 no lifetime info; for reads, we'd have to assert
                 true_dependence false against every store in the
                 TEST range.  */
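              /* A hypothetical instance: a load from a stack slot here
                 could read a location that some store between EARLIEST
                 and JUMP writes; hoisted above that store, the load would
                 see the old value.  Hence every MEM is rejected below.  */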
              if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
                return FALSE;
            }
          if (insn == end)
            break;
        }

      if (! any_condjump_p (jump))
        return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest, false);
      if (! cond)
        return FALSE;

      /* Collect:
         MERGE_SET = set of registers set in MERGE_BB
         TEST_LIVE = set of registers live at EARLIEST
         TEST_SET  = set of registers set between EARLIEST and the
                     end of the block.  */

      merge_set = BITMAP_ALLOC (&reg_obstack);
      test_live = BITMAP_ALLOC (&reg_obstack);
      test_set = BITMAP_ALLOC (&reg_obstack);

      /* ??? bb->local_set is only valid during calculate_global_regs_live,
         so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
         since we've already asserted that MERGE_BB is small.  */
      /* If we allocated new pseudos (e.g. in the conditional move
         expander called from noce_emit_cmove), we must resize the
         array first.  */
      if (max_regno < max_reg_num ())
        max_regno = max_reg_num ();

      FOR_BB_INSNS (merge_bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);
              df_ref *def_rec;
              for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
                {
                  df_ref def = *def_rec;
                  bitmap_set_bit (merge_set, DF_REF_REGNO (def));
                }
            }
        }

      /* For small register class machines, don't lengthen lifetimes of
         hard registers before reload.  */
      if (SMALL_REGISTER_CLASSES && ! reload_completed)
        {
          EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
            {
              if (i < FIRST_PSEUDO_REGISTER
                  && ! fixed_regs[i]
                  && ! global_regs[i])
                fail = 1;
            }
        }

      /* For TEST, we're interested in a range of insns, not a whole block.
         Moreover, we're interested in the registers live from OTHER_BB.  */

      /* The loop below takes the set of live registers after JUMP and
         computes the live set before EARLIEST.  */
      bitmap_copy (test_live, df_get_live_in (other_bb));
      df_simulate_initialize_backwards (test_bb, test_live);
      for (insn = jump; ; insn = prev)
        {
          if (INSN_P (insn))
            {
              df_simulate_find_defs (insn, test_set);
              df_simulate_one_insn_backwards (test_bb, insn, test_live);
            }
          prev = PREV_INSN (insn);
          if (insn == earliest)
            break;
        }

      /* We can perform the transformation if
           MERGE_SET & (TEST_SET | TEST_LIVE)
         and
           TEST_SET & DF_LIVE_IN (merge_bb)
         are empty.  */

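      /* Hypothetical instance: if MERGE_BB sets r1 while r1 is in TEST_LIVE,
         hoisting MERGE_BB would clobber a value the test range still needs;
         if the test range sets r2 while r2 is live into MERGE_BB, the hoisted
         code would read r2 before the test range has set it.  */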
      if (bitmap_intersect_p (test_set, merge_set)
          || bitmap_intersect_p (test_live, merge_set)
          || bitmap_intersect_p (test_set, df_get_live_in (merge_bb)))
        fail = 1;

      BITMAP_FREE (merge_set);
      BITMAP_FREE (test_live);
      BITMAP_FREE (test_set);

      if (fail)
        return FALSE;
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      new_label = block_label (new_dest);
      if (reversep
          ? ! invert_jump_1 (jump, new_label)
          : ! redirect_jump_1 (jump, new_label))
        goto cancel;
    }

  if (! apply_change_group ())
    return FALSE;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (jump, old_dest, new_label, 0, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
        {
          gcov_type count, probability;
          count = BRANCH_EDGE (test_bb)->count;
          BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
          FALLTHRU_EDGE (test_bb)->count = count;
          probability = BRANCH_EDGE (test_bb)->probability;
          BRANCH_EDGE (test_bb)->probability
            = FALLTHRU_EDGE (test_bb)->probability;
          FALLTHRU_EDGE (test_bb)->probability = probability;
          update_br_prob_note (test_bb);
        }
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      rtx insn;

      if (end == BB_END (merge_bb))
        BB_END (merge_bb) = PREV_INSN (head);

      /* PR 21767: When moving insns above a conditional branch, REG_EQUAL
         notes might become invalid.  */
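      /* A hedged illustration: a REG_EQUAL note may record a value that was
         only guaranteed on the path through MERGE_BB; above the branch that
         guarantee is gone, so only notes whose values are function-invariant
         are kept by the loop below.  */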
      insn = head;
      do
        {
          rtx note, set;

          if (! INSN_P (insn))
            continue;
          note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
          if (! note)
            continue;
          set = single_set (insn);
          if (!set || !function_invariant_p (SET_SRC (set)))
            remove_note (insn, note);
        } while (insn != end && (insn = NEXT_INSN (insn)));

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
         At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);
  return FALSE;
}
\f
/* Main entry point for all if-conversion.  */

static void
if_convert (void)
{
  basic_block bb;
  int pass;

  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;

  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  mark_loop_exit_edges ();
  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);

  /* Compute postdominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
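  /* As a sketch of why iterating helps: once an inner IF-THEN has been
     predicated, its blocks merge into a single block, which can leave the
     enclosing IF-THEN-ELSE in a simple enough shape for the next pass to
     convert as well.  */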
  pass = 0;
  do
    {
      df_analyze ();
      /* Only need to do DCE on the first pass.  */
      df_clear_flags (DF_LR_RUN_DCE);
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
        fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB (bb)
        {
          basic_block new_bb;
          while (!df_get_bb_dirty (bb)
                 && (new_bb = find_if_header (bb, pass)) != NULL)
            bb = new_bb;
        }

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
        print_rtl_with_bb (dump_file, get_insns ());
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (dump_file)
    fflush (dump_file);

  clear_aux_for_blocks ();

  /* If we allocated new pseudos, we must resize the array for sched1.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
               "\n%d possible IF blocks searched.\n",
               num_possible_if_blocks);
      fprintf (dump_file,
               "%d IF blocks converted.\n",
               num_updated_if_blocks);
      fprintf (dump_file,
               "%d true changes made.\n\n\n",
               num_true_changes);
    }

  if (optimize == 1)
    df_remove_problem (df_live);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
\f
static bool
gate_handle_if_conversion (void)
{
  return (optimize > 0)
    && dbg_cnt (if_conversion);
}

/* If-conversion and CFG cleanup.  */
static unsigned int
rest_of_handle_if_conversion (void)
{
  if (flag_if_conversion)
    {
      if (dump_file)
        dump_flow_info (dump_file, dump_flags);
      cleanup_cfg (CLEANUP_EXPENSIVE);
      if_convert ();
    }

  cleanup_cfg (0);
  return 0;
}

struct rtl_opt_pass pass_rtl_ifcvt =
{
 {
  RTL_PASS,
  "ce1",                                /* name */
  gate_handle_if_conversion,            /* gate */
  rest_of_handle_if_conversion,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func                        /* todo_flags_finish */
 }
};

static bool
gate_handle_if_after_combine (void)
{
  return optimize > 0 && flag_if_conversion
    && dbg_cnt (if_after_combine);
}

/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  */
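/* One illustrative scenario (hypothetical): combine may fold a THEN block's
   compare-and-arithmetic sequence into a single insn, bringing the block
   under MAX_CONDITIONAL_EXECUTE so that this "ce2" pass can predicate it.  */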
static unsigned int
rest_of_handle_if_after_combine (void)
{
  if_convert ();
  return 0;
}

struct rtl_opt_pass pass_if_after_combine =
{
 {
  RTL_PASS,
  "ce2",                                /* name */
  gate_handle_if_after_combine,         /* gate */
  rest_of_handle_if_after_combine,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

static bool
gate_handle_if_after_reload (void)
{
  return optimize > 0 && flag_if_conversion2
    && dbg_cnt (if_after_reload);
}

static unsigned int
rest_of_handle_if_after_reload (void)
{
  if_convert ();
  return 0;
}

struct rtl_opt_pass pass_if_after_reload =
{
 {
  RTL_PASS,
  "ce3",                                /* name */
  gate_handle_if_after_reload,          /* gate */
  rest_of_handle_if_after_reload,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT2,                            /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};