re PR rtl-optimization/10914 (unswitch loops does not work on powerpc)
[gcc.git] / gcc / ifcvt.c
1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "optabs.h"
39 #include "toplev.h"
40 #include "tm_p.h"
41 #include "cfgloop.h"
42 #include "target.h"
43
44
/* Provide zero-valued defaults for optional machine-description patterns,
   so the code below can test the HAVE_* macros uniformly instead of
   wrapping every use in #ifdef.  */
#ifndef HAVE_conditional_execution
#define HAVE_conditional_execution 0
#endif
#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif
#ifndef HAVE_trap
#define HAVE_trap 0
#endif
#ifndef HAVE_conditional_trap
#define HAVE_conditional_trap 0
#endif

/* Default budget of insns to convert to conditional execution; targets
   may override this in their configuration headers.  */
#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE   (BRANCH_COST + 1)
#endif

/* Typed null constants for edge and basic-block pointers.  */
#define NULL_EDGE	((struct edge_def *)NULL)
#define NULL_BLOCK	((struct basic_block_def *)NULL)
70
71 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
72 static int num_possible_if_blocks;
73
74 /* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
75 execution. */
76 static int num_updated_if_blocks;
77
78 /* # of basic blocks that were removed. */
79 static int num_removed_blocks;
80
81 /* Whether conditional execution changes were made. */
82 static int cond_exec_changed_p;
83
84 /* True if life data ok at present. */
85 static bool life_data_ok;
86
87 /* The post-dominator relation on the original block numbers. */
88 static dominance_info post_dominators;
89
90 /* Forward references. */
91 static int count_bb_insns (basic_block);
92 static rtx first_active_insn (basic_block);
93 static rtx last_active_insn (basic_block, int);
94 static int seq_contains_jump (rtx);
95 static basic_block block_fallthru (basic_block);
96 static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
97 static rtx cond_exec_get_condition (rtx);
98 static int cond_exec_process_if_block (ce_if_block_t *, int);
99 static rtx noce_get_condition (rtx, rtx *);
100 static int noce_operand_ok (rtx);
101 static int noce_process_if_block (ce_if_block_t *);
102 static int process_if_block (ce_if_block_t *);
103 static void merge_if_block (ce_if_block_t *);
104 static int find_cond_trap (basic_block, edge, edge);
105 static basic_block find_if_header (basic_block, int);
106 static int block_jumps_and_fallthru_p (basic_block, basic_block);
107 static int find_if_block (ce_if_block_t *);
108 static int find_if_case_1 (basic_block, edge, edge);
109 static int find_if_case_2 (basic_block, edge, edge);
110 static int find_memory (rtx *, void *);
111 static int dead_or_predicable (basic_block, basic_block, basic_block,
112 basic_block, int);
113 static void noce_emit_move_insn (rtx, rtx);
114 static rtx block_has_only_trap (basic_block);
115 static void mark_loop_exit_edges (void);
116 \f
117 /* Sets EDGE_LOOP_EXIT flag for all loop exits. */
118 static void
119 mark_loop_exit_edges ()
120 {
121 struct loops loops;
122 basic_block bb;
123 edge e;
124
125 flow_loops_find (&loops, LOOP_TREE);
126
127 if (loops.num > 1)
128 {
129 FOR_EACH_BB (bb)
130 {
131 for (e = bb->succ; e; e = e->succ_next)
132 {
133 if (find_common_loop (bb->loop_father, e->dest->loop_father)
134 != bb->loop_father)
135 e->flags |= EDGE_LOOP_EXIT;
136 else
137 e->flags &= ~EDGE_LOOP_EXIT;
138 }
139 }
140 }
141
142 flow_loops_free (&loops);
143 }
144
145 /* Count the number of non-jump active insns in BB. */
146
147 static int
148 count_bb_insns (basic_block bb)
149 {
150 int count = 0;
151 rtx insn = bb->head;
152
153 while (1)
154 {
155 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
156 count++;
157
158 if (insn == bb->end)
159 break;
160 insn = NEXT_INSN (insn);
161 }
162
163 return count;
164 }
165
166 /* Return the first non-jump active insn in the basic block. */
167
168 static rtx
169 first_active_insn (basic_block bb)
170 {
171 rtx insn = bb->head;
172
173 if (GET_CODE (insn) == CODE_LABEL)
174 {
175 if (insn == bb->end)
176 return NULL_RTX;
177 insn = NEXT_INSN (insn);
178 }
179
180 while (GET_CODE (insn) == NOTE)
181 {
182 if (insn == bb->end)
183 return NULL_RTX;
184 insn = NEXT_INSN (insn);
185 }
186
187 if (GET_CODE (insn) == JUMP_INSN)
188 return NULL_RTX;
189
190 return insn;
191 }
192
/* Return the last non-jump active insn in the basic block.  */
194
195 static rtx
196 last_active_insn (basic_block bb, int skip_use_p)
197 {
198 rtx insn = bb->end;
199 rtx head = bb->head;
200
201 while (GET_CODE (insn) == NOTE
202 || GET_CODE (insn) == JUMP_INSN
203 || (skip_use_p
204 && GET_CODE (insn) == INSN
205 && GET_CODE (PATTERN (insn)) == USE))
206 {
207 if (insn == head)
208 return NULL_RTX;
209 insn = PREV_INSN (insn);
210 }
211
212 if (GET_CODE (insn) == CODE_LABEL)
213 return NULL_RTX;
214
215 return insn;
216 }
217
218 /* It is possible, especially when having dealt with multi-word
219 arithmetic, for the expanders to have emitted jumps. Search
220 through the sequence and return TRUE if a jump exists so that
221 we can abort the conversion. */
222
223 static int
224 seq_contains_jump (rtx insn)
225 {
226 while (insn)
227 {
228 if (GET_CODE (insn) == JUMP_INSN)
229 return 1;
230 insn = NEXT_INSN (insn);
231 }
232 return 0;
233 }
234
235 static basic_block
236 block_fallthru (basic_block bb)
237 {
238 edge e;
239
240 for (e = bb->succ;
241 e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
242 e = e->succ_next)
243 ;
244
245 return (e) ? e->dest : NULL_BLOCK;
246 }
247 \f
248 /* Go through a bunch of insns, converting them to conditional
249 execution format if possible. Return TRUE if all of the non-note
250 insns were processed. */
251
static int
cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
			 /* if block information */rtx start,
			 /* first insn to look at */rtx end,
			 /* last insn to look at */rtx test,
			 /* conditional execution test */rtx prob_val,
			 /* probability of branch taken. */int mod_ok)
{
  /* Set once an insn modifies TEST; any further active insn after that
     invalidates the whole conversion (unless MOD_OK allowed it).  */
  int must_be_last = FALSE;
  rtx insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
	goto insn_done;

      if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
	abort ();

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
	{
	  /* ??? Ug.  Actually unlinking the thing is problematic,
	     given what we'd have to coordinate with our callers.
	     Instead, turn the insn into a deleted note in place.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	  NOTE_SOURCE_FILE (insn) = 0;
	  goto insn_done;
	}

      /* Last insn wasn't last?  */
      if (must_be_last)
	return FALSE;

      if (modified_in_p (test, insn))
	{
	  if (!mod_ok)
	    return FALSE;
	  must_be_last = TRUE;
	}

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
	 two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
	{
	  /* The two tests must be comparable for AND to make sense.  */
	  if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
	    return FALSE;

	  xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
			       COND_EXEC_TEST (pattern));
	  pattern = COND_EXEC_CODE (pattern);
	}

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
	return FALSE;
#endif

      /* Queue the change; the caller commits via apply_change_group.  */
      validate_change (insn, &PATTERN (insn), pattern, 1);

      /* Give calls a branch-probability note since they are now
	 conditional.  */
      if (GET_CODE (insn) == CALL_INSN && prob_val)
	validate_change (insn, &REG_NOTES (insn),
			 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
					  REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
	break;
    }

  return TRUE;
}
339
340 /* Return the condition for a jump. Do not do any special processing. */
341
static rtx
cond_exec_get_condition (rtx jump)
{
  rtx test_if, cond;

  /* Only a simple conditional jump carries a condition we can use.  */
  if (any_condjump_p (jump))
    test_if = SET_SRC (pc_set (jump));
  else
    return NULL_RTX;
  cond = XEXP (test_if, 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
      && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
    {
      enum rtx_code rev = reversed_comparison_code (cond, jump);
      if (rev == UNKNOWN)
	return NULL_RTX;

      /* Rebuild the comparison with the reversed code and the same
	 operands.  */
      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
			     XEXP (cond, 1));
    }

  return cond;
}
368
369 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
370 to conditional execution. Return TRUE if we were successful at
371 converting the block. */
372
static int
cond_exec_process_if_block (ce_if_block_t * ce_info,
			    /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
  rtx then_start;		/* first insn in THEN block */
  rtx then_end;			/* last insn + 1 in THEN block */
  rtx else_start = NULL_RTX;	/* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;	/* last insn + 1 in ELSE block */
  int max;			/* max # of insns to convert. */
  int then_mod_ok;		/* whether conditional mods are ok in THEN */
  rtx true_expr;		/* test for else block insns */
  rtx false_expr;		/* test for then block insns */
  rtx true_prob_val;		/* probability of else block */
  rtx false_prob_val;		/* probability of then block */
  int n_insns;
  enum rtx_code false_code;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      /* Retry with only the final test block; forget the earlier ones.  */
      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (test_bb->end);
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (test_bb->end))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      /* An IF-THEN-ELSE gets twice the insn budget of an IF-THEN.  */
      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, test_bb->end);
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
				 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  /* Derive the branch-not-taken probability from the branch-taken note,
     if the jump carries one.  */
  true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
  if (true_prob_val)
    {
      true_prob_val = XEXP (true_prob_val, 0);
      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
    }
  else
    false_prob_val = NULL_RTX;

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      /* Without a reversible condition we cannot predicate the
	 intermediate test blocks.  */
      if (! false_expr)
	goto fail;

      do
	{
	  rtx start, end;
	  rtx t, f;

	  bb = block_fallthru (bb);
	  start = first_active_insn (bb);
	  end = last_active_insn (bb, TRUE);
	  /* Insns in an intermediate test block execute only when the
	     accumulated condition so far is false, and they must not
	     modify that condition (hence mod_ok == FALSE).  */
	  if (start
	      && ! cond_exec_process_insns (ce_info, start, end, false_expr,
					    false_prob_val, FALSE))
	    goto fail;

	  /* If the conditional jump is more than just a conditional jump, then
	     we can not do conditional execution conversion on this block.  */
	  if (! onlyjump_p (bb->end))
	    goto fail;

	  /* Find the conditional jump and isolate the test.  */
	  t = cond_exec_get_condition (bb->end);
	  if (! t)
	    goto fail;

	  f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
			      GET_MODE (t),
			      XEXP (t, 0),
			      XEXP (t, 1));

	  /* Accumulate this block's test into the running TRUE/FALSE
	     conditions; AND/IOR are duals of each other here.  */
	  if (ce_info->and_and_p)
	    {
	      t = gen_rtx_AND (GET_MODE (t), true_expr, t);
	      f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
	    }
	  else
	    {
	      t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
	      f = gen_rtx_AND (GET_MODE (t), false_expr, f);
	    }

	  /* If the machine description needs to modify the tests, such as
	     setting a conditional execution register from a comparison, it can
	     do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
	  IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

	  /* See if the conversion failed */
	  if (!t || !f)
	    goto fail;
#endif

	  true_expr = t;
	  false_expr = f;
	}
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
	  || ! cond_exec_process_insns (ce_info, then_start, then_end,
					false_expr, false_prob_val,
					then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
				    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
	     n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  */
  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
589 \f
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* Block containing the conditional jump being converted.  */
  basic_block test_bb;
  /* Insns assigning A and B to X (presumably the THEN and ELSE arms;
     confirm against noce_process_if_block, not visible here).  */
  rtx insn_a, insn_b;
  /* Destination register X and the two candidate source values.  */
  rtx x, a, b;
  /* The conditional jump, its condition, and the earliest insn at which
     the condition is valid; new insns go before cond_earliest.  */
  rtx jump, cond, cond_earliest;
};
603
604 static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
605 static int noce_try_store_flag (struct noce_if_info *);
606 static int noce_try_addcc (struct noce_if_info *);
607 static int noce_try_store_flag_constants (struct noce_if_info *);
608 static int noce_try_store_flag_mask (struct noce_if_info *);
609 static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
610 rtx, rtx, rtx);
611 static int noce_try_cmove (struct noce_if_info *);
612 static int noce_try_cmove_arith (struct noce_if_info *);
613 static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
614 static int noce_try_minmax (struct noce_if_info *);
615 static int noce_try_abs (struct noce_if_info *);
616
617 /* Helper function for noce_try_store_flag*. */
618
static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
		      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  /* The generic emit_store_flag path needs general operands; remember
     whether the condition's operands disqualify it.  */
  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
		  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx tmp;

      /* Hand-build (set x (code op0 op1)) and see if the target
	 recognizes it as-is.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
			    XEXP (cond, 1));
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
	{
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insn (tmp);

	  if_info->cond_earliest = if_info->jump;

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  /* Fall back to the generic expander; the fifth argument flags an
     unsigned comparison.  */
  return emit_store_flag (x, code, XEXP (cond, 0),
			  XEXP (cond, 1), VOIDmode,
			  (code == LTU || code == LEU
			   || code == GEU || code == GTU), normalize);
}
676
/* Emit insn(s) to move Y into X.  A plain destination uses a simple
   move; a STRICT_LOW_PART destination is handled via store_bit_field
   so only the low part is written.  */
static void
noce_emit_move_insn (rtx x, rtx y)
{
  enum machine_mode outmode, inmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      emit_move_insn (x, y);
      return;
    }

  /* X is (strict_low_part (subreg ...)); store Y into the field of the
     underlying register that the subreg designates.  */
  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  inmode = GET_MODE (inner);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
		   GET_MODE_BITSIZE (inmode));
}
699
700 /* Convert "if (test) x = 1; else x = 0".
701
702 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
703 tried in noce_try_store_flag_constants after noce_try_cmove has had
704 a go at the conversion. */
705
static int
noce_try_store_flag (struct noce_if_info *if_info)
{
  int reversep;
  rtx target, seq;

  /* Recognize x = STORE_FLAG_VALUE / x = 0 in either arm order; the
     swapped order requires the condition to be reversible.  */
  if (GET_CODE (if_info->b) == CONST_INT
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
      && if_info->a == const0_rtx)
    reversep = 0;
  else if (if_info->b == const0_rtx
	   && GET_CODE (if_info->a) == CONST_INT
	   && INTVAL (if_info->a) == STORE_FLAG_VALUE
	   && (reversed_comparison_code (if_info->cond, if_info->jump)
	       != UNKNOWN))
    reversep = 1;
  else
    return FALSE;

  start_sequence ();

  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
  if (target)
    {
      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      /* Splice the generated sequence in just before the jump.  */
      seq = get_insns ();
      end_sequence ();
      emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));

      return TRUE;
    }
  else
    {
      end_sequence ();
      return FALSE;
    }
}
745
746 /* Convert "if (test) x = a; else x = b", for A and B constant. */
747
static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  enum machine_mode mode;

  if (! no_new_pseudos
      && GET_CODE (if_info->a) == CONST_INT
      && GET_CODE (if_info->b) == CONST_INT)
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      /* Make sure we can represent the difference between the two values.
	 (I.e. itrue - ifalse must not overflow HOST_WIDE_INT; the sign of
	 the subtraction must agree with what the operand signs imply.)  */
      if ((itrue - ifalse > 0)
	  != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
	return FALSE;

      diff = trunc_int_for_mode (itrue - ifalse, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
		     != UNKNOWN);

      /* Choose how the store-flag result should be normalized (0, 1, or
	 -1) and whether the condition must be reversed, picking the
	 cheapest form the target supports.  */
      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
	       && (STORE_FLAG_VALUE == 1
		   || BRANCH_COST >= 2))
	normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
	       && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
	normalize = 1, reversep = 1;
      else if (itrue == -1
	       && (STORE_FLAG_VALUE == -1
		   || BRANCH_COST >= 2))
	normalize = -1;
      else if (ifalse == -1 && can_reverse
	       && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
	normalize = -1, reversep = 1;
      else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
	       || BRANCH_COST >= 3)
	normalize = -1;
      else
	return FALSE;

      if (reversep)
	{
	  /* Swap the arms to match the reversed condition.  */
	  tmp = itrue; itrue = ifalse; ifalse = tmp;
	  diff = trunc_int_for_mode (-diff, mode);
	}

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      /* if (test) x = 3; else x = 4;
	 =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	{
	  target = expand_simple_binop (mode,
					(diff == STORE_FLAG_VALUE
					 ? PLUS : MINUS),
					GEN_INT (ifalse), target, if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = 8; else x = 0;
	 =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
	{
	  target = expand_simple_binop (mode, ASHIFT,
					target, GEN_INT (tmp), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = -1; else x = b;
	 =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
	{
	  target = expand_simple_binop (mode, IOR,
					target, GEN_INT (ifalse), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;  */
      else
	{
	  target = expand_simple_binop (mode, AND,
					target, GEN_INT (diff), if_info->x, 0,
					OPTAB_WIDEN);
	  if (target)
	    target = expand_simple_binop (mode, PLUS,
					  target, GEN_INT (ifalse),
					  if_info->x, 0, OPTAB_WIDEN);
	}

      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = get_insns ();
      end_sequence ();

      /* The expanders may have emitted a jump; that aborts the
	 conversion.  */
      if (seq_contains_jump (seq))
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));

      return TRUE;
    }

  return FALSE;
}
876
877 /* Convert "if (test) foo++" into "foo += (test != 0)", and
878 similarly for "foo--". */
879
static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target, seq;
  int subtract, normalize;

  /* The pattern is "if (test) x = x + C": B must be X itself and A a
     PLUS with X as the first operand; the condition must be reversible
     because the increment happens when the branch is NOT taken.  */
  if (! no_new_pseudos
      /* Should be no `else' case to worry about.  */
      && if_info->b == if_info->x
      && GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
	  != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
	  && general_operand (XEXP (cond, 1), VOIDmode))
	{
	  start_sequence ();
	  target = emit_conditional_add (if_info->x, code,
					 XEXP (cond, 0), XEXP (cond, 1),
					 VOIDmode,
					 if_info->b, XEXP (if_info->a, 1),
					 GET_MODE (if_info->x),
					 (code == LTU || code == GEU
					  || code == LEU || code == GTU));
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before_setloc (seq, if_info->jump,
				      INSN_LOCATOR (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}

      /* If that fails, construct conditional increment or decrement using
	 setcc.  */
      if (BRANCH_COST >= 2
	  && (XEXP (if_info->a, 1) == const1_rtx
	      || XEXP (if_info->a, 1) == constm1_rtx))
	{
	  start_sequence ();
	  /* Pick how to combine the store-flag value with X so the sum
	     equals X + C exactly when the flag is set.  */
	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 0, normalize = 0;
	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 1, normalize = 0;
	  else
	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));


	  target = noce_emit_store_flag (if_info,
					 gen_reg_rtx (GET_MODE (if_info->x)),
					 1, normalize);

	  if (target)
	    target = expand_simple_binop (GET_MODE (if_info->x),
					  subtract ? MINUS : PLUS,
					  if_info->x, target, if_info->x,
					  0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = get_insns ();
	      end_sequence ();

	      /* The expanders may have emitted a jump; abort if so.  */
	      if (seq_contains_jump (seq))
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				      INSN_LOCATOR (if_info->insn_a));

	      return TRUE;
	    }
	  end_sequence ();
	}
    }

  return FALSE;
}
969
970 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
971
static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;

  /* Accept "if (test) x = 0" in either arm: A == 0 with B == X as-is,
     or B == 0 with A == X when the condition can be reversed.  Note the
     assignment to REVERSEP inside the condition.  */
  reversep = 0;
  if (! no_new_pseudos
      && (BRANCH_COST >= 2
	  || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
	   && rtx_equal_p (if_info->b, if_info->x))
	  || ((reversep = (reversed_comparison_code (if_info->cond,
						     if_info->jump)
			   != UNKNOWN))
	      && if_info->b == const0_rtx
	      && rtx_equal_p (if_info->a, if_info->x))))
    {
      start_sequence ();
      /* Materialize -(test == 0) in a fresh pseudo, then AND it into X:
	 the mask is all-ones when X is kept and zero when cleared.  */
      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),
				     reversep, -1);
      if (target)
	target = expand_simple_binop (GET_MODE (if_info->x), AND,
				      if_info->x, target, if_info->x, 0,
				      OPTAB_WIDEN);

      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = get_insns ();
	  end_sequence ();

	  /* The expanders may have emitted a jump; abort if so.  */
	  if (seq_contains_jump (seq))
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				  INSN_LOCATOR (if_info->insn_a));

	  return TRUE;
	}

      end_sequence ();
    }

  return FALSE;
}
1021
1022 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
1023
static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx tmp;

      /* Hand-build (set x (if_then_else (code cmp_a cmp_b) vtrue vfalse))
	 and ask the target whether it recognizes it.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
	{
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insn (tmp);

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    return NULL_RTX;

#if HAVE_conditional_move
  /* Last argument flags an unsigned comparison.  */
  return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
				vtrue, vfalse, GET_MODE (x),
				(code == LTU || code == GEU
				 || code == LEU || code == GTU));
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}
1074
1075 /* Try only simple constants and registers here. More complex cases
1076 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1077 has had a go at it. */
1078
1079 static int
1080 noce_try_cmove (struct noce_if_info *if_info)
1081 {
1082 enum rtx_code code;
1083 rtx target, seq;
1084
1085 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1086 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1087 {
1088 start_sequence ();
1089
1090 code = GET_CODE (if_info->cond);
1091 target = noce_emit_cmove (if_info, if_info->x, code,
1092 XEXP (if_info->cond, 0),
1093 XEXP (if_info->cond, 1),
1094 if_info->a, if_info->b);
1095
1096 if (target)
1097 {
1098 if (target != if_info->x)
1099 noce_emit_move_insn (if_info->x, target);
1100
1101 seq = get_insns ();
1102 end_sequence ();
1103 emit_insn_before_setloc (seq, if_info->jump,
1104 INSN_LOCATOR (if_info->insn_a));
1105 return TRUE;
1106 }
1107 else
1108 {
1109 end_sequence ();
1110 return FALSE;
1111 }
1112 }
1113
1114 return FALSE;
1115 }
1116
/* Try more complex cases involving conditional_move: here A and/or B
   may be arbitrary side-effect-free expressions.  Both values are
   computed unconditionally and a conditional move selects between
   them.  Returns TRUE on success, having emitted the replacement
   insns before IF_INFO->jump.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx insn_a, insn_b;
  rtx tmp, target;
  int is_mem = 0;	/* Nonzero: cmov the addresses, then load.  */
  enum rtx_code code;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  if (! no_new_pseudos && cse_not_expected
      && GET_CODE (a) == MEM && GET_CODE (b) == MEM
      && BRANCH_COST >= 5)
    {
      /* Strip down to the addresses; X becomes a fresh pointer pseudo
	 and the actual load is emitted after the cmove below.  */
      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (Pmode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_p (a) || may_trap_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
	if (test)
	  x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Possibly rearrange operands to make things come out more natural.
     Prefer B to be the operand that is X itself or already simple, so
     that only A needs copying; swapping requires reversing the
     condition, which must therefore be possible.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
	reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
	reversep = 1;

      if (reversep)
	{
	  code = reversed_comparison_code (if_info->cond, if_info->jump);
	  tmp = a, a = b, b = tmp;
	  tmp = insn_a, insn_a = insn_b, insn_b = tmp;
	}
    }

  start_sequence ();

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx set;

      if (no_new_pseudos)
	goto end_seq_and_fail;

      if (is_mem)
	{
	  tmp = gen_reg_rtx (GET_MODE (a));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
	}
      else if (! insn_a)
	goto end_seq_and_fail;
      else
	{
	  /* Re-emit a copy of the original insn with its destination
	     redirected into a fresh pseudo.  */
	  a = gen_reg_rtx (GET_MODE (a));
	  tmp = copy_rtx (insn_a);
	  set = single_set (tmp);
	  SET_DEST (set) = a;
	  tmp = emit_insn (PATTERN (tmp));
	}
      /* The rewritten insn must still match an insn pattern.  */
      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx set;

      if (no_new_pseudos)
	goto end_seq_and_fail;

      if (is_mem)
	{
	  tmp = gen_reg_rtx (GET_MODE (b));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
	}
      else if (! insn_b)
	goto end_seq_and_fail;
      else
	{
	  /* Same copy-the-original-insn trick as for A above.  */
	  b = gen_reg_rtx (GET_MODE (b));
	  tmp = copy_rtx (insn_b);
	  set = single_set (tmp);
	  SET_DEST (set) = b;
	  tmp = emit_insn (PATTERN (tmp));
	}
      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
			    XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory for above, emit the load now.  */
  if (is_mem)
    {
      tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  The new MEM may only carry a
	 flag or alias set when both sources agree on it.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
	MEM_VOLATILE_P (tmp) = 1;
      if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
	MEM_IN_STRUCT_P (tmp) = 1;
      if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
	MEM_SCALAR_P (tmp) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
	set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
      set_mem_align (tmp,
		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      noce_emit_move_insn (if_info->x, tmp);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  tmp = get_insns ();
  end_sequence ();
  emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;

  /* Failure exit: discard whatever partial sequence we emitted.  */
 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
1272
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.
   TARGET is the operand (A or B) we want mentioned; *EARLIEST is set
   to the earliest insn at which the returned condition is valid.
   Returns the condition rtx, or NULL on failure.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
			rtx *earliest)
{
  rtx cond, set, insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  /* The branch goes to the label when the condition is FALSE iff the
     false arm of the if_then_else is the labelled one.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
	replacing "if x < 4" with "if x <= 3".  We can undo that (or
	make equivalent types of changes) to get the constants we need
	if they're off by one in the right direction.  */

  if (GET_CODE (target) == CONST_INT)
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = PREV_INSN (if_info->cond_earliest);
      if (prev_insn
	  && INSN_P (prev_insn)
	  && GET_CODE (PATTERN (prev_insn)) == SET)
	{
	  rtx src = find_reg_equal_equiv_note (prev_insn);
	  if (!src)
	    src = SET_SRC (PATTERN (prev_insn));
	  if (GET_CODE (src) == CONST_INT)
	    {
	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
		op_a = src;
	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
		op_b = src;

	      /* Canonicalize so that any constant ends up in op_b.  */
	      if (GET_CODE (op_a) == CONST_INT)
		{
		  rtx tmp = op_a;
		  op_a = op_b;
		  op_b = tmp;
		  code = swap_condition (code);
		}
	    }
	}

      /* Now, look to see if we can get the right constant by
	 adjusting the conditional.  Each case undoes an off-by-one
	 canonicalization, e.g. LT n+1 <=> LE n.  */
      if (GET_CODE (op_b) == CONST_INT)
	{
	  HOST_WIDE_INT desired_val = INTVAL (target);
	  HOST_WIDE_INT actual_val = INTVAL (op_b);

	  switch (code)
	    {
	    case LT:
	      if (actual_val == desired_val + 1)
		{
		  code = LE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case LE:
	      if (actual_val == desired_val - 1)
		{
		  code = LT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GT:
	      if (actual_val == desired_val - 1)
		{
		  code = GE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GE:
	      if (actual_val == desired_val + 1)
		{
		  code = GT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    default:
	      break;
	    }
	}

      /* If we made any changes, generate a new conditional that is
	 equivalent to what we started with, but has the right
	 constants in it.  */
      if (code != GET_CODE (if_info->cond)
	  || op_a != XEXP (if_info->cond, 0)
	  || op_b != XEXP (if_info->cond, 1))
	{
	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
	  *earliest = if_info->cond_earliest;
	  return cond;
	}
    }

  /* Fall back on a full canonicalization that tracks TARGET.  */
  cond = canonicalize_condition (if_info->jump, cond, reverse,
				 earliest, target, false);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (modified_in_p (if_info->a, insn)
	    || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
1421
/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.
   Returns TRUE on success, having emitted the min/max expansion
   before the jump and updated IF_INFO's condition fields.  */

static int
noce_try_minmax (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq;
  enum rtx_code code, op;
  int unsignedp;

  /* ??? Can't guarantee that expand_binop won't create pseudos.  */
  if (no_new_pseudos)
    return FALSE;

  /* ??? Reject modes with NaNs or signed zeros since we don't know how
     they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
     to get the target to tell us...  */
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
      || HONOR_NANS (GET_MODE (if_info->x)))
    return FALSE;

  /* Get a condition in which A is mentioned, since the plain
     simplified condition may not involve A or B at all.  */
  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect, and canonicalize
     the comparison code so that A is the first operand.  */
  code = GET_CODE (cond);
  if (rtx_equal_p (XEXP (cond, 0), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
	return FALSE;
    }
  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
	return FALSE;
      code = swap_condition (code);
    }
  else
    return FALSE;

  /* Determine what sort of operation this is.  Note that the code is for
     a taken branch, so the code->operation mapping appears backwards.  */
  switch (code)
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      op = SMAX;
      unsignedp = 0;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      op = SMIN;
      unsignedp = 0;
      break;
    case LTU:
    case LEU:
      op = UMAX;
      unsignedp = 1;
      break;
    case GTU:
    case GEU:
      op = UMIN;
      unsignedp = 1;
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_simple_binop (GET_MODE (if_info->x), op,
				if_info->a, if_info->b,
				if_info->x, unsignedp, OPTAB_WIDEN);
  if (! target)
    {
      end_sequence ();
      return FALSE;
    }
  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = get_insns ();
  end_sequence ();

  /* Reject expansions that themselves contain jumps.  */
  if (seq_contains_jump (seq))
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
1520
/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc.
   Returns TRUE on success, having emitted the abs (or negated abs)
   expansion before the jump.  */

static int
noce_try_abs (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq, a, b, c;
  int negate;	/* Nonzero when the final result must be negated (NABS).  */

  /* ??? Can't guarantee that expand_binop won't create pseudos.  */
  if (no_new_pseudos)
    return FALSE;

  /* Recognize A and B as constituting an ABS or NABS: one arm must be
     the negation of the other.  */
  a = if_info->a;
  b = if_info->b;
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
    negate = 0;
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
    }
  else
    return FALSE;

  cond = noce_get_alt_condition (if_info, b, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect; C is the operand
     compared against B.  */
  if (rtx_equal_p (XEXP (cond, 0), b))
    c = XEXP (cond, 1);
  else if (rtx_equal_p (XEXP (cond, 1), b))
    c = XEXP (cond, 0);
  else
    return FALSE;

  /* Verify that C is zero.  Search backward through the block for
     a REG_EQUAL note if necessary.  */
  if (REG_P (c))
    {
      rtx insn, note = NULL;
      for (insn = earliest;
	   insn != if_info->test_bb->head;
	   insn = PREV_INSN (insn))
	if (INSN_P (insn)
	    && ((note = find_reg_note (insn, REG_EQUAL, c))
		|| (note = find_reg_note (insn, REG_EQUIV, c))))
	  break;
      if (! note)
	return FALSE;
      c = XEXP (note, 0);
    }
  /* The zero may also live in the constant pool.  */
  if (GET_CODE (c) == MEM
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
     Note that these rtx constants are known to be CONST_INT, and
     therefore imply integer comparisons.  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)
    ;
  else if (c == const1_rtx && GET_CODE (cond) == LT)
    ;
  else if (c != CONST0_RTX (GET_MODE (b)))
    return FALSE;

  /* Determine what sort of operation this is.  A "less than zero"
     taken branch means the NEG arm is selected, flipping NEGATE.  */
  switch (GET_CODE (cond))
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      negate = !negate;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);

  /* ??? It's a quandry whether cmove would be better here, especially
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
    target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);

  if (! target)
    {
      end_sequence ();
      return FALSE;
    }

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = get_insns ();
  end_sequence ();

  /* Reject expansions that themselves contain jumps.  */
  if (seq_contains_jump (seq))
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
1637
/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.  *EARLIEST receives the
   earliest insn at which the (unmodified) condition holds.  Returns
   the condition rtx, or NULL_RTX on failure.  */

static rtx
noce_get_condition (rtx jump, rtx *earliest)
{
  rtx cond, set, tmp, insn;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	     && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* If the condition variable is a register and is MODE_INT, accept it.  */

  cond = XEXP (SET_SRC (set), 0);
  tmp = XEXP (cond, 0);
  if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
    {
      /* Valid right at the jump; no earlier insn is involved.  */
      *earliest = jump;

      if (reverse)
	cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
			       GET_MODE (cond), tmp, XEXP (cond, 1));
      return cond;
    }

  /* Otherwise, fall back on canonicalize_condition to do the dirty
     work of manipulating MODE_CC values and COMPARE rtx codes.  */

  tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
				false);
  if (!tmp)
    return NULL_RTX;

  /* We are going to insert code before JUMP, not before EARLIEST.
     We must therefore be certain that the given condition is valid
     at JUMP by virtue of not having been modified since.  */
  for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      break;
  if (insn == jump)
    return tmp;

  /* The condition was modified.  See if we can get a partial result
     that doesn't follow all the reversals.  Perhaps combine can fold
     them together later.  */
  tmp = XEXP (tmp, 0);
  if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
    return NULL_RTX;
  tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp,
				false);
  if (!tmp)
    return NULL_RTX;

  /* For sanity's sake, re-validate the new result.  */
  for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      return NULL_RTX;

  return tmp;
}
1706
1707 /* Return true if OP is ok for if-then-else processing. */
1708
1709 static int
1710 noce_operand_ok (rtx op)
1711 {
1712 /* We special-case memories, so handle any of them with
1713 no address side effects. */
1714 if (GET_CODE (op) == MEM)
1715 return ! side_effects_p (XEXP (op, 0));
1716
1717 if (side_effects_p (op))
1718 return FALSE;
1719
1720 return ! may_trap_p (op);
1721 }
1722
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   without using conditional execution.  Return TRUE if we were
   successful at converting the block, in which case the converted
   blocks have been merged via merge_if_block.  */

static int
noce_process_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  struct noce_if_info if_info;
  rtx insn_a, insn_b;
  rtx set_a, set_b;
  rtx orig_x, x, a, b;
  rtx jump, cond;

  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

     The later patterns require jumps to be more expensive.

     ??? For future expansion, look for multiple X in such patterns.  */

  /* If test is comprised of && or || elements, don't handle it unless it is
     the special case of && elements without an ELSE block.  */
  if (ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      /* Collapse the multi-test chain down to its last test block.  */
      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = test_bb->end;
  cond = noce_get_condition (jump, &if_info.cond_earliest);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional
     jump, then we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Look for one of the potential sets: the THEN block must consist of
     exactly one single-set insn.  */
  insn_a = first_active_insn (then_bb);
  if (! insn_a
      || insn_a != last_active_insn (then_bb, FALSE)
      || (set_a = single_set (insn_a)) == NULL_RTX)
    return FALSE;

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */
  set_b = NULL_RTX;
  if (else_bb)
    {
      insn_b = first_active_insn (else_bb);
      if (! insn_b
	  || insn_b != last_active_insn (else_bb, FALSE)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b)))
	return FALSE;
    }
  else
    {
      /* Pattern (2): look for "x = b" just before the condition.  */
      insn_b = prev_nonnote_insn (if_info.cond_earliest);
      /* We're going to be moving the evaluation of B down from above
	 COND_EARLIEST to JUMP.  Make sure the relevant data is still
	 intact.  */
      if (! insn_b
	  || GET_CODE (insn_b) != INSN
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b))
	  || reg_overlap_mentioned_p (x, SET_SRC (set_b))
	  || modified_between_p (SET_SRC (set_b),
				 PREV_INSN (if_info.cond_earliest), jump)
	  /* Likewise with X.  In particular this can happen when
	     noce_get_condition looks farther back in the instruction
	     stream than one might expect.  */
	  || reg_overlap_mentioned_p (x, cond)
	  || reg_overlap_mentioned_p (x, a)
	  || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
	insn_b = set_b = NULL_RTX;
    }

  /* If x has side effects then only the if-then-else form is safe to
     convert.  But even in that case we would need to restore any notes
     (such as REG_INC) at then end.  That can be tricky if
     noce_emit_move_insn expands to more than one insn, so disable the
     optimization entirely for now if there are side effects.  */
  if (side_effects_p (x))
    return FALSE;

  /* Pattern (3): without an else-value, the false arm is "x = x".  */
  b = (set_b ? SET_SRC (set_b) : x);

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */
  orig_x = x;
  if (GET_CODE (x) != REG
      || (SMALL_REGISTER_CLASSES
	  && REGNO (x) < FIRST_PSEUDO_REGISTER))
    {
      if (no_new_pseudos || GET_MODE (x) == BLKmode)
	return FALSE;
      /* Compute into a temporary pseudo; copied back to ORIG_X below.  */
      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
				 ? XEXP (x, 0) : x));
    }

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
    return FALSE;

  /* Set up the info block for our subroutines.  */
  if_info.test_bb = test_bb;
  if_info.cond = cond;
  if_info.jump = jump;
  if_info.insn_a = insn_a;
  if_info.insn_b = insn_b;
  if_info.x = x;
  if_info.a = a;
  if_info.b = b;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (rtx_equal_p (a, b))
    {
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
	 move the instruction that we already have.  If we don't have an
	 INSN_B, that means that A == X, and we've got a noop move.  In
	 that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)
	{
	  rtx note;

	  if (else_bb && insn_b == else_bb->end)
	    else_bb->end = PREV_INSN (insn_b);
	  reorder_insns (insn_b, insn_b, PREV_INSN (jump));

	  /* If there was a REG_EQUAL note, delete it since it may have been
	     true due to this insn being after a jump.  */
	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
	    remove_note (insn_b, note);

	  insn_b = NULL_RTX;
	}
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
	 x must be executed twice.  */
      else if (insn_b && side_effects_p (orig_x))
	return FALSE;

      x = orig_x;
      goto success;
    }

  if (noce_try_store_flag (&if_info))
    goto success;
  if (noce_try_minmax (&if_info))
    goto success;
  if (noce_try_abs (&if_info))
    goto success;
  if (HAVE_conditional_move
      && noce_try_cmove (&if_info))
    goto success;
  if (! HAVE_conditional_execution)
    {
      if (noce_try_store_flag_constants (&if_info))
	goto success;
      if (noce_try_addcc (&if_info))
	goto success;
      if (noce_try_store_flag_mask (&if_info))
	goto success;
      if (HAVE_conditional_move
	  && noce_try_cmove_arith (&if_info))
	goto success;
    }

  return FALSE;

 success:
  /* The original sets may now be killed.  */
  delete_insn (insn_a);

  /* Several special cases here: First, we may have reused insn_b above,
     in which case insn_b is now NULL.  Second, we want to delete insn_b
     if it came from the ELSE block, because follows the now correct
     write that appears in the TEST block.  However, if we got insn_b from
     the TEST block, it may in fact be loading data needed for the comparison.
     We'll let life_analysis remove the insn if it's really dead.  */
  if (insn_b && else_bb)
    delete_insn (insn_b);

  /* The new insns will have been inserted immediately before the jump.  We
     should be able to remove the jump with impunity, but the condition itself
     may have been modified by gcse to be shared across basic blocks.  */
  delete_insn (jump);

  /* If we used a temporary, fix it up now.  */
  if (orig_x != x)
    {
      start_sequence ();
      noce_emit_move_insn (copy_rtx (orig_x), x);
      insn_b = get_insns ();
      end_sequence ();

      emit_insn_after_setloc (insn_b, test_bb->end, INSN_LOCATOR (insn_a));
    }

  /* Merge the blocks!  */
  merge_if_block (ce_info);

  return TRUE;
}
1957 \f
1958 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1959 straight line code. Return true if successful. */
1960
1961 static int
1962 process_if_block (struct ce_if_block * ce_info)
1963 {
1964 if (! reload_completed
1965 && noce_process_if_block (ce_info))
1966 return TRUE;
1967
1968 if (HAVE_conditional_execution && reload_completed)
1969 {
1970 /* If we have && and || tests, try to first handle combining the && and
1971 || tests into the conditional code, and if that fails, go back and
1972 handle it without the && and ||, which at present handles the && case
1973 if there was no ELSE block. */
1974 if (cond_exec_process_if_block (ce_info, TRUE))
1975 return TRUE;
1976
1977 if (ce_info->num_multiple_test_blocks)
1978 {
1979 cancel_changes (0);
1980
1981 if (cond_exec_process_if_block (ce_info, FALSE))
1982 return TRUE;
1983 }
1984 }
1985
1986 return FALSE;
1987 }
1988
/* Merge the blocks and mark for local life update.  All the blocks of
   the converted if-then-else diamond recorded in CE_INFO are folded
   into the test block; edge and dominator bookkeeping is updated.  */

static void
merge_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = ce_info->join_bb;	/* join block */
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;

  /* Merge any basic blocks to handle && and || subtests.  Each of
     the blocks are on the fallthru path from the predecessor block.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;
      basic_block fallthru = block_fallthru (bb);

      do
	{
	  bb = fallthru;
	  fallthru = block_fallthru (bb);
	  if (post_dominators)
	    delete_from_dominance_info (post_dominators, bb);
	  merge_blocks (combo_bb, bb);
	  num_removed_blocks++;
	}
      while (bb != last_test_bb);
    }

  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
     label, but it might if there were || tests.  That label's count should be
     zero, and it normally should be removed.  */

  if (then_bb)
    {
      if (combo_bb->global_live_at_end)
	COPY_REG_SET (combo_bb->global_live_at_end,
		      then_bb->global_live_at_end);
      if (post_dominators)
	delete_from_dominance_info (post_dominators, then_bb);
      merge_blocks (combo_bb, then_bb);
      num_removed_blocks++;
    }

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      if (post_dominators)
	delete_from_dominance_info (post_dominators, else_bb);
      merge_blocks (combo_bb, else_bb);
      num_removed_blocks++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      rtx last = combo_bb->end;

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (combo_bb->succ == NULL_EDGE)
	{
	  /* With no successor, the block must end in a noreturn call
	     or an unconditional trap.  */
	  if (find_reg_note (last, REG_NORETURN, NULL))
	    ;
	  else if (GET_CODE (last) == INSN
		   && GET_CODE (PATTERN (last)) == TRAP_IF
		   && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
	    ;
	  else
	    abort ();
	}

      /* There should still be something at the end of the THEN or ELSE
         blocks taking us to our final destination.  */
      else if (GET_CODE (last) == JUMP_INSN)
	;
      else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
	       && GET_CODE (last) == CALL_INSN
	       && SIBLING_CALL_P (last))
	;
      else if ((combo_bb->succ->flags & EDGE_EH)
	       && can_throw_internal (last))
	;
      else
	abort ();
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to abort).  */
  else if ((join_bb->pred == NULL
	    || join_bb->pred->pred_next == NULL)
	   && join_bb != EXIT_BLOCK_PTR)
    {
      /* We can merge the JOIN.  */
      if (combo_bb->global_live_at_end)
	COPY_REG_SET (combo_bb->global_live_at_end,
		      join_bb->global_live_at_end);

      if (post_dominators)
	delete_from_dominance_info (post_dominators, join_bb);
      merge_blocks (combo_bb, join_bb);
      num_removed_blocks++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (combo_bb->succ->succ_next != NULL_EDGE
	  || combo_bb->succ->dest != join_bb)
	abort ();

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR)
	tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
    }

  num_updated_if_blocks++;
}
2123 \f
/* Find a block ending in a simple IF condition and try to transform it
   in some way.  When converting a multi-block condition, put the new code
   in the first such block and delete the rest.  Return a pointer to this
   first block if some transformation was done.  Return NULL otherwise.
   PASS is the current if-conversion pass number (used for dumping).  */

static basic_block
find_if_header (basic_block test_bb, int pass)
{
  ce_if_block_t ce_info;
  edge then_edge;
  edge else_edge;

  /* The kind of block we're looking for has exactly two successors.  */
  if ((then_edge = test_bb->succ) == NULL_EDGE
      || (else_edge = then_edge->succ_next) == NULL_EDGE
      || else_edge->succ_next != NULL_EDGE)
    return NULL;

  /* Neither edge should be abnormal.  */
  if ((then_edge->flags & EDGE_COMPLEX)
      || (else_edge->flags & EDGE_COMPLEX))
    return NULL;

  /* Nor exit the loop.  */
  if ((then_edge->flags & EDGE_LOOP_EXIT)
      || (else_edge->flags & EDGE_LOOP_EXIT))
    return NULL;

  /* The THEN edge is canonically the one that falls through.  */
  if (then_edge->flags & EDGE_FALLTHRU)
    ;
  else if (else_edge->flags & EDGE_FALLTHRU)
    {
      /* Swap so THEN is the fallthru edge.  */
      edge e = else_edge;
      else_edge = then_edge;
      then_edge = e;
    }
  else
    /* Otherwise this must be a multiway branch of some sort.  */
    return NULL;

  memset (&ce_info, '\0', sizeof (ce_info));
  ce_info.test_bb = test_bb;
  ce_info.then_bb = then_edge->dest;
  ce_info.else_bb = else_edge->dest;
  ce_info.pass = pass;

#ifdef IFCVT_INIT_EXTRA_FIELDS
  IFCVT_INIT_EXTRA_FIELDS (&ce_info);
#endif

  /* Try the transformations from most to least structured.  */
  if (find_if_block (&ce_info))
    goto success;

  if (HAVE_trap && HAVE_conditional_trap
      && find_cond_trap (test_bb, then_edge, else_edge))
    goto success;

  if (post_dominators
      && (! HAVE_conditional_execution || reload_completed))
    {
      if (find_if_case_1 (test_bb, then_edge, else_edge))
	goto success;
      if (find_if_case_2 (test_bb, then_edge, else_edge))
	goto success;
    }

  return NULL;

 success:
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Conversion succeeded on pass %d.\n", pass);
  return ce_info.test_bb;
}
2198
2199 /* Return true if a block has two edges, one of which falls through to the next
2200 block, and the other jumps to a specific block, so that we can tell if the
2201 block is part of an && test or an || test. Returns either -1 or the number
2202 of non-note, non-jump, non-USE/CLOBBER insns in the block. */
2203
2204 static int
2205 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
2206 {
2207 edge cur_edge;
2208 int fallthru_p = FALSE;
2209 int jump_p = FALSE;
2210 rtx insn;
2211 rtx end;
2212 int n_insns = 0;
2213
2214 if (!cur_bb || !target_bb)
2215 return -1;
2216
2217 /* If no edges, obviously it doesn't jump or fallthru. */
2218 if (cur_bb->succ == NULL_EDGE)
2219 return FALSE;
2220
2221 for (cur_edge = cur_bb->succ;
2222 cur_edge != NULL_EDGE;
2223 cur_edge = cur_edge->succ_next)
2224 {
2225 if (cur_edge->flags & EDGE_COMPLEX)
2226 /* Anything complex isn't what we want. */
2227 return -1;
2228
2229 else if (cur_edge->flags & EDGE_FALLTHRU)
2230 fallthru_p = TRUE;
2231
2232 else if (cur_edge->dest == target_bb)
2233 jump_p = TRUE;
2234
2235 else
2236 return -1;
2237 }
2238
2239 if ((jump_p & fallthru_p) == 0)
2240 return -1;
2241
2242 /* Don't allow calls in the block, since this is used to group && and ||
2243 together for conditional execution support. ??? we should support
2244 conditional execution support across calls for IA-64 some day, but
2245 for now it makes the code simpler. */
2246 end = cur_bb->end;
2247 insn = cur_bb->head;
2248
2249 while (insn != NULL_RTX)
2250 {
2251 if (GET_CODE (insn) == CALL_INSN)
2252 return -1;
2253
2254 if (INSN_P (insn)
2255 && GET_CODE (insn) != JUMP_INSN
2256 && GET_CODE (PATTERN (insn)) != USE
2257 && GET_CODE (PATTERN (insn)) != CLOBBER)
2258 n_insns++;
2259
2260 if (insn == end)
2261 break;
2262
2263 insn = NEXT_INSN (insn);
2264 }
2265
2266 return n_insns;
2267 }
2268
/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
   block.  If so, we'll try to convert the insns to not require the branch.
   Return TRUE if we were successful at converting the block.

   On entry CE_INFO holds the test, then and else blocks as found by
   find_if_header.  On success we fill in else_bb (possibly NULL for a
   plain IF-THEN) and join_bb before handing off to process_if_block.  */

static int
find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;
  basic_block then_bb = ce_info->then_bb;
  basic_block else_bb = ce_info->else_bb;
  basic_block join_bb = NULL_BLOCK;
  edge then_succ = then_bb->succ;
  edge else_succ = else_bb->succ;
  int then_predecessors;
  int else_predecessors;
  edge cur_edge;
  basic_block next;

  ce_info->last_test_bb = test_bb;

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  Only done for conditional execution after reload,
     and only when TEST_BB has a single fallthru predecessor.  */
  if (HAVE_conditional_execution && reload_completed
      && test_bb->pred != NULL_EDGE
      && test_bb->pred->pred_next == NULL_EDGE
      && test_bb->pred->flags == EDGE_FALLTHRU)
    {
      basic_block bb = test_bb->pred->src;
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.
	 An && block falls through and jumps to ELSE; an || block falls
	 through and jumps to THEN.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
	{
	  ce_info->and_and_p = TRUE;
	  target_bb = else_bb;
	}
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
	{
	  ce_info->and_and_p = FALSE;
	  target_bb = then_bb;
	}
      else
	target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
	{
	  int total_insns = 0;
	  int blocks = 0;

	  ce_info->last_test_bb = test_bb;

	  /* Found at least one && or || block, look for more.  Walk
	     backwards through single-predecessor chains, moving the
	     effective test block earlier each time, bounded by the
	     total insn budget MAX_CONDITIONAL_EXECUTE.  */
	  do
	    {
	      ce_info->test_bb = test_bb = bb;
	      total_insns += n_insns;
	      blocks++;

	      /* Stop when BB no longer has a unique predecessor.  */
	      if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
		break;

	      bb = bb->pred->src;
	      n_insns = block_jumps_and_fallthru_p (bb, target_bb);
	    }
	  while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

	  ce_info->num_multiple_test_blocks = blocks;
	  ce_info->num_multiple_test_insns = total_insns;

	  if (ce_info->and_and_p)
	    ce_info->num_and_and_blocks = blocks;
	  else
	    ce_info->num_or_or_blocks = blocks;
	}
    }

  /* Count the number of edges the THEN and ELSE blocks have.  Any
     complex (abnormal/EH) incoming edge disqualifies the shape.  */
  then_predecessors = 0;
  for (cur_edge = then_bb->pred;
       cur_edge != NULL_EDGE;
       cur_edge = cur_edge->pred_next)
    {
      then_predecessors++;
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  else_predecessors = 0;
  for (cur_edge = else_bb->pred;
       cur_edge != NULL_EDGE;
       cur_edge = cur_edge->pred_next)
    {
      else_predecessors++;
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
    return FALSE;

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (then_succ != NULL_EDGE
      && (then_succ->succ_next != NULL_EDGE
	  || (then_succ->flags & EDGE_COMPLEX)
	  || (flow2_completed && tablejump_p (then_bb->end, NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  ??? we should fix this in the future.  */
  if (then_succ == NULL)
    {
      if (else_bb->pred->pred_next == NULL_EDGE)
	{
	  rtx last_insn = then_bb->end;

	  /* Skip trailing notes to find the real last insn.  */
	  while (last_insn
		 && GET_CODE (last_insn) == NOTE
		 && last_insn != then_bb->head)
	    last_insn = PREV_INSN (last_insn);

	  /* Reject a non-simple jump (e.g. an indirect jump).  */
	  if (last_insn
	      && GET_CODE (last_insn) == JUMP_INSN
	      && ! simplejump_p (last_insn))
	    return FALSE;

	  join_bb = else_bb;
	  else_bb = NULL_BLOCK;
	}
      else
	return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (then_succ->dest == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (else_succ != NULL_EDGE
	   && then_succ->dest == else_succ->dest
	   && else_bb->pred->pred_next == NULL_EDGE
	   && else_succ->succ_next == NULL_EDGE
	   && ! (else_succ->flags & EDGE_COMPLEX)
	   && ! (flow2_completed && tablejump_p (else_bb->end, NULL, NULL)))
    join_bb = else_succ->dest;

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "\nIF-THEN%s block found, pass %d, start block %d [insn %d], then %d [%d]",
	       (else_bb) ? "-ELSE" : "",
	       ce_info->pass,
	       test_bb->index, (test_bb->head) ? (int)INSN_UID (test_bb->head) : -1,
	       then_bb->index, (then_bb->head) ? (int)INSN_UID (then_bb->head) : -1);

      if (else_bb)
	fprintf (rtl_dump_file, ", else %d [%d]",
		 else_bb->index, (else_bb->head) ? (int)INSN_UID (else_bb->head) : -1);

      fprintf (rtl_dump_file, ", join %d [%d]",
	       join_bb->index, (join_bb->head) ? (int)INSN_UID (join_bb->head) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
	fprintf (rtl_dump_file, ", %d %s block%s last test %d [%d]",
		 ce_info->num_multiple_test_blocks,
		 (ce_info->and_and_p) ? "&&" : "||",
		 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
		 ce_info->last_test_bb->index,
		 ((ce_info->last_test_bb->head)
		  ? (int)INSN_UID (ce_info->last_test_bb->head)
		  : -1));

      fputc ('\n', rtl_dump_file);
    }

  /* Make sure IF, THEN, and ELSE, blocks are adjacent.  Actually, we get the
     first condition for free, since we've already asserted that there's a
     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
     we checked the FALLTHRU flag, those are already adjacent to the last IF
     block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than aborting the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
    {
      /* A non-adjacent join is tolerated only for IF-THEN-ELSE, where
	 we simply forget the join block; for IF-THEN it is fatal.  */
      if (else_bb)
	join_bb = NULL;
      else
	return FALSE;
    }

  /* Do the real work.  */
  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  return process_if_block (ce_info);
}
2488
/* Convert a branch over a trap, or a branch
   to a trap, into a conditional trap.

   TEST_BB ends in a conditional jump whose THEN_EDGE/ELSE_EDGE
   destinations are examined; whichever one consists solely of an
   unconditional trap is folded into a conditional trap emitted in
   TEST_BB.  Returns TRUE on success.  */

static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx trap, jump, cond, cond_earliest, seq;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
	       test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = test_bb->end;
  cond = noce_get_condition (jump, &cond_earliest);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  The branch condition
     guards the NON-trap path when the trap is in the THEN block, so
     the trap must fire on the opposite condition.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
	return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
		       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));

  /* Delete the trap block if possible.  The block may still have other
     predecessors, in which case only our edge is removed.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  if (trap_bb->pred == NULL)
    {
      if (post_dominators)
	delete_from_dominance_info (post_dominators, trap_bb);
      delete_block (trap_bb);
      num_removed_blocks++;
    }

  /* If the non-trap block and the test are now adjacent, merge them.
     Otherwise we must insert a direct branch.  */
  if (test_bb->next_bb == other_bb)
    {
      struct ce_if_block new_ce_info;
      delete_insn (jump);
      memset (&new_ce_info, '\0', sizeof (new_ce_info));
      new_ce_info.test_bb = test_bb;
      new_ce_info.then_bb = NULL;
      new_ce_info.else_bb = NULL;
      new_ce_info.join_bb = other_bb;
      merge_if_block (&new_ce_info);
    }
  else
    {
      rtx lab, newjump;

      /* Replace the conditional jump with an unconditional one to the
	 same label, keeping the label's use count accurate.  */
      lab = JUMP_LABEL (jump);
      newjump = emit_jump_insn_after (gen_jump (lab), jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);

      delete_insn (jump);
    }

  return TRUE;
}
2588
2589 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2590 return it. */
2591
2592 static rtx
2593 block_has_only_trap (basic_block bb)
2594 {
2595 rtx trap;
2596
2597 /* We're not the exit block. */
2598 if (bb == EXIT_BLOCK_PTR)
2599 return NULL_RTX;
2600
2601 /* The block must have no successors. */
2602 if (bb->succ)
2603 return NULL_RTX;
2604
2605 /* The only instruction in the THEN block must be the trap. */
2606 trap = first_active_insn (bb);
2607 if (! (trap == bb->end
2608 && GET_CODE (PATTERN (trap)) == TRAP_IF
2609 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2610 return NULL_RTX;
2611
2612 return trap;
2613 }
2614
2615 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2616 transformable, but not necessarily the other. There need be no
2617 JOIN block.
2618
2619 Return TRUE if we were successful at converting the block.
2620
2621 Cases we'd like to look at:
2622
2623 (1)
2624 if (test) goto over; // x not live
2625 x = a;
2626 goto label;
2627 over:
2628
2629 becomes
2630
2631 x = a;
2632 if (! test) goto label;
2633
2634 (2)
2635 if (test) goto E; // x not live
2636 x = big();
2637 goto L;
2638 E:
2639 x = b;
2640 goto M;
2641
2642 becomes
2643
2644 x = b;
2645 if (test) goto M;
2646 x = big();
2647 goto L;
2648
2649 (3) // This one's really only interesting for targets that can do
2650 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2651 // it results in multiple branches on a cache line, which often
2652 // does not sit well with predictors.
2653
2654 if (test1) goto E; // predicted not taken
2655 x = a;
2656 if (test2) goto F;
2657 ...
2658 E:
2659 x = b;
2660 J:
2661
2662 becomes
2663
2664 x = a;
2665 if (test1) goto E;
2666 if (test2) goto F;
2667
2668 Notes:
2669
2670 (A) Don't do (2) if the branch is predicted against the block we're
2671 eliminating. Do it anyway if we can eliminate a branch; this requires
2672 that the sole successor of the eliminated block postdominate the other
2673 side of the if.
2674
2675 (B) With CE, on (3) we can steal from both sides of the if, creating
2676
2677 if (test1) x = a;
2678 if (!test1) x = b;
2679 if (test1) goto J;
2680 if (test2) goto F;
2681 ...
2682 J:
2683
2684 Again, this is most useful if J postdominates.
2685
2686 (C) CE substitutes for helpful life information.
2687
2688 (D) These heuristics need a lot of work. */
2689
2690 /* Tests for case 1 above. */
2691
2692 static int
2693 find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
2694 {
2695 basic_block then_bb = then_edge->dest;
2696 basic_block else_bb = else_edge->dest, new_bb;
2697 edge then_succ = then_bb->succ;
2698 int then_bb_index;
2699
2700 /* THEN has one successor. */
2701 if (!then_succ || then_succ->succ_next != NULL)
2702 return FALSE;
2703
2704 /* THEN does not fall through, but is not strange either. */
2705 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2706 return FALSE;
2707
2708 /* THEN has one predecessor. */
2709 if (then_bb->pred->pred_next != NULL)
2710 return FALSE;
2711
2712 /* THEN must do something. */
2713 if (forwarder_block_p (then_bb))
2714 return FALSE;
2715
2716 num_possible_if_blocks++;
2717 if (rtl_dump_file)
2718 fprintf (rtl_dump_file,
2719 "\nIF-CASE-1 found, start %d, then %d\n",
2720 test_bb->index, then_bb->index);
2721
2722 /* THEN is small. */
2723 if (count_bb_insns (then_bb) > BRANCH_COST)
2724 return FALSE;
2725
2726 /* Registers set are dead, or are predicable. */
2727 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2728 then_bb->succ->dest, 1))
2729 return FALSE;
2730
2731 /* Conversion went ok, including moving the insns and fixing up the
2732 jump. Adjust the CFG to match. */
2733
2734 bitmap_operation (test_bb->global_live_at_end,
2735 else_bb->global_live_at_start,
2736 then_bb->global_live_at_end, BITMAP_IOR);
2737
2738 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2739 then_bb_index = then_bb->index;
2740 if (post_dominators)
2741 delete_from_dominance_info (post_dominators, then_bb);
2742 delete_block (then_bb);
2743
2744 /* Make rest of code believe that the newly created block is the THEN_BB
2745 block we removed. */
2746 if (new_bb)
2747 {
2748 new_bb->index = then_bb_index;
2749 BASIC_BLOCK (then_bb_index) = new_bb;
2750 if (post_dominators)
2751 add_to_dominance_info (post_dominators, new_bb);
2752 }
2753 /* We've possibly created jump to next insn, cleanup_cfg will solve that
2754 later. */
2755
2756 num_removed_blocks++;
2757 num_updated_if_blocks++;
2758
2759 return TRUE;
2760 }
2761
2762 /* Test for case 2 above. */
2763
2764 static int
2765 find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
2766 {
2767 basic_block then_bb = then_edge->dest;
2768 basic_block else_bb = else_edge->dest;
2769 edge else_succ = else_bb->succ;
2770 rtx note;
2771
2772 /* ELSE has one successor. */
2773 if (!else_succ || else_succ->succ_next != NULL)
2774 return FALSE;
2775
2776 /* ELSE outgoing edge is not complex. */
2777 if (else_succ->flags & EDGE_COMPLEX)
2778 return FALSE;
2779
2780 /* ELSE has one predecessor. */
2781 if (else_bb->pred->pred_next != NULL)
2782 return FALSE;
2783
2784 /* THEN is not EXIT. */
2785 if (then_bb->index < 0)
2786 return FALSE;
2787
2788 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2789 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2790 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2791 ;
2792 else if (else_succ->dest->index < 0
2793 || dominated_by_p (post_dominators, then_bb,
2794 else_succ->dest))
2795 ;
2796 else
2797 return FALSE;
2798
2799 num_possible_if_blocks++;
2800 if (rtl_dump_file)
2801 fprintf (rtl_dump_file,
2802 "\nIF-CASE-2 found, start %d, else %d\n",
2803 test_bb->index, else_bb->index);
2804
2805 /* ELSE is small. */
2806 if (count_bb_insns (else_bb) > BRANCH_COST)
2807 return FALSE;
2808
2809 /* Registers set are dead, or are predicable. */
2810 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2811 return FALSE;
2812
2813 /* Conversion went ok, including moving the insns and fixing up the
2814 jump. Adjust the CFG to match. */
2815
2816 bitmap_operation (test_bb->global_live_at_end,
2817 then_bb->global_live_at_start,
2818 else_bb->global_live_at_end, BITMAP_IOR);
2819
2820 if (post_dominators)
2821 delete_from_dominance_info (post_dominators, else_bb);
2822 delete_block (else_bb);
2823
2824 num_removed_blocks++;
2825 num_updated_if_blocks++;
2826
2827 /* ??? We may now fallthru from one of THEN's successors into a join
2828 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2829
2830 return TRUE;
2831 }
2832
2833 /* A subroutine of dead_or_predicable called through for_each_rtx.
2834 Return 1 if a memory is found. */
2835
2836 static int
2837 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
2838 {
2839 return GET_CODE (*px) == MEM;
2840 }
2841
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  NEW_DEST is the
   label TEST_BB should be branching to after the conversion.
   REVERSEP is true if the sense of the branch should be reversed.

   OTHER_BB is the block on the other side of the branch; its live-in
   set is what the moved/predicated code must not clobber.  On success
   the active insns of MERGE_BB have been moved (or predicated) before
   the branch and the jump redirected/inverted as requested.  */

static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
		    basic_block other_bb, basic_block new_dest, int reversep)
{
  rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;

  jump = test_bb->end;

  /* Find the extent of the real code in the merge block.  */
  head = merge_bb->head;
  end = merge_bb->end;

  /* Skip a leading label and a leading note; if that exhausts the
     block there is no body to move.  */
  if (GET_CODE (head) == CODE_LABEL)
    head = NEXT_INSN (head);
  if (GET_CODE (head) == NOTE)
    {
      if (head == end)
	{
	  head = end = NULL_RTX;
	  goto no_body;
	}
      head = NEXT_INSN (head);
    }

  /* Likewise exclude a trailing jump from the range.  */
  if (GET_CODE (end) == JUMP_INSN)
    {
      if (head == end)
	{
	  head = end = NULL_RTX;
	  goto no_body;
	}
      end = PREV_INSN (end);
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (HAVE_conditional_execution)
    {
      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info,
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      rtx cond, prob_val;

      cond = cond_exec_get_condition (jump);
      if (! cond)
	return FALSE;

      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      if (prob_val)
	prob_val = XEXP (prob_val, 0);

      if (reversep)
	{
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
	    return FALSE;
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
				 XEXP (cond, 1));
	  /* Reversing the branch also reverses its probability.  */
	  if (prob_val)
	    prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
	}

      if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
				     prob_val, 0))
	goto cancel;

      earliest = jump;
    }
  else
#endif
    {
      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

      rtx insn, cond, prev;
      regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
      regset merge_set, tmp, test_live, test_set;
      struct propagate_block_info *pbi;
      int i, fail = 0;

      /* Check for no calls or trapping operations.  */
      for (insn = head; ; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CALL_INSN)
	    return FALSE;
	  if (INSN_P (insn))
	    {
	      if (may_trap_p (PATTERN (insn)))
		return FALSE;

	      /* ??? Even non-trapping memories such as stack frame
		 references must be avoided.  For stores, we collect
		 no lifetime info; for reads, we'd have to assert
		 true_dependence false against every store in the
		 TEST range.  */
	      if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
		return FALSE;
	    }
	  if (insn == end)
	    break;
	}

      if (! any_condjump_p (jump))
	return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest);
      if (! cond)
	return FALSE;

      /* Collect:
	   MERGE_SET = set of registers set in MERGE_BB
	   TEST_LIVE = set of registers live at EARLIEST
	   TEST_SET  = set of registers set between EARLIEST and the
		       end of the block.  */

      tmp = INITIALIZE_REG_SET (tmp_head);
      merge_set = INITIALIZE_REG_SET (merge_set_head);
      test_live = INITIALIZE_REG_SET (test_live_head);
      test_set = INITIALIZE_REG_SET (test_set_head);

      /* ??? bb->local_set is only valid during calculate_global_regs_live,
	 so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
	 since we've already asserted that MERGE_BB is small.  */
      propagate_block (merge_bb, tmp, merge_set, merge_set, 0);

      /* For small register class machines, don't lengthen lifetimes of
	 hard registers before reload.  */
      if (SMALL_REGISTER_CLASSES && ! reload_completed)
	{
	  EXECUTE_IF_SET_IN_BITMAP
	    (merge_set, 0, i,
	     {
	       if (i < FIRST_PSEUDO_REGISTER
		   && ! fixed_regs[i]
		   && ! global_regs[i])
		 fail = 1;
	     });
	}

      /* For TEST, we're interested in a range of insns, not a whole block.
	 Moreover, we're interested in the insns live from OTHER_BB.  */

      COPY_REG_SET (test_live, other_bb->global_live_at_start);
      pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
				       0);

      /* Walk backwards from the jump to EARLIEST, accumulating the
	 registers set and live across the conditional's extent.  */
      for (insn = jump; ; insn = prev)
	{
	  prev = propagate_one_insn (pbi, insn);
	  if (insn == earliest)
	    break;
	}

      free_propagate_block_info (pbi);

      /* We can perform the transformation if
	   MERGE_SET & (TEST_SET | TEST_LIVE)
	 and
	   TEST_SET & merge_bb->global_live_at_start
	 are empty.  */

      bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
      bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
      EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);

      bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
			BITMAP_AND);
      EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);

      FREE_REG_SET (tmp);
      FREE_REG_SET (merge_set);
      FREE_REG_SET (test_live);
      FREE_REG_SET (test_set);

      if (fail)
	return FALSE;
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      new_label = block_label (new_dest);
      if (reversep
	  ? ! invert_jump_1 (jump, new_label)
	  : ! redirect_jump_1 (jump, new_label))
	goto cancel;
    }

  if (! apply_change_group ())
    return FALSE;

  if (other_bb != new_dest)
    {
      /* Commit the label bookkeeping and edge redirection that the
	 change group above only staged.  */
      if (old_dest)
	LABEL_NUSES (old_dest) -= 1;
      if (new_label)
	LABEL_NUSES (new_label) += 1;
      JUMP_LABEL (jump) = new_label;
      if (reversep)
	invert_br_probabilities (jump);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
	{
	  /* Swap counts and probabilities between the two outgoing
	     edges to match the inverted branch.  */
	  gcov_type count, probability;
	  count = BRANCH_EDGE (test_bb)->count;
	  BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
	  FALLTHRU_EDGE (test_bb)->count = count;
	  probability = BRANCH_EDGE (test_bb)->probability;
	  BRANCH_EDGE (test_bb)->probability
	    = FALLTHRU_EDGE (test_bb)->probability;
	  FALLTHRU_EDGE (test_bb)->probability = probability;
	  update_br_prob_note (test_bb);
	}
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      if (end == merge_bb->end)
	merge_bb->end = PREV_INSN (head);

      /* NOTE(review): a nonzero return from squeeze_notes is treated as
	 overall success here without reordering -- presumably it means
	 the range collapsed to nothing; confirm against squeeze_notes.  */
      if (squeeze_notes (&head, &end))
	return TRUE;

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
	 At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);
  return FALSE;
}
3104 \f
/* Main entry point for all if-conversion.

   X_LIFE_DATA_OK is nonzero when global life information is valid and
   may be updated; it gates both the post-dominator computation and the
   life-info rebuild after blocks are removed.  Repeats whole-CFG passes
   until a pass makes no conditional-execution changes.  */

void
if_convert (int x_life_data_ok)
{
  basic_block bb;
  int pass;

  /* Reset the per-invocation statistics.  */
  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_removed_blocks = 0;
  life_data_ok = (x_life_data_ok != 0);

  /* Mark loop-exit edges so find_if_header can refuse to convert
     branches that leave a loop, unless the target forbids modifying
     jumps at this point.  */
  if (! (* targetm.cannot_modify_jumps_p) ())
    mark_loop_exit_edges ();

  /* Free up basic_block_for_insn so that we don't have to keep it
     up to date, either here or in merge_blocks.  */
  free_basic_block_vars (1);

  /* Compute postdominators if we think we'll use them.  */
  post_dominators = NULL;
  if (HAVE_conditional_execution || life_data_ok)
    {
      post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
    }
  if (life_data_ok)
    clear_bb_flags ();

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (rtl_dump_file && pass > 1)
	fprintf (rtl_dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB (bb)
	{
	  basic_block new_bb;
	  /* Keep converting from the same header as long as find_if_header
	     keeps succeeding; it returns the (possibly earlier) block to
	     continue from.  */
	  while ((new_bb = find_if_header (bb, pass)))
	    bb = new_bb;
	}

#ifdef IFCVT_MULTIPLE_DUMPS
      if (rtl_dump_file && cond_exec_changed_p)
	print_rtl_with_bb (rtl_dump_file, get_insns ());
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "\n\n========== no more changes\n");
#endif

  if (post_dominators)
    free_dominance_info (post_dominators);

  if (rtl_dump_file)
    fflush (rtl_dump_file);

  clear_aux_for_blocks ();

  /* Rebuild life info for basic blocks that require it.  */
  if (num_removed_blocks && life_data_ok)
    {
      /* If we allocated new pseudos, we must resize the array for sched1.  */
      if (max_regno < max_reg_num ())
	{
	  max_regno = max_reg_num ();
	  allocate_reg_info (max_regno, FALSE, FALSE);
	}
      update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
					PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
					| PROP_KILL_DEAD_CODE);
    }

  /* Write the final stats.  */
  if (rtl_dump_file && num_possible_if_blocks > 0)
    {
      fprintf (rtl_dump_file,
	       "\n%d possible IF blocks searched.\n",
	       num_possible_if_blocks);
      fprintf (rtl_dump_file,
	       "%d IF blocks converted.\n",
	       num_updated_if_blocks);
      fprintf (rtl_dump_file,
	       "%d basic blocks deleted.\n\n\n",
	       num_removed_blocks);
    }

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}