1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "optabs.h"
39 #include "toplev.h"
40 #include "tm_p.h"
41
42
43 #ifndef HAVE_conditional_execution
44 #define HAVE_conditional_execution 0
45 #endif
46 #ifndef HAVE_conditional_move
47 #define HAVE_conditional_move 0
48 #endif
49 #ifndef HAVE_incscc
50 #define HAVE_incscc 0
51 #endif
52 #ifndef HAVE_decscc
53 #define HAVE_decscc 0
54 #endif
55 #ifndef HAVE_trap
56 #define HAVE_trap 0
57 #endif
58 #ifndef HAVE_conditional_trap
59 #define HAVE_conditional_trap 0
60 #endif
61
62 #ifndef MAX_CONDITIONAL_EXECUTE
63 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
64 #endif
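/* That is, a single IF-THEN arm may contain at most BRANCH_COST + 1 insns
   by default; cond_exec_process_if_block doubles this limit when an ELSE
   block is present.  */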
65
66 #define NULL_EDGE ((struct edge_def *)NULL)
67 #define NULL_BLOCK ((struct basic_block_def *)NULL)
68
69 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
70 static int num_possible_if_blocks;
71
     72 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
73 execution. */
74 static int num_updated_if_blocks;
75
76 /* # of basic blocks that were removed. */
77 static int num_removed_blocks;
78
79 /* Whether conditional execution changes were made. */
80 static int cond_exec_changed_p;
81
82 /* True if life data ok at present. */
83 static bool life_data_ok;
84
85 /* The post-dominator relation on the original block numbers. */
86 static dominance_info post_dominators;
87
88 /* Forward references. */
89 static int count_bb_insns PARAMS ((basic_block));
90 static rtx first_active_insn PARAMS ((basic_block));
91 static rtx last_active_insn PARAMS ((basic_block, int));
92 static int seq_contains_jump PARAMS ((rtx));
93 static basic_block block_fallthru PARAMS ((basic_block));
94 static int cond_exec_process_insns PARAMS ((ce_if_block_t *,
95 rtx, rtx, rtx, rtx, int));
96 static rtx cond_exec_get_condition PARAMS ((rtx));
97 static int cond_exec_process_if_block PARAMS ((ce_if_block_t *, int));
98 static rtx noce_get_condition PARAMS ((rtx, rtx *));
99 static int noce_operand_ok PARAMS ((rtx));
100 static int noce_process_if_block PARAMS ((ce_if_block_t *));
101 static int process_if_block PARAMS ((ce_if_block_t *));
102 static void merge_if_block PARAMS ((ce_if_block_t *));
103 static int find_cond_trap PARAMS ((basic_block, edge, edge));
104 static basic_block find_if_header PARAMS ((basic_block, int));
105 static int block_jumps_and_fallthru_p PARAMS ((basic_block, basic_block));
106 static int find_if_block PARAMS ((ce_if_block_t *));
107 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
108 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
109 static int find_memory PARAMS ((rtx *, void *));
110 static int dead_or_predicable PARAMS ((basic_block, basic_block,
111 basic_block, basic_block, int));
112 static void noce_emit_move_insn PARAMS ((rtx, rtx));
113 static rtx block_has_only_trap PARAMS ((basic_block));
114 \f
115 /* Count the number of non-jump active insns in BB. */
116
117 static int
118 count_bb_insns (bb)
119 basic_block bb;
120 {
121 int count = 0;
122 rtx insn = bb->head;
123
124 while (1)
125 {
126 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
127 count++;
128
129 if (insn == bb->end)
130 break;
131 insn = NEXT_INSN (insn);
132 }
133
134 return count;
135 }
136
137 /* Return the first non-jump active insn in the basic block. */
138
139 static rtx
140 first_active_insn (bb)
141 basic_block bb;
142 {
143 rtx insn = bb->head;
144
145 if (GET_CODE (insn) == CODE_LABEL)
146 {
147 if (insn == bb->end)
148 return NULL_RTX;
149 insn = NEXT_INSN (insn);
150 }
151
152 while (GET_CODE (insn) == NOTE)
153 {
154 if (insn == bb->end)
155 return NULL_RTX;
156 insn = NEXT_INSN (insn);
157 }
158
159 if (GET_CODE (insn) == JUMP_INSN)
160 return NULL_RTX;
161
162 return insn;
163 }
164
    165 /* Return the last active non-jump insn in the basic block. */
166
167 static rtx
168 last_active_insn (bb, skip_use_p)
169 basic_block bb;
170 int skip_use_p;
171 {
172 rtx insn = bb->end;
173 rtx head = bb->head;
174
175 while (GET_CODE (insn) == NOTE
176 || GET_CODE (insn) == JUMP_INSN
177 || (skip_use_p
178 && GET_CODE (insn) == INSN
179 && GET_CODE (PATTERN (insn)) == USE))
180 {
181 if (insn == head)
182 return NULL_RTX;
183 insn = PREV_INSN (insn);
184 }
185
186 if (GET_CODE (insn) == CODE_LABEL)
187 return NULL_RTX;
188
189 return insn;
190 }
191
    192 /* It is possible, especially when the expanders have dealt with multi-word
    193 arithmetic, for them to have emitted jumps. Search
194 through the sequence and return TRUE if a jump exists so that
195 we can abort the conversion. */
196
197 static int
198 seq_contains_jump (insn)
199 rtx insn;
200 {
201 while (insn)
202 {
203 if (GET_CODE (insn) == JUMP_INSN)
204 return 1;
205 insn = NEXT_INSN (insn);
206 }
207 return 0;
208 }
209
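/* Return the basic block reached by falling through out of BB, or
   NULL_BLOCK if BB has no fallthru successor.  */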
210 static basic_block
211 block_fallthru (bb)
212 basic_block bb;
213 {
214 edge e;
215
216 for (e = bb->succ;
217 e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
218 e = e->succ_next)
219 ;
220
221 return (e) ? e->dest : NULL_BLOCK;
222 }
223 \f
224 /* Go through a bunch of insns, converting them to conditional
225 execution format if possible. Return TRUE if all of the non-note
226 insns were processed. */
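/* Roughly, an insn (set X Y) is rewritten as (cond_exec TEST (set X Y));
   an insn that is already a COND_EXEC has its test replaced by the AND of
   its own test and TEST.  */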
227
228 static int
229 cond_exec_process_insns (ce_info, start, end, test, prob_val, mod_ok)
230 ce_if_block_t *ce_info ATTRIBUTE_UNUSED; /* if block information */
231 rtx start; /* first insn to look at */
232 rtx end; /* last insn to look at */
233 rtx test; /* conditional execution test */
234 rtx prob_val; /* probability of branch taken. */
    235 int mod_ok; /* true if the last insn may modify the test. */
236 {
237 int must_be_last = FALSE;
238 rtx insn;
239 rtx xtest;
240 rtx pattern;
241
242 if (!start || !end)
243 return FALSE;
244
245 for (insn = start; ; insn = NEXT_INSN (insn))
246 {
247 if (GET_CODE (insn) == NOTE)
248 goto insn_done;
249
250 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
251 abort ();
252
253 /* Remove USE insns that get in the way. */
254 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
255 {
256 /* ??? Ug. Actually unlinking the thing is problematic,
257 given what we'd have to coordinate with our callers. */
258 PUT_CODE (insn, NOTE);
259 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
260 NOTE_SOURCE_FILE (insn) = 0;
261 goto insn_done;
262 }
263
264 /* Last insn wasn't last? */
265 if (must_be_last)
266 return FALSE;
267
268 if (modified_in_p (test, insn))
269 {
270 if (!mod_ok)
271 return FALSE;
272 must_be_last = TRUE;
273 }
274
275 /* Now build the conditional form of the instruction. */
276 pattern = PATTERN (insn);
277 xtest = copy_rtx (test);
278
279 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
280 two conditions. */
281 if (GET_CODE (pattern) == COND_EXEC)
282 {
283 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
284 return FALSE;
285
286 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
287 COND_EXEC_TEST (pattern));
288 pattern = COND_EXEC_CODE (pattern);
289 }
290
291 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
292
293 /* If the machine needs to modify the insn being conditionally executed,
294 say for example to force a constant integer operand into a temp
295 register, do so here. */
296 #ifdef IFCVT_MODIFY_INSN
297 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
298 if (! pattern)
299 return FALSE;
300 #endif
301
302 validate_change (insn, &PATTERN (insn), pattern, 1);
303
304 if (GET_CODE (insn) == CALL_INSN && prob_val)
305 validate_change (insn, &REG_NOTES (insn),
306 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
307 REG_NOTES (insn)), 1);
308
309 insn_done:
310 if (insn == end)
311 break;
312 }
313
314 return TRUE;
315 }
316
317 /* Return the condition for a jump. Do not do any special processing. */
318
319 static rtx
320 cond_exec_get_condition (jump)
321 rtx jump;
322 {
323 rtx test_if, cond;
324
325 if (any_condjump_p (jump))
326 test_if = SET_SRC (pc_set (jump));
327 else
328 return NULL_RTX;
329 cond = XEXP (test_if, 0);
330
331 /* If this branches to JUMP_LABEL when the condition is false,
332 reverse the condition. */
333 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
334 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
335 {
336 enum rtx_code rev = reversed_comparison_code (cond, jump);
337 if (rev == UNKNOWN)
338 return NULL_RTX;
339
340 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
341 XEXP (cond, 1));
342 }
343
344 return cond;
345 }
346
347 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
348 to conditional execution. Return TRUE if we were successful at
349 converting the block. */
350
351 static int
352 cond_exec_process_if_block (ce_info, do_multiple_p)
353 ce_if_block_t * ce_info; /* if block information */
354 int do_multiple_p; /* != 0 if we should handle && and || blocks */
355 {
356 basic_block test_bb = ce_info->test_bb; /* last test block */
357 basic_block then_bb = ce_info->then_bb; /* THEN */
358 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
359 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
360 rtx then_start; /* first insn in THEN block */
    361 rtx then_end; /* last insn in THEN block */
362 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
    363 rtx else_end = NULL_RTX; /* last insn in ELSE block */
364 int max; /* max # of insns to convert. */
365 int then_mod_ok; /* whether conditional mods are ok in THEN */
366 rtx true_expr; /* test for else block insns */
367 rtx false_expr; /* test for then block insns */
368 rtx true_prob_val; /* probability of else block */
369 rtx false_prob_val; /* probability of then block */
370 int n_insns;
371 enum rtx_code false_code;
372
373 /* If test is comprised of && or || elements, and we've failed at handling
374 all of them together, just use the last test if it is the special case of
375 && elements without an ELSE block. */
376 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
377 {
378 if (else_bb || ! ce_info->and_and_p)
379 return FALSE;
380
381 ce_info->test_bb = test_bb = ce_info->last_test_bb;
382 ce_info->num_multiple_test_blocks = 0;
383 ce_info->num_and_and_blocks = 0;
384 ce_info->num_or_or_blocks = 0;
385 }
386
387 /* Find the conditional jump to the ELSE or JOIN part, and isolate
388 the test. */
389 test_expr = cond_exec_get_condition (test_bb->end);
390 if (! test_expr)
391 return FALSE;
392
393 /* If the conditional jump is more than just a conditional jump,
394 then we can not do conditional execution conversion on this block. */
395 if (! onlyjump_p (test_bb->end))
396 return FALSE;
397
398 /* Collect the bounds of where we're to search, skipping any labels, jumps
399 and notes at the beginning and end of the block. Then count the total
400 number of insns and see if it is small enough to convert. */
401 then_start = first_active_insn (then_bb);
402 then_end = last_active_insn (then_bb, TRUE);
403 n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
404 max = MAX_CONDITIONAL_EXECUTE;
405
406 if (else_bb)
407 {
408 max *= 2;
409 else_start = first_active_insn (else_bb);
410 else_end = last_active_insn (else_bb, TRUE);
411 n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
412 }
413
414 if (n_insns > max)
415 return FALSE;
416
417 /* Map test_expr/test_jump into the appropriate MD tests to use on
418 the conditionally executed code. */
419
420 true_expr = test_expr;
421
422 false_code = reversed_comparison_code (true_expr, test_bb->end);
423 if (false_code != UNKNOWN)
424 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
425 XEXP (true_expr, 0), XEXP (true_expr, 1));
426 else
427 false_expr = NULL_RTX;
428
429 #ifdef IFCVT_MODIFY_TESTS
430 /* If the machine description needs to modify the tests, such as setting a
431 conditional execution register from a comparison, it can do so here. */
432 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
433
434 /* See if the conversion failed */
435 if (!true_expr || !false_expr)
436 goto fail;
437 #endif
438
439 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
440 if (true_prob_val)
441 {
442 true_prob_val = XEXP (true_prob_val, 0);
443 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
444 }
445 else
446 false_prob_val = NULL_RTX;
447
448 /* If we have && or || tests, do them here. These tests are in the adjacent
449 blocks after the first block containing the test. */
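/* For example, with "if (a && b)" the insns in the second test block are
   predicated on the first test having fallen through, and the accumulated
   TRUE/FALSE predicates become the AND/IOR (IOR/AND for ||) of the
   individual branch conditions.  */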
450 if (ce_info->num_multiple_test_blocks > 0)
451 {
452 basic_block bb = test_bb;
453 basic_block last_test_bb = ce_info->last_test_bb;
454
455 if (! false_expr)
456 goto fail;
457
458 do
459 {
460 rtx start, end;
461 rtx t, f;
462
463 bb = block_fallthru (bb);
464 start = first_active_insn (bb);
465 end = last_active_insn (bb, TRUE);
466 if (start
467 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
468 false_prob_val, FALSE))
469 goto fail;
470
471 /* If the conditional jump is more than just a conditional jump, then
472 we can not do conditional execution conversion on this block. */
473 if (! onlyjump_p (bb->end))
474 goto fail;
475
476 /* Find the conditional jump and isolate the test. */
477 t = cond_exec_get_condition (bb->end);
478 if (! t)
479 goto fail;
480
481 f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
482 GET_MODE (t),
483 XEXP (t, 0),
484 XEXP (t, 1));
485
486 if (ce_info->and_and_p)
487 {
488 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
489 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
490 }
491 else
492 {
493 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
494 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
495 }
496
497 /* If the machine description needs to modify the tests, such as
498 setting a conditional execution register from a comparison, it can
499 do so here. */
500 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
501 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
502
503 /* See if the conversion failed */
504 if (!t || !f)
505 goto fail;
506 #endif
507
508 true_expr = t;
509 false_expr = f;
510 }
511 while (bb != last_test_bb);
512 }
513
514 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
    515 in the THEN block. */
516 then_mod_ok = (else_bb == NULL_BLOCK);
517
518 /* Go through the THEN and ELSE blocks converting the insns if possible
519 to conditional execution. */
520
521 if (then_end
522 && (! false_expr
523 || ! cond_exec_process_insns (ce_info, then_start, then_end,
524 false_expr, false_prob_val,
525 then_mod_ok)))
526 goto fail;
527
528 if (else_bb && else_end
529 && ! cond_exec_process_insns (ce_info, else_start, else_end,
530 true_expr, true_prob_val, TRUE))
531 goto fail;
532
533 /* If we cannot apply the changes, fail. Do not go through the normal fail
534 processing, since apply_change_group will call cancel_changes. */
535 if (! apply_change_group ())
536 {
537 #ifdef IFCVT_MODIFY_CANCEL
538 /* Cancel any machine dependent changes. */
539 IFCVT_MODIFY_CANCEL (ce_info);
540 #endif
541 return FALSE;
542 }
543
544 #ifdef IFCVT_MODIFY_FINAL
545 /* Do any machine dependent final modifications */
546 IFCVT_MODIFY_FINAL (ce_info);
547 #endif
548
549 /* Conversion succeeded. */
550 if (rtl_dump_file)
551 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
552 n_insns, (n_insns == 1) ? " was" : "s were");
553
554 /* Merge the blocks! */
555 merge_if_block (ce_info);
556 cond_exec_changed_p = TRUE;
557 return TRUE;
558
559 fail:
560 #ifdef IFCVT_MODIFY_CANCEL
561 /* Cancel any machine dependent changes. */
562 IFCVT_MODIFY_CANCEL (ce_info);
563 #endif
564
565 cancel_changes (0);
566 return FALSE;
567 }
568 \f
569 /* Used by noce_process_if_block to communicate with its subroutines.
570
571 The subroutines know that A and B may be evaluated freely. They
572 know that X is a register. They should insert new instructions
573 before cond_earliest. */
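/* Roughly: for the canonical pattern "if (COND) X = A; else X = B",
   INSN_A and INSN_B are the single-set insns that assign A and B to X,
   JUMP is the conditional jump ending TEST_BB, and COND_EARLIEST is the
   insn at which the comparison feeding COND is set up.  */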
574
575 struct noce_if_info
576 {
577 basic_block test_bb;
578 rtx insn_a, insn_b;
579 rtx x, a, b;
580 rtx jump, cond, cond_earliest;
581 };
582
583 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
584 rtx, int, int));
585 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
586 static int noce_try_addcc PARAMS ((struct noce_if_info *));
587 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
588 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
589 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
590 rtx, enum rtx_code, rtx,
591 rtx, rtx, rtx));
592 static int noce_try_cmove PARAMS ((struct noce_if_info *));
593 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
594 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
595 rtx, rtx *));
596 static int noce_try_minmax PARAMS ((struct noce_if_info *));
597 static int noce_try_abs PARAMS ((struct noce_if_info *));
598
599 /* Helper function for noce_try_store_flag*. */
600
601 static rtx
602 noce_emit_store_flag (if_info, x, reversep, normalize)
603 struct noce_if_info *if_info;
604 rtx x;
605 int reversep, normalize;
606 {
607 rtx cond = if_info->cond;
608 int cond_complex;
609 enum rtx_code code;
610
611 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
612 || ! general_operand (XEXP (cond, 1), VOIDmode));
613
614 /* If earliest == jump, or when the condition is complex, try to
615 build the store_flag insn directly. */
616
617 if (cond_complex)
618 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
619
620 if (reversep)
621 code = reversed_comparison_code (cond, if_info->jump);
622 else
623 code = GET_CODE (cond);
624
625 if ((if_info->cond_earliest == if_info->jump || cond_complex)
626 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
627 {
628 rtx tmp;
629
630 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
631 XEXP (cond, 1));
632 tmp = gen_rtx_SET (VOIDmode, x, tmp);
633
634 start_sequence ();
635 tmp = emit_insn (tmp);
636
637 if (recog_memoized (tmp) >= 0)
638 {
639 tmp = get_insns ();
640 end_sequence ();
641 emit_insn (tmp);
642
643 if_info->cond_earliest = if_info->jump;
644
645 return x;
646 }
647
648 end_sequence ();
649 }
650
651 /* Don't even try if the comparison operands or the mode of X are weird. */
652 if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
653 return NULL_RTX;
654
655 return emit_store_flag (x, code, XEXP (cond, 0),
656 XEXP (cond, 1), VOIDmode,
657 (code == LTU || code == LEU
658 || code == GEU || code == GTU), normalize);
659 }
660
    661 /* Emit an insn to copy Y into X, handling the case where X is a STRICT_LOW_PART. */
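/* For a destination such as (strict_low_part (subreg:HI (reg:SI r) 0)),
   only the HImode portion of R is written, via store_bit_field; the
   remaining bits of R are left untouched.  */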
662 static void
663 noce_emit_move_insn (x, y)
664 rtx x, y;
665 {
666 enum machine_mode outmode, inmode;
667 rtx outer, inner;
668 int bitpos;
669
670 if (GET_CODE (x) != STRICT_LOW_PART)
671 {
672 emit_move_insn (x, y);
673 return;
674 }
675
676 outer = XEXP (x, 0);
677 inner = XEXP (outer, 0);
678 outmode = GET_MODE (outer);
679 inmode = GET_MODE (inner);
680 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
681 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
682 GET_MODE_BITSIZE (inmode));
683 }
684
685 /* Convert "if (test) x = 1; else x = 0".
686
687 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
688 tried in noce_try_store_flag_constants after noce_try_cmove has had
689 a go at the conversion. */
690
691 static int
692 noce_try_store_flag (if_info)
693 struct noce_if_info *if_info;
694 {
695 int reversep;
696 rtx target, seq;
697
698 if (GET_CODE (if_info->b) == CONST_INT
699 && INTVAL (if_info->b) == STORE_FLAG_VALUE
700 && if_info->a == const0_rtx)
701 reversep = 0;
702 else if (if_info->b == const0_rtx
703 && GET_CODE (if_info->a) == CONST_INT
704 && INTVAL (if_info->a) == STORE_FLAG_VALUE
705 && (reversed_comparison_code (if_info->cond, if_info->jump)
706 != UNKNOWN))
707 reversep = 1;
708 else
709 return FALSE;
710
711 start_sequence ();
712
713 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
714 if (target)
715 {
716 if (target != if_info->x)
717 noce_emit_move_insn (if_info->x, target);
718
719 seq = get_insns ();
720 end_sequence ();
721 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
722
723 return TRUE;
724 }
725 else
726 {
727 end_sequence ();
728 return FALSE;
729 }
730 }
731
732 /* Convert "if (test) x = a; else x = b", for A and B constant. */
733
734 static int
735 noce_try_store_flag_constants (if_info)
736 struct noce_if_info *if_info;
737 {
738 rtx target, seq;
739 int reversep;
740 HOST_WIDE_INT itrue, ifalse, diff, tmp;
741 int normalize, can_reverse;
742 enum machine_mode mode;
743
744 if (! no_new_pseudos
745 && GET_CODE (if_info->a) == CONST_INT
746 && GET_CODE (if_info->b) == CONST_INT)
747 {
748 mode = GET_MODE (if_info->x);
749 ifalse = INTVAL (if_info->a);
750 itrue = INTVAL (if_info->b);
751
752 /* Make sure we can represent the difference between the two values. */
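/* That is, verify that the sign of the computed ITRUE - IFALSE agrees with
   the sign it must have mathematically; a mismatch means the subtraction
   overflowed HOST_WIDE_INT.  */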
753 if ((itrue - ifalse > 0)
754 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
755 return FALSE;
756
757 diff = trunc_int_for_mode (itrue - ifalse, mode);
758
759 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
760 != UNKNOWN);
761
762 reversep = 0;
763 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
764 normalize = 0;
765 else if (ifalse == 0 && exact_log2 (itrue) >= 0
766 && (STORE_FLAG_VALUE == 1
767 || BRANCH_COST >= 2))
768 normalize = 1;
769 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
770 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
771 normalize = 1, reversep = 1;
772 else if (itrue == -1
773 && (STORE_FLAG_VALUE == -1
774 || BRANCH_COST >= 2))
775 normalize = -1;
776 else if (ifalse == -1 && can_reverse
777 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
778 normalize = -1, reversep = 1;
779 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
780 || BRANCH_COST >= 3)
781 normalize = -1;
782 else
783 return FALSE;
784
785 if (reversep)
786 {
787 tmp = itrue; itrue = ifalse; ifalse = tmp;
788 diff = trunc_int_for_mode (-diff, mode);
789 }
790
791 start_sequence ();
792 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
793 if (! target)
794 {
795 end_sequence ();
796 return FALSE;
797 }
798
799 /* if (test) x = 3; else x = 4;
800 => x = 3 + (test == 0); */
801 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
802 {
803 target = expand_simple_binop (mode,
804 (diff == STORE_FLAG_VALUE
805 ? PLUS : MINUS),
806 GEN_INT (ifalse), target, if_info->x, 0,
807 OPTAB_WIDEN);
808 }
809
810 /* if (test) x = 8; else x = 0;
811 => x = (test != 0) << 3; */
812 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
813 {
814 target = expand_simple_binop (mode, ASHIFT,
815 target, GEN_INT (tmp), if_info->x, 0,
816 OPTAB_WIDEN);
817 }
818
819 /* if (test) x = -1; else x = b;
820 => x = -(test != 0) | b; */
821 else if (itrue == -1)
822 {
823 target = expand_simple_binop (mode, IOR,
824 target, GEN_INT (ifalse), if_info->x, 0,
825 OPTAB_WIDEN);
826 }
827
828 /* if (test) x = a; else x = b;
829 => x = (-(test != 0) & (b - a)) + a; */
830 else
831 {
832 target = expand_simple_binop (mode, AND,
833 target, GEN_INT (diff), if_info->x, 0,
834 OPTAB_WIDEN);
835 if (target)
836 target = expand_simple_binop (mode, PLUS,
837 target, GEN_INT (ifalse),
838 if_info->x, 0, OPTAB_WIDEN);
839 }
840
841 if (! target)
842 {
843 end_sequence ();
844 return FALSE;
845 }
846
847 if (target != if_info->x)
848 noce_emit_move_insn (if_info->x, target);
849
850 seq = get_insns ();
851 end_sequence ();
852
853 if (seq_contains_jump (seq))
854 return FALSE;
855
856 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
857
858 return TRUE;
859 }
860
861 return FALSE;
862 }
863
864 /* Convert "if (test) foo++" into "foo += (test != 0)", and
865 similarly for "foo--". */
866
867 static int
868 noce_try_addcc (if_info)
869 struct noce_if_info *if_info;
870 {
871 rtx target, seq;
872 int subtract, normalize;
873
874 if (! no_new_pseudos
875 /* Should be no `else' case to worry about. */
876 && if_info->b == if_info->x
877 && GET_CODE (if_info->a) == PLUS
878 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
879 && (reversed_comparison_code (if_info->cond, if_info->jump)
880 != UNKNOWN))
881 {
882 rtx cond = if_info->cond;
883 enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
884
885 /* First try to use addcc pattern. */
886 if (general_operand (XEXP (cond, 0), VOIDmode)
887 && general_operand (XEXP (cond, 1), VOIDmode))
888 {
889 start_sequence ();
890 target = emit_conditional_add (if_info->x, code,
891 XEXP (cond, 0), XEXP (cond, 1),
892 VOIDmode,
893 if_info->b, XEXP (if_info->a, 1),
894 GET_MODE (if_info->x),
895 (code == LTU || code == GEU
896 || code == LEU || code == GTU));
897 if (target)
898 {
899 if (target != if_info->x)
900 noce_emit_move_insn (if_info->x, target);
901
902 seq = get_insns ();
903 end_sequence ();
904 emit_insn_before_scope (seq, if_info->jump,
905 INSN_SCOPE (if_info->insn_a));
906 return TRUE;
907 }
908 end_sequence ();
909 }
910
911 /* If that fails, construct conditional increment or decrement using
912 setcc. */
913 if (BRANCH_COST >= 2
914 && (XEXP (if_info->a, 1) == const1_rtx
915 || XEXP (if_info->a, 1) == constm1_rtx))
916 {
917 start_sequence ();
918 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
919 subtract = 0, normalize = 0;
920 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
921 subtract = 1, normalize = 0;
922 else
923 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
924
925
926 target = noce_emit_store_flag (if_info,
927 gen_reg_rtx (GET_MODE (if_info->x)),
928 1, normalize);
929
930 if (target)
931 target = expand_simple_binop (GET_MODE (if_info->x),
932 subtract ? MINUS : PLUS,
933 if_info->x, target, if_info->x,
934 0, OPTAB_WIDEN);
935 if (target)
936 {
937 if (target != if_info->x)
938 noce_emit_move_insn (if_info->x, target);
939
940 seq = get_insns ();
941 end_sequence ();
942
943 if (seq_contains_jump (seq))
944 return FALSE;
945
946 emit_insn_before_scope (seq, if_info->jump,
947 INSN_SCOPE (if_info->insn_a));
948
949 return TRUE;
950 }
951 end_sequence ();
952 }
953 }
954
955 return FALSE;
956 }
957
958 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
959
960 static int
961 noce_try_store_flag_mask (if_info)
962 struct noce_if_info *if_info;
963 {
964 rtx target, seq;
965 int reversep;
966
967 reversep = 0;
968 if (! no_new_pseudos
969 && (BRANCH_COST >= 2
970 || STORE_FLAG_VALUE == -1)
971 && ((if_info->a == const0_rtx
972 && rtx_equal_p (if_info->b, if_info->x))
973 || ((reversep = (reversed_comparison_code (if_info->cond,
974 if_info->jump)
975 != UNKNOWN))
976 && if_info->b == const0_rtx
977 && rtx_equal_p (if_info->a, if_info->x))))
978 {
979 start_sequence ();
980 target = noce_emit_store_flag (if_info,
981 gen_reg_rtx (GET_MODE (if_info->x)),
982 reversep, -1);
983 if (target)
984 target = expand_simple_binop (GET_MODE (if_info->x), AND,
985 if_info->x, target, if_info->x, 0,
986 OPTAB_WIDEN);
987
988 if (target)
989 {
990 if (target != if_info->x)
991 noce_emit_move_insn (if_info->x, target);
992
993 seq = get_insns ();
994 end_sequence ();
995
996 if (seq_contains_jump (seq))
997 return FALSE;
998
999 emit_insn_before_scope (seq, if_info->jump,
1000 INSN_SCOPE (if_info->insn_a));
1001
1002 return TRUE;
1003 }
1004
1005 end_sequence ();
1006 }
1007
1008 return FALSE;
1009 }
1010
1011 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
1012
1013 static rtx
1014 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
1015 struct noce_if_info *if_info;
1016 rtx x, cmp_a, cmp_b, vfalse, vtrue;
1017 enum rtx_code code;
1018 {
1019 /* If earliest == jump, try to build the cmove insn directly.
1020 This is helpful when combine has created some complex condition
1021 (like for alpha's cmovlbs) that we can't hope to regenerate
1022 through the normal interface. */
1023
1024 if (if_info->cond_earliest == if_info->jump)
1025 {
1026 rtx tmp;
1027
1028 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1029 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1030 tmp = gen_rtx_SET (VOIDmode, x, tmp);
1031
1032 start_sequence ();
1033 tmp = emit_insn (tmp);
1034
1035 if (recog_memoized (tmp) >= 0)
1036 {
1037 tmp = get_insns ();
1038 end_sequence ();
1039 emit_insn (tmp);
1040
1041 return x;
1042 }
1043
1044 end_sequence ();
1045 }
1046
1047 /* Don't even try if the comparison operands are weird. */
1048 if (! general_operand (cmp_a, GET_MODE (cmp_a))
1049 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1050 return NULL_RTX;
1051
1052 #if HAVE_conditional_move
1053 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1054 vtrue, vfalse, GET_MODE (x),
1055 (code == LTU || code == GEU
1056 || code == LEU || code == GTU));
1057 #else
   1058 /* We'll never get here, as noce_process_if_block doesn't call the
   1059 functions involved. Ifdef code, however, should be discouraged
   1060 because it leads to typos in the code not selected. But without
   1061 HAVE_conditional_move, emit_conditional_move won't exist either. */
1062 return NULL_RTX;
1063 #endif
1064 }
1065
1066 /* Try only simple constants and registers here. More complex cases
1067 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1068 has had a go at it. */
1069
1070 static int
1071 noce_try_cmove (if_info)
1072 struct noce_if_info *if_info;
1073 {
1074 enum rtx_code code;
1075 rtx target, seq;
1076
1077 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1078 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1079 {
1080 start_sequence ();
1081
1082 code = GET_CODE (if_info->cond);
1083 target = noce_emit_cmove (if_info, if_info->x, code,
1084 XEXP (if_info->cond, 0),
1085 XEXP (if_info->cond, 1),
1086 if_info->a, if_info->b);
1087
1088 if (target)
1089 {
1090 if (target != if_info->x)
1091 noce_emit_move_insn (if_info->x, target);
1092
1093 seq = get_insns ();
1094 end_sequence ();
1095 emit_insn_before_scope (seq, if_info->jump,
1096 INSN_SCOPE (if_info->insn_a));
1097 return TRUE;
1098 }
1099 else
1100 {
1101 end_sequence ();
1102 return FALSE;
1103 }
1104 }
1105
1106 return FALSE;
1107 }
1108
1109 /* Try more complex cases involving conditional_move. */
1110
1111 static int
1112 noce_try_cmove_arith (if_info)
1113 struct noce_if_info *if_info;
1114 {
1115 rtx a = if_info->a;
1116 rtx b = if_info->b;
1117 rtx x = if_info->x;
1118 rtx insn_a, insn_b;
1119 rtx tmp, target;
1120 int is_mem = 0;
1121 enum rtx_code code;
1122
1123 /* A conditional move from two memory sources is equivalent to a
1124 conditional on their addresses followed by a load. Don't do this
1125 early because it'll screw alias analysis. Note that we've
1126 already checked for no side effects. */
1127 if (! no_new_pseudos && cse_not_expected
1128 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
1129 && BRANCH_COST >= 5)
1130 {
1131 a = XEXP (a, 0);
1132 b = XEXP (b, 0);
1133 x = gen_reg_rtx (Pmode);
1134 is_mem = 1;
1135 }
1136
1137 /* ??? We could handle this if we knew that a load from A or B could
1138 not fault. This is also true if we've already loaded
1139 from the address along the path from ENTRY. */
1140 else if (may_trap_p (a) || may_trap_p (b))
1141 return FALSE;
1142
1143 /* if (test) x = a + b; else x = c - d;
1144 => y = a + b;
1145 x = c - d;
1146 if (test)
1147 x = y;
1148 */
1149
1150 code = GET_CODE (if_info->cond);
1151 insn_a = if_info->insn_a;
1152 insn_b = if_info->insn_b;
1153
1154 /* Possibly rearrange operands to make things come out more natural. */
1155 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1156 {
1157 int reversep = 0;
1158 if (rtx_equal_p (b, x))
1159 reversep = 1;
1160 else if (general_operand (b, GET_MODE (b)))
1161 reversep = 1;
1162
1163 if (reversep)
1164 {
1165 code = reversed_comparison_code (if_info->cond, if_info->jump);
1166 tmp = a, a = b, b = tmp;
1167 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1168 }
1169 }
1170
1171 start_sequence ();
1172
1173 /* If either operand is complex, load it into a register first.
1174 The best way to do this is to copy the original insn. In this
1175 way we preserve any clobbers etc that the insn may have had.
1176 This is of course not possible in the IS_MEM case. */
1177 if (! general_operand (a, GET_MODE (a)))
1178 {
1179 rtx set;
1180
1181 if (no_new_pseudos)
1182 goto end_seq_and_fail;
1183
1184 if (is_mem)
1185 {
1186 tmp = gen_reg_rtx (GET_MODE (a));
1187 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1188 }
1189 else if (! insn_a)
1190 goto end_seq_and_fail;
1191 else
1192 {
1193 a = gen_reg_rtx (GET_MODE (a));
1194 tmp = copy_rtx (insn_a);
1195 set = single_set (tmp);
1196 SET_DEST (set) = a;
1197 tmp = emit_insn (PATTERN (tmp));
1198 }
1199 if (recog_memoized (tmp) < 0)
1200 goto end_seq_and_fail;
1201 }
1202 if (! general_operand (b, GET_MODE (b)))
1203 {
1204 rtx set;
1205
1206 if (no_new_pseudos)
1207 goto end_seq_and_fail;
1208
1209 if (is_mem)
1210 {
1211 tmp = gen_reg_rtx (GET_MODE (b));
1212 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1213 }
1214 else if (! insn_b)
1215 goto end_seq_and_fail;
1216 else
1217 {
1218 b = gen_reg_rtx (GET_MODE (b));
1219 tmp = copy_rtx (insn_b);
1220 set = single_set (tmp);
1221 SET_DEST (set) = b;
1222 tmp = emit_insn (PATTERN (tmp));
1223 }
1224 if (recog_memoized (tmp) < 0)
1225 goto end_seq_and_fail;
1226 }
1227
1228 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1229 XEXP (if_info->cond, 1), a, b);
1230
1231 if (! target)
1232 goto end_seq_and_fail;
1233
   1234 /* If we substituted memory addresses above (IS_MEM), emit the actual load now. */
1235 if (is_mem)
1236 {
1237 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1238
1239 /* Copy over flags as appropriate. */
1240 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1241 MEM_VOLATILE_P (tmp) = 1;
1242 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1243 MEM_IN_STRUCT_P (tmp) = 1;
1244 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1245 MEM_SCALAR_P (tmp) = 1;
1246 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1247 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1248 set_mem_align (tmp,
1249 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1250
1251 noce_emit_move_insn (if_info->x, tmp);
1252 }
1253 else if (target != x)
1254 noce_emit_move_insn (x, target);
1255
1256 tmp = get_insns ();
1257 end_sequence ();
1258 emit_insn_before_scope (tmp, if_info->jump, INSN_SCOPE (if_info->insn_a));
1259 return TRUE;
1260
1261 end_seq_and_fail:
1262 end_sequence ();
1263 return FALSE;
1264 }
1265
1266 /* For most cases, the simplified condition we found is the best
1267 choice, but this is not the case for the min/max/abs transforms.
1268 For these we wish to know that it is A or B in the condition. */
1269
1270 static rtx
1271 noce_get_alt_condition (if_info, target, earliest)
1272 struct noce_if_info *if_info;
1273 rtx target;
1274 rtx *earliest;
1275 {
1276 rtx cond, set, insn;
1277 int reverse;
1278
1279 /* If target is already mentioned in the known condition, return it. */
1280 if (reg_mentioned_p (target, if_info->cond))
1281 {
1282 *earliest = if_info->cond_earliest;
1283 return if_info->cond;
1284 }
1285
1286 set = pc_set (if_info->jump);
1287 cond = XEXP (SET_SRC (set), 0);
1288 reverse
1289 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1290 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1291
1292 /* If we're looking for a constant, try to make the conditional
1293 have that constant in it. There are two reasons why it may
1294 not have the constant we want:
1295
1296 1. GCC may have needed to put the constant in a register, because
1297 the target can't compare directly against that constant. For
1298 this case, we look for a SET immediately before the comparison
1299 that puts a constant in that register.
1300
1301 2. GCC may have canonicalized the conditional, for example
1302 replacing "if x < 4" with "if x <= 3". We can undo that (or
1303 make equivalent types of changes) to get the constants we need
1304 if they're off by one in the right direction. */
1305
1306 if (GET_CODE (target) == CONST_INT)
1307 {
1308 enum rtx_code code = GET_CODE (if_info->cond);
1309 rtx op_a = XEXP (if_info->cond, 0);
1310 rtx op_b = XEXP (if_info->cond, 1);
1311 rtx prev_insn;
1312
1313 /* First, look to see if we put a constant in a register. */
1314 prev_insn = PREV_INSN (if_info->cond_earliest);
1315 if (prev_insn
1316 && INSN_P (prev_insn)
1317 && GET_CODE (PATTERN (prev_insn)) == SET)
1318 {
1319 rtx src = find_reg_equal_equiv_note (prev_insn);
1320 if (!src)
1321 src = SET_SRC (PATTERN (prev_insn));
1322 if (GET_CODE (src) == CONST_INT)
1323 {
1324 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1325 op_a = src;
1326 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1327 op_b = src;
1328
1329 if (GET_CODE (op_a) == CONST_INT)
1330 {
1331 rtx tmp = op_a;
1332 op_a = op_b;
1333 op_b = tmp;
1334 code = swap_condition (code);
1335 }
1336 }
1337 }
1338
1339 /* Now, look to see if we can get the right constant by
1340 adjusting the conditional. */
1341 if (GET_CODE (op_b) == CONST_INT)
1342 {
1343 HOST_WIDE_INT desired_val = INTVAL (target);
1344 HOST_WIDE_INT actual_val = INTVAL (op_b);
1345
1346 switch (code)
1347 {
1348 case LT:
1349 if (actual_val == desired_val + 1)
1350 {
1351 code = LE;
1352 op_b = GEN_INT (desired_val);
1353 }
1354 break;
1355 case LE:
1356 if (actual_val == desired_val - 1)
1357 {
1358 code = LT;
1359 op_b = GEN_INT (desired_val);
1360 }
1361 break;
1362 case GT:
1363 if (actual_val == desired_val - 1)
1364 {
1365 code = GE;
1366 op_b = GEN_INT (desired_val);
1367 }
1368 break;
1369 case GE:
1370 if (actual_val == desired_val + 1)
1371 {
1372 code = GT;
1373 op_b = GEN_INT (desired_val);
1374 }
1375 break;
1376 default:
1377 break;
1378 }
1379 }
1380
1381 /* If we made any changes, generate a new conditional that is
1382 equivalent to what we started with, but has the right
1383 constants in it. */
1384 if (code != GET_CODE (if_info->cond)
1385 || op_a != XEXP (if_info->cond, 0)
1386 || op_b != XEXP (if_info->cond, 1))
1387 {
1388 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1389 *earliest = if_info->cond_earliest;
1390 return cond;
1391 }
1392 }
1393
1394 cond = canonicalize_condition (if_info->jump, cond, reverse,
1395 earliest, target);
1396 if (! cond || ! reg_mentioned_p (target, cond))
1397 return NULL;
1398
1399 /* We almost certainly searched back to a different place.
1400 Need to re-verify correct lifetimes. */
1401
1402 /* X may not be mentioned in the range (cond_earliest, jump]. */
1403 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1404 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1405 return NULL;
1406
1407 /* A and B may not be modified in the range [cond_earliest, jump). */
1408 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1409 if (INSN_P (insn)
1410 && (modified_in_p (if_info->a, insn)
1411 || modified_in_p (if_info->b, insn)))
1412 return NULL;
1413
1414 return cond;
1415 }
1416
1417 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1418
1419 static int
1420 noce_try_minmax (if_info)
1421 struct noce_if_info *if_info;
1422 {
1423 rtx cond, earliest, target, seq;
1424 enum rtx_code code, op;
1425 int unsignedp;
1426
1427 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1428 if (no_new_pseudos)
1429 return FALSE;
1430
1431 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1432 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1433 to get the target to tell us... */
1434 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1435 || HONOR_NANS (GET_MODE (if_info->x)))
1436 return FALSE;
1437
1438 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1439 if (!cond)
1440 return FALSE;
1441
1442 /* Verify the condition is of the form we expect, and canonicalize
1443 the comparison code. */
1444 code = GET_CODE (cond);
1445 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1446 {
1447 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1448 return FALSE;
1449 }
1450 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1451 {
1452 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1453 return FALSE;
1454 code = swap_condition (code);
1455 }
1456 else
1457 return FALSE;
1458
1459 /* Determine what sort of operation this is. Note that the code is for
1460 a taken branch, so the code->operation mapping appears backwards. */
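/* E.g. a branch taken on (A < B) means X receives B, the larger value,
   so LT maps to SMAX.  */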
1461 switch (code)
1462 {
1463 case LT:
1464 case LE:
1465 case UNLT:
1466 case UNLE:
1467 op = SMAX;
1468 unsignedp = 0;
1469 break;
1470 case GT:
1471 case GE:
1472 case UNGT:
1473 case UNGE:
1474 op = SMIN;
1475 unsignedp = 0;
1476 break;
1477 case LTU:
1478 case LEU:
1479 op = UMAX;
1480 unsignedp = 1;
1481 break;
1482 case GTU:
1483 case GEU:
1484 op = UMIN;
1485 unsignedp = 1;
1486 break;
1487 default:
1488 return FALSE;
1489 }
1490
1491 start_sequence ();
1492
1493 target = expand_simple_binop (GET_MODE (if_info->x), op,
1494 if_info->a, if_info->b,
1495 if_info->x, unsignedp, OPTAB_WIDEN);
1496 if (! target)
1497 {
1498 end_sequence ();
1499 return FALSE;
1500 }
1501 if (target != if_info->x)
1502 noce_emit_move_insn (if_info->x, target);
1503
1504 seq = get_insns ();
1505 end_sequence ();
1506
1507 if (seq_contains_jump (seq))
1508 return FALSE;
1509
1510 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1511 if_info->cond = cond;
1512 if_info->cond_earliest = earliest;
1513
1514 return TRUE;
1515 }
1516
1517 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1518
1519 static int
1520 noce_try_abs (if_info)
1521 struct noce_if_info *if_info;
1522 {
1523 rtx cond, earliest, target, seq, a, b, c;
1524 int negate;
1525
1526 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1527 if (no_new_pseudos)
1528 return FALSE;
1529
1530 /* Recognize A and B as constituting an ABS or NABS. */
1531 a = if_info->a;
1532 b = if_info->b;
1533 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1534 negate = 0;
1535 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1536 {
1537 c = a; a = b; b = c;
1538 negate = 1;
1539 }
1540 else
1541 return FALSE;
1542
1543 cond = noce_get_alt_condition (if_info, b, &earliest);
1544 if (!cond)
1545 return FALSE;
1546
1547 /* Verify the condition is of the form we expect. */
1548 if (rtx_equal_p (XEXP (cond, 0), b))
1549 c = XEXP (cond, 1);
1550 else if (rtx_equal_p (XEXP (cond, 1), b))
1551 c = XEXP (cond, 0);
1552 else
1553 return FALSE;
1554
1555 /* Verify that C is zero. Search backward through the block for
1556 a REG_EQUAL note if necessary. */
1557 if (REG_P (c))
1558 {
1559 rtx insn, note = NULL;
1560 for (insn = earliest;
1561 insn != if_info->test_bb->head;
1562 insn = PREV_INSN (insn))
1563 if (INSN_P (insn)
1564 && ((note = find_reg_note (insn, REG_EQUAL, c))
1565 || (note = find_reg_note (insn, REG_EQUIV, c))))
1566 break;
1567 if (! note)
1568 return FALSE;
1569 c = XEXP (note, 0);
1570 }
1571 if (GET_CODE (c) == MEM
1572 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1573 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1574 c = get_pool_constant (XEXP (c, 0));
1575
1576 /* Work around funny ideas get_condition has wrt canonicalization.
1577 Note that these rtx constants are known to be CONST_INT, and
1578 therefore imply integer comparisons. */
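/* I.e. also accept B > -1 and B < 1 as comparisons of B against zero,
   since for integers x > -1 means x >= 0 and x < 1 means x <= 0.  */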
1579 if (c == constm1_rtx && GET_CODE (cond) == GT)
1580 ;
1581 else if (c == const1_rtx && GET_CODE (cond) == LT)
1582 ;
1583 else if (c != CONST0_RTX (GET_MODE (b)))
1584 return FALSE;
1585
1586 /* Determine what sort of operation this is. */
1587 switch (GET_CODE (cond))
1588 {
1589 case LT:
1590 case LE:
1591 case UNLT:
1592 case UNLE:
1593 negate = !negate;
1594 break;
1595 case GT:
1596 case GE:
1597 case UNGT:
1598 case UNGE:
1599 break;
1600 default:
1601 return FALSE;
1602 }
1603
1604 start_sequence ();
1605
1606 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
1607
   1608 /* ??? It's a quandary whether cmove would be better here, especially
1609 for integers. Perhaps combine will clean things up. */
1610 if (target && negate)
1611 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1612
1613 if (! target)
1614 {
1615 end_sequence ();
1616 return FALSE;
1617 }
1618
1619 if (target != if_info->x)
1620 noce_emit_move_insn (if_info->x, target);
1621
1622 seq = get_insns ();
1623 end_sequence ();
1624
1625 if (seq_contains_jump (seq))
1626 return FALSE;
1627
1628 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1629 if_info->cond = cond;
1630 if_info->cond_earliest = earliest;
1631
1632 return TRUE;
1633 }
1634
1635 /* Similar to get_condition, only the resulting condition must be
1636 valid at JUMP, instead of at EARLIEST. */
1637
1638 static rtx
1639 noce_get_condition (jump, earliest)
1640 rtx jump;
1641 rtx *earliest;
1642 {
1643 rtx cond, set, tmp, insn;
1644 bool reverse;
1645
1646 if (! any_condjump_p (jump))
1647 return NULL_RTX;
1648
1649 set = pc_set (jump);
1650
1651 /* If this branches to JUMP_LABEL when the condition is false,
1652 reverse the condition. */
1653 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1654 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
1655
1656 /* If the condition variable is a register and is MODE_INT, accept it. */
1657
1658 cond = XEXP (SET_SRC (set), 0);
1659 tmp = XEXP (cond, 0);
1660 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
1661 {
1662 *earliest = jump;
1663
1664 if (reverse)
1665 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1666 GET_MODE (cond), tmp, XEXP (cond, 1));
1667 return cond;
1668 }
1669
1670 /* Otherwise, fall back on canonicalize_condition to do the dirty
1671 work of manipulating MODE_CC values and COMPARE rtx codes. */
1672
1673 tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX);
1674 if (!tmp)
1675 return NULL_RTX;
1676
1677 /* We are going to insert code before JUMP, not before EARLIEST.
1678 We must therefore be certain that the given condition is valid
1679 at JUMP by virtue of not having been modified since. */
1680 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1681 if (INSN_P (insn) && modified_in_p (tmp, insn))
1682 break;
1683 if (insn == jump)
1684 return tmp;
1685
1686 /* The condition was modified. See if we can get a partial result
1687 that doesn't follow all the reversals. Perhaps combine can fold
1688 them together later. */
1689 tmp = XEXP (tmp, 0);
1690 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
1691 return NULL_RTX;
1692 tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp);
1693 if (!tmp)
1694 return NULL_RTX;
1695
1696 /* For sanity's sake, re-validate the new result. */
1697 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1698 if (INSN_P (insn) && modified_in_p (tmp, insn))
1699 return NULL_RTX;
1700
1701 return tmp;
1702 }
1703
1704 /* Return true if OP is ok for if-then-else processing. */
1705
1706 static int
1707 noce_operand_ok (op)
1708 rtx op;
1709 {
1710 /* We special-case memories, so handle any of them with
1711 no address side effects. */
1712 if (GET_CODE (op) == MEM)
1713 return ! side_effects_p (XEXP (op, 0));
1714
1715 if (side_effects_p (op))
1716 return FALSE;
1717
1718 return ! may_trap_p (op);
1719 }
1720
1721 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1722 without using conditional execution. Return TRUE if we were
1723 successful at converting the block. */
1724
1725 static int
1726 noce_process_if_block (ce_info)
1727 struct ce_if_block * ce_info;
1728 {
1729 basic_block test_bb = ce_info->test_bb; /* test block */
1730 basic_block then_bb = ce_info->then_bb; /* THEN */
1731 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1732 struct noce_if_info if_info;
1733 rtx insn_a, insn_b;
1734 rtx set_a, set_b;
1735 rtx orig_x, x, a, b;
1736 rtx jump, cond;
1737
1738 /* We're looking for patterns of the form
1739
1740 (1) if (...) x = a; else x = b;
1741 (2) x = b; if (...) x = a;
1742 (3) if (...) x = a; // as if with an initial x = x.
1743
   1744 The latter patterns require jumps to be more expensive.
1745
1746 ??? For future expansion, look for multiple X in such patterns. */
1747
1748 /* If test is comprised of && or || elements, don't handle it unless it is
1749 the special case of && elements without an ELSE block. */
1750 if (ce_info->num_multiple_test_blocks)
1751 {
1752 if (else_bb || ! ce_info->and_and_p)
1753 return FALSE;
1754
1755 ce_info->test_bb = test_bb = ce_info->last_test_bb;
1756 ce_info->num_multiple_test_blocks = 0;
1757 ce_info->num_and_and_blocks = 0;
1758 ce_info->num_or_or_blocks = 0;
1759 }
1760
1761 /* If this is not a standard conditional jump, we can't parse it. */
1762 jump = test_bb->end;
1763 cond = noce_get_condition (jump, &if_info.cond_earliest);
1764 if (! cond)
1765 return FALSE;
1766
1767 /* If the conditional jump is more than just a conditional
1768 jump, then we can not do if-conversion on this block. */
1769 if (! onlyjump_p (jump))
1770 return FALSE;
1771
1772 /* We must be comparing objects whose modes imply the size. */
1773 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1774 return FALSE;
1775
1776 /* Look for one of the potential sets. */
1777 insn_a = first_active_insn (then_bb);
1778 if (! insn_a
1779 || insn_a != last_active_insn (then_bb, FALSE)
1780 || (set_a = single_set (insn_a)) == NULL_RTX)
1781 return FALSE;
1782
1783 x = SET_DEST (set_a);
1784 a = SET_SRC (set_a);
1785
1786 /* Look for the other potential set. Make sure we've got equivalent
1787 destinations. */
1788 /* ??? This is overconservative. Storing to two different mems is
1789 as easy as conditionally computing the address. Storing to a
1790 single mem merely requires a scratch memory to use as one of the
1791 destination addresses; often the memory immediately below the
1792 stack pointer is available for this. */
1793 set_b = NULL_RTX;
1794 if (else_bb)
1795 {
1796 insn_b = first_active_insn (else_bb);
1797 if (! insn_b
1798 || insn_b != last_active_insn (else_bb, FALSE)
1799 || (set_b = single_set (insn_b)) == NULL_RTX
1800 || ! rtx_equal_p (x, SET_DEST (set_b)))
1801 return FALSE;
1802 }
1803 else
1804 {
1805 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1806 /* We're going to be moving the evaluation of B down from above
1807 COND_EARLIEST to JUMP. Make sure the relevant data is still
1808 intact. */
1809 if (! insn_b
1810 || GET_CODE (insn_b) != INSN
1811 || (set_b = single_set (insn_b)) == NULL_RTX
1812 || ! rtx_equal_p (x, SET_DEST (set_b))
1813 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
1814 || modified_between_p (SET_SRC (set_b),
1815 PREV_INSN (if_info.cond_earliest), jump)
1816 /* Likewise with X. In particular this can happen when
1817 noce_get_condition looks farther back in the instruction
1818 stream than one might expect. */
1819 || reg_overlap_mentioned_p (x, cond)
1820 || reg_overlap_mentioned_p (x, a)
1821 || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
1822 insn_b = set_b = NULL_RTX;
1823 }
1824 b = (set_b ? SET_SRC (set_b) : x);
1825
1826 /* Only operate on register destinations, and even then avoid extending
1827 the lifetime of hard registers on small register class machines. */
1828 orig_x = x;
1829 if (GET_CODE (x) != REG
1830 || (SMALL_REGISTER_CLASSES
1831 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1832 {
1833 if (no_new_pseudos || GET_MODE (x) == BLKmode)
1834 return FALSE;
1835 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1836 ? XEXP (x, 0) : x));
1837 }
1838
1839 /* Don't operate on sources that may trap or are volatile. */
1840 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1841 return FALSE;
1842
1843 /* Set up the info block for our subroutines. */
1844 if_info.test_bb = test_bb;
1845 if_info.cond = cond;
1846 if_info.jump = jump;
1847 if_info.insn_a = insn_a;
1848 if_info.insn_b = insn_b;
1849 if_info.x = x;
1850 if_info.a = a;
1851 if_info.b = b;
1852
1853 /* Try optimizations in some approximation of a useful order. */
1854 /* ??? Should first look to see if X is live incoming at all. If it
1855 isn't, we don't need anything but an unconditional set. */
1856
1857 /* Look and see if A and B are really the same. Avoid creating silly
1858 cmove constructs that no one will fix up later. */
1859 if (rtx_equal_p (a, b))
1860 {
1861 /* If we have an INSN_B, we don't have to create any new rtl. Just
1862 move the instruction that we already have. If we don't have an
1863 INSN_B, that means that A == X, and we've got a noop move. In
1864 that case don't do anything and let the code below delete INSN_A. */
1865 if (insn_b && else_bb)
1866 {
1867 rtx note;
1868
1869 if (else_bb && insn_b == else_bb->end)
1870 else_bb->end = PREV_INSN (insn_b);
1871 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
1872
1873 /* If there was a REG_EQUAL note, delete it since it may have been
1874 true due to this insn being after a jump. */
1875 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1876 remove_note (insn_b, note);
1877
1878 insn_b = NULL_RTX;
1879 }
1880 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1881 x must be executed twice. */
1882 else if (insn_b && side_effects_p (orig_x))
1883 return FALSE;
1884
1885 x = orig_x;
1886 goto success;
1887 }
1888
1889 if (noce_try_store_flag (&if_info))
1890 goto success;
1891 if (noce_try_minmax (&if_info))
1892 goto success;
1893 if (noce_try_abs (&if_info))
1894 goto success;
1895 if (HAVE_conditional_move
1896 && noce_try_cmove (&if_info))
1897 goto success;
1898 if (! HAVE_conditional_execution)
1899 {
1900 if (noce_try_store_flag_constants (&if_info))
1901 goto success;
1902 if (noce_try_addcc (&if_info))
1903 goto success;
1904 if (noce_try_store_flag_mask (&if_info))
1905 goto success;
1906 if (HAVE_conditional_move
1907 && noce_try_cmove_arith (&if_info))
1908 goto success;
1909 }
1910
1911 return FALSE;
1912
1913 success:
1914 /* The original sets may now be killed. */
1915 delete_insn (insn_a);
1916
1917 /* Several special cases here: First, we may have reused insn_b above,
1918 in which case insn_b is now NULL. Second, we want to delete insn_b
   1919 if it came from the ELSE block, because it follows the now correct
1920 write that appears in the TEST block. However, if we got insn_b from
1921 the TEST block, it may in fact be loading data needed for the comparison.
1922 We'll let life_analysis remove the insn if it's really dead. */
1923 if (insn_b && else_bb)
1924 delete_insn (insn_b);
1925
1926 /* The new insns will have been inserted immediately before the jump. We
1927 should be able to remove the jump with impunity, but the condition itself
1928 may have been modified by gcse to be shared across basic blocks. */
1929 delete_insn (jump);
1930
1931 /* If we used a temporary, fix it up now. */
1932 if (orig_x != x)
1933 {
1934 start_sequence ();
1935 noce_emit_move_insn (copy_rtx (orig_x), x);
1936 insn_b = get_insns ();
1937 end_sequence ();
1938
1939 emit_insn_after_scope (insn_b, test_bb->end, INSN_SCOPE (insn_a));
1940 }
1941
1942 /* Merge the blocks! */
1943 merge_if_block (ce_info);
1944
1945 return TRUE;
1946 }
1947 \f
1948 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1949 straight line code. Return true if successful. */
1950
1951 static int
1952 process_if_block (ce_info)
1953 struct ce_if_block * ce_info;
1954 {
1955 if (! reload_completed
1956 && noce_process_if_block (ce_info))
1957 return TRUE;
1958
1959 if (HAVE_conditional_execution && reload_completed)
1960 {
1961 /* If we have && and || tests, first try to combine them into the
1962 conditional code, and if that fails, go back and handle the block
1963 without the && and ||, which at present handles the && case only
1964 when there is no ELSE block. */
1965 if (cond_exec_process_if_block (ce_info, TRUE))
1966 return TRUE;
1967
1968 if (ce_info->num_multiple_test_blocks)
1969 {
1970 cancel_changes (0);
1971
1972 if (cond_exec_process_if_block (ce_info, FALSE))
1973 return TRUE;
1974 }
1975 }
1976
1977 return FALSE;
1978 }
1979
1980 /* Merge the blocks and mark for local life update. */
1981
1982 static void
1983 merge_if_block (ce_info)
1984 struct ce_if_block * ce_info;
1985 {
1986 basic_block test_bb = ce_info->test_bb; /* last test block */
1987 basic_block then_bb = ce_info->then_bb; /* THEN */
1988 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1989 basic_block join_bb = ce_info->join_bb; /* join block */
1990 basic_block combo_bb;
1991
1992 /* All block merging is done into the lower block numbers. */
1993
1994 combo_bb = test_bb;
1995
1996 /* Merge any basic blocks to handle && and || subtests. Each of
1997 the blocks is on the fallthru path from the predecessor block. */
1998 if (ce_info->num_multiple_test_blocks > 0)
1999 {
2000 basic_block bb = test_bb;
2001 basic_block last_test_bb = ce_info->last_test_bb;
2002 basic_block fallthru = block_fallthru (bb);
2003
2004 do
2005 {
2006 bb = fallthru;
2007 fallthru = block_fallthru (bb);
2008 if (post_dominators)
2009 delete_from_dominance_info (post_dominators, bb);
2010 merge_blocks_nomove (combo_bb, bb);
2011 num_removed_blocks++;
2012 }
2013 while (bb != last_test_bb);
2014 }
2015
2016 /* Merge TEST block into THEN block. Normally the THEN block won't have a
2017 label, but it might if there were || tests. That label's count should be
2018 zero, and it normally should be removed. */
2019
2020 if (then_bb)
2021 {
2022 if (combo_bb->global_live_at_end)
2023 COPY_REG_SET (combo_bb->global_live_at_end,
2024 then_bb->global_live_at_end);
2025 if (post_dominators)
2026 delete_from_dominance_info (post_dominators, then_bb);
2027 merge_blocks_nomove (combo_bb, then_bb);
2028 num_removed_blocks++;
2029 }
2030
2031 /* The ELSE block, if it existed, had a label.  That label's count
2032 will almost always be zero, but odd things can happen when labels
2033 get their addresses taken. */
2034 if (else_bb)
2035 {
2036 if (post_dominators)
2037 delete_from_dominance_info (post_dominators, else_bb);
2038 merge_blocks_nomove (combo_bb, else_bb);
2039 num_removed_blocks++;
2040 }
2041
2042 /* If there was no join block reported, that means it was not adjacent
2043 to the others, and so we cannot merge them. */
2044
2045 if (! join_bb)
2046 {
2047 rtx last = combo_bb->end;
2048
2049 /* The outgoing edge for the current COMBO block should already
2050 be correct. Verify this. */
2051 if (combo_bb->succ == NULL_EDGE)
2052 {
2053 if (find_reg_note (last, REG_NORETURN, NULL))
2054 ;
2055 else if (GET_CODE (last) == INSN
2056 && GET_CODE (PATTERN (last)) == TRAP_IF
2057 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
2058 ;
2059 else
2060 abort ();
2061 }
2062
2063 /* There should still be something at the end of the THEN or ELSE
2064 blocks taking us to our final destination. */
2065 else if (GET_CODE (last) == JUMP_INSN)
2066 ;
2067 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
2068 && GET_CODE (last) == CALL_INSN
2069 && SIBLING_CALL_P (last))
2070 ;
2071 else if ((combo_bb->succ->flags & EDGE_EH)
2072 && can_throw_internal (last))
2073 ;
2074 else
2075 abort ();
2076 }
2077
2078 /* The JOIN block may have had quite a number of other predecessors too.
2079 Since we've already merged the TEST, THEN and ELSE blocks, we should
2080 have only one remaining edge from our if-then-else diamond. If there
2081 is more than one remaining edge, it must come from elsewhere. There
2082 may be zero incoming edges if the THEN block didn't actually join
2083 back up (as with a call to abort). */
2084 else if ((join_bb->pred == NULL
2085 || join_bb->pred->pred_next == NULL)
2086 && join_bb != EXIT_BLOCK_PTR)
2087 {
2088 /* We can merge the JOIN. */
2089 if (combo_bb->global_live_at_end)
2090 COPY_REG_SET (combo_bb->global_live_at_end,
2091 join_bb->global_live_at_end);
2092
2093 if (post_dominators)
2094 delete_from_dominance_info (post_dominators, join_bb);
2095 merge_blocks_nomove (combo_bb, join_bb);
2096 num_removed_blocks++;
2097 }
2098 else
2099 {
2100 /* We cannot merge the JOIN. */
2101
2102 /* The outgoing edge for the current COMBO block should already
2103 be correct. Verify this. */
2104 if (combo_bb->succ->succ_next != NULL_EDGE
2105 || combo_bb->succ->dest != join_bb)
2106 abort ();
2107
2108 /* Remove the jump and cruft from the end of the COMBO block. */
2109 if (join_bb != EXIT_BLOCK_PTR)
2110 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
2111 }
2112
2113 num_updated_if_blocks++;
2114 }
2115 \f
2116 /* Find a block ending in a simple IF condition and try to transform it
2117 in some way. When converting a multi-block condition, put the new code
2118 in the first such block and delete the rest. Return a pointer to this
2119 first block if some transformation was done. Return NULL otherwise. */
2120
2121 static basic_block
2122 find_if_header (test_bb, pass)
2123 basic_block test_bb;
2124 int pass;
2125 {
2126 ce_if_block_t ce_info;
2127 edge then_edge;
2128 edge else_edge;
2129
2130 /* The kind of block we're looking for has exactly two successors. */
2131 if ((then_edge = test_bb->succ) == NULL_EDGE
2132 || (else_edge = then_edge->succ_next) == NULL_EDGE
2133 || else_edge->succ_next != NULL_EDGE)
2134 return NULL;
2135
2136 /* Neither edge should be abnormal. */
2137 if ((then_edge->flags & EDGE_COMPLEX)
2138 || (else_edge->flags & EDGE_COMPLEX))
2139 return NULL;
2140
2141 /* The THEN edge is canonically the one that falls through. */
2142 if (then_edge->flags & EDGE_FALLTHRU)
2143 ;
2144 else if (else_edge->flags & EDGE_FALLTHRU)
2145 {
2146 edge e = else_edge;
2147 else_edge = then_edge;
2148 then_edge = e;
2149 }
2150 else
2151 /* Otherwise this must be a multiway branch of some sort. */
2152 return NULL;
2153
2154 memset ((PTR) &ce_info, '\0', sizeof (ce_info));
2155 ce_info.test_bb = test_bb;
2156 ce_info.then_bb = then_edge->dest;
2157 ce_info.else_bb = else_edge->dest;
2158 ce_info.pass = pass;
2159
2160 #ifdef IFCVT_INIT_EXTRA_FIELDS
2161 IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2162 #endif
2163
2164 if (find_if_block (&ce_info))
2165 goto success;
2166
2167 if (HAVE_trap && HAVE_conditional_trap
2168 && find_cond_trap (test_bb, then_edge, else_edge))
2169 goto success;
2170
2171 if (post_dominators
2172 && (! HAVE_conditional_execution || reload_completed))
2173 {
2174 if (find_if_case_1 (test_bb, then_edge, else_edge))
2175 goto success;
2176 if (find_if_case_2 (test_bb, then_edge, else_edge))
2177 goto success;
2178 }
2179
2180 return NULL;
2181
2182 success:
2183 if (rtl_dump_file)
2184 fprintf (rtl_dump_file, "Conversion succeeded on pass %d.\n", pass);
2185 return ce_info.test_bb;
2186 }
2187
2188 /* Determine whether CUR_BB has two edges, one of which falls through to the
2189 next block and the other of which jumps to TARGET_BB, so that we can tell
2190 if the block is part of an && test or an || test.  Return -1 if not, else
2191 return the number of non-note, non-jump, non-USE/CLOBBER insns in the block. */
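/* For example (a sketch only; the precise RTL depends on the target), the
   first test block generated for "if (a && b) ..." ends in a conditional
   jump around the second test and otherwise falls through to it, so it has
   exactly one fallthru edge plus one jump edge to TARGET_BB, and this
   function would return the count of the condition set-up insns that
   precede that jump.  */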
2192
2193 static int
2194 block_jumps_and_fallthru_p (cur_bb, target_bb)
2195 basic_block cur_bb;
2196 basic_block target_bb;
2197 {
2198 edge cur_edge;
2199 int fallthru_p = FALSE;
2200 int jump_p = FALSE;
2201 rtx insn;
2202 rtx end;
2203 int n_insns = 0;
2204
2205 if (!cur_bb || !target_bb)
2206 return -1;
2207
2208 /* If no edges, obviously it doesn't jump or fallthru. */
2209 if (cur_bb->succ == NULL_EDGE)
2210 return -1;
2211
2212 for (cur_edge = cur_bb->succ;
2213 cur_edge != NULL_EDGE;
2214 cur_edge = cur_edge->succ_next)
2215 {
2216 if (cur_edge->flags & EDGE_COMPLEX)
2217 /* Anything complex isn't what we want. */
2218 return -1;
2219
2220 else if (cur_edge->flags & EDGE_FALLTHRU)
2221 fallthru_p = TRUE;
2222
2223 else if (cur_edge->dest == target_bb)
2224 jump_p = TRUE;
2225
2226 else
2227 return -1;
2228 }
2229
2230 if ((jump_p & fallthru_p) == 0)
2231 return -1;
2232
2233 /* Don't allow calls in the block, since this is used to group && and ||
2234 together for conditional execution support.  ??? We should support
2235 conditional execution across calls for IA-64 some day, but
2236 for now it makes the code simpler. */
2237 end = cur_bb->end;
2238 insn = cur_bb->head;
2239
2240 while (insn != NULL_RTX)
2241 {
2242 if (GET_CODE (insn) == CALL_INSN)
2243 return -1;
2244
2245 if (INSN_P (insn)
2246 && GET_CODE (insn) != JUMP_INSN
2247 && GET_CODE (PATTERN (insn)) != USE
2248 && GET_CODE (PATTERN (insn)) != CLOBBER)
2249 n_insns++;
2250
2251 if (insn == end)
2252 break;
2253
2254 insn = NEXT_INSN (insn);
2255 }
2256
2257 return n_insns;
2258 }
2259
2260 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2261 block. If so, we'll try to convert the insns to not require the branch.
2262 Return TRUE if we were successful at converting the block. */
2263
2264 static int
2265 find_if_block (ce_info)
2266 struct ce_if_block * ce_info;
2267 {
2268 basic_block test_bb = ce_info->test_bb;
2269 basic_block then_bb = ce_info->then_bb;
2270 basic_block else_bb = ce_info->else_bb;
2271 basic_block join_bb = NULL_BLOCK;
2272 edge then_succ = then_bb->succ;
2273 edge else_succ = else_bb->succ;
2274 int then_predecessors;
2275 int else_predecessors;
2276 edge cur_edge;
2277 basic_block next;
2278
2279 ce_info->last_test_bb = test_bb;
2280
2281 /* Discover if any fall through predecessors of the current test basic block
2282 were && tests (which jump to the else block) or || tests (which jump to
2283 the then block). */
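  /* Purely for illustration (source level only; the actual block layout
     is target-dependent):

	if (a && b) THEN; else ELSE;

     is emitted as a block testing `a' that jumps to the ELSE code when
     `a' is false and falls through to the block testing `b'; for

	if (a || b) THEN; else ELSE;

     the block testing `a' jumps to the THEN code when `a' is true and
     falls through to the test of `b'.  In both cases the earlier tests
     are fallthru predecessors of the final test block, which the loop
     below walks backward to collect.  */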
2284 if (HAVE_conditional_execution && reload_completed
2285 && test_bb->pred != NULL_EDGE
2286 && test_bb->pred->pred_next == NULL_EDGE
2287 && test_bb->pred->flags == EDGE_FALLTHRU)
2288 {
2289 basic_block bb = test_bb->pred->src;
2290 basic_block target_bb;
2291 int max_insns = MAX_CONDITIONAL_EXECUTE;
2292 int n_insns;
2293
2294 /* Determine if the preceding block is an && or || block. */
2295 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
2296 {
2297 ce_info->and_and_p = TRUE;
2298 target_bb = else_bb;
2299 }
2300 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
2301 {
2302 ce_info->and_and_p = FALSE;
2303 target_bb = then_bb;
2304 }
2305 else
2306 target_bb = NULL_BLOCK;
2307
2308 if (target_bb && n_insns <= max_insns)
2309 {
2310 int total_insns = 0;
2311 int blocks = 0;
2312
2313 ce_info->last_test_bb = test_bb;
2314
2315 /* Found at least one && or || block, look for more. */
2316 do
2317 {
2318 ce_info->test_bb = test_bb = bb;
2319 total_insns += n_insns;
2320 blocks++;
2321
2322 if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
2323 break;
2324
2325 bb = bb->pred->src;
2326 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
2327 }
2328 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
2329
2330 ce_info->num_multiple_test_blocks = blocks;
2331 ce_info->num_multiple_test_insns = total_insns;
2332
2333 if (ce_info->and_and_p)
2334 ce_info->num_and_and_blocks = blocks;
2335 else
2336 ce_info->num_or_or_blocks = blocks;
2337 }
2338 }
2339
2340 /* Count the number of edges the THEN and ELSE blocks have. */
2341 then_predecessors = 0;
2342 for (cur_edge = then_bb->pred;
2343 cur_edge != NULL_EDGE;
2344 cur_edge = cur_edge->pred_next)
2345 {
2346 then_predecessors++;
2347 if (cur_edge->flags & EDGE_COMPLEX)
2348 return FALSE;
2349 }
2350
2351 else_predecessors = 0;
2352 for (cur_edge = else_bb->pred;
2353 cur_edge != NULL_EDGE;
2354 cur_edge = cur_edge->pred_next)
2355 {
2356 else_predecessors++;
2357 if (cur_edge->flags & EDGE_COMPLEX)
2358 return FALSE;
2359 }
2360
2361 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
2362 other than any || blocks which jump to the THEN block. */
2363 if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
2364 return FALSE;
2365
2366 /* The THEN block of an IF-THEN combo must have zero or one successors. */
2367 if (then_succ != NULL_EDGE
2368 && (then_succ->succ_next != NULL_EDGE
2369 || (then_succ->flags & EDGE_COMPLEX)
2370 || (flow2_completed && tablejump_p (then_bb->end, NULL, NULL))))
2371 return FALSE;
2372
2373 /* If the THEN block has no successors, conditional execution can still
2374 make a conditional call. Don't do this unless the ELSE block has
2375 only one incoming edge -- the CFG manipulation is too ugly otherwise.
2376 Check for the last insn of the THEN block being an indirect jump, which
2377 is listed as not having any successors, but confuses the rest of the CE
2378 code processing. ??? we should fix this in the future. */
2379 if (then_succ == NULL)
2380 {
2381 if (else_bb->pred->pred_next == NULL_EDGE)
2382 {
2383 rtx last_insn = then_bb->end;
2384
2385 while (last_insn
2386 && GET_CODE (last_insn) == NOTE
2387 && last_insn != then_bb->head)
2388 last_insn = PREV_INSN (last_insn);
2389
2390 if (last_insn
2391 && GET_CODE (last_insn) == JUMP_INSN
2392 && ! simplejump_p (last_insn))
2393 return FALSE;
2394
2395 join_bb = else_bb;
2396 else_bb = NULL_BLOCK;
2397 }
2398 else
2399 return FALSE;
2400 }
2401
2402 /* If the THEN block's successor is the destination of the other edge out
2403 of the TEST block, then we have an IF-THEN combo without an ELSE. */
2404 else if (then_succ->dest == else_bb)
2405 {
2406 join_bb = else_bb;
2407 else_bb = NULL_BLOCK;
2408 }
2409
2410 /* If the THEN and ELSE blocks meet in a subsequent block, and the ELSE
2411 has exactly one predecessor and one successor, and the outgoing edge
2412 is not complex, then we have an IF-THEN-ELSE combo. */
2413 else if (else_succ != NULL_EDGE
2414 && then_succ->dest == else_succ->dest
2415 && else_bb->pred->pred_next == NULL_EDGE
2416 && else_succ->succ_next == NULL_EDGE
2417 && ! (else_succ->flags & EDGE_COMPLEX)
2418 && ! (flow2_completed && tablejump_p (else_bb->end, NULL, NULL)))
2419 join_bb = else_succ->dest;
2420
2421 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2422 else
2423 return FALSE;
2424
2425 num_possible_if_blocks++;
2426
2427 if (rtl_dump_file)
2428 {
2429 fprintf (rtl_dump_file, "\nIF-THEN%s block found, pass %d, start block %d [insn %d], then %d [%d]",
2430 (else_bb) ? "-ELSE" : "",
2431 ce_info->pass,
2432 test_bb->index, (test_bb->head) ? (int)INSN_UID (test_bb->head) : -1,
2433 then_bb->index, (then_bb->head) ? (int)INSN_UID (then_bb->head) : -1);
2434
2435 if (else_bb)
2436 fprintf (rtl_dump_file, ", else %d [%d]",
2437 else_bb->index, (else_bb->head) ? (int)INSN_UID (else_bb->head) : -1);
2438
2439 fprintf (rtl_dump_file, ", join %d [%d]",
2440 join_bb->index, (join_bb->head) ? (int)INSN_UID (join_bb->head) : -1);
2441
2442 if (ce_info->num_multiple_test_blocks > 0)
2443 fprintf (rtl_dump_file, ", %d %s block%s last test %d [%d]",
2444 ce_info->num_multiple_test_blocks,
2445 (ce_info->and_and_p) ? "&&" : "||",
2446 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
2447 ce_info->last_test_bb->index,
2448 ((ce_info->last_test_bb->head)
2449 ? (int)INSN_UID (ce_info->last_test_bb->head)
2450 : -1));
2451
2452 fputc ('\n', rtl_dump_file);
2453 }
2454
2455 /* Make sure the IF, THEN, and ELSE blocks are adjacent.  Actually, we get the
2456 first condition for free, since we've already asserted that there's a
2457 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
2458 we checked the FALLTHRU flag, those are already adjacent to the last IF
2459 block. */
2460 /* ??? As an enhancement, move the ELSE block. Have to deal with
2461 BLOCK notes, if by no other means than aborting the merge if they
2462 exist. Sticky enough I don't want to think about it now. */
2463 next = then_bb;
2464 if (else_bb && (next = next->next_bb) != else_bb)
2465 return FALSE;
2466 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2467 {
2468 if (else_bb)
2469 join_bb = NULL;
2470 else
2471 return FALSE;
2472 }
2473
2474 /* Do the real work. */
2475 ce_info->else_bb = else_bb;
2476 ce_info->join_bb = join_bb;
2477
2478 return process_if_block (ce_info);
2479 }
2480
2481 /* Convert a branch over a trap, or a branch
2482 to a trap, into a conditional trap. */
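/* For instance (a sketch only), a source sequence like

	if (x == 0)
	  __builtin_trap ();

   is laid out as a conditional branch over (or to) a block whose only
   real insn is an unconditional trap; when the target provides a
   conditional trap pattern, the branch and the trap block are replaced
   by a single conditional trap emitted in the test block.  */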
2483
2484 static int
2485 find_cond_trap (test_bb, then_edge, else_edge)
2486 basic_block test_bb;
2487 edge then_edge, else_edge;
2488 {
2489 basic_block then_bb = then_edge->dest;
2490 basic_block else_bb = else_edge->dest;
2491 basic_block other_bb, trap_bb;
2492 rtx trap, jump, cond, cond_earliest, seq;
2493 enum rtx_code code;
2494
2495 /* Locate the block with the trap instruction. */
2496 /* ??? While we look for no successors, we really ought to allow
2497 EH successors. Need to fix merge_if_block for that to work. */
2498 if ((trap = block_has_only_trap (then_bb)) != NULL)
2499 trap_bb = then_bb, other_bb = else_bb;
2500 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2501 trap_bb = else_bb, other_bb = then_bb;
2502 else
2503 return FALSE;
2504
2505 if (rtl_dump_file)
2506 {
2507 fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2508 test_bb->index, trap_bb->index);
2509 }
2510
2511 /* If this is not a standard conditional jump, we can't parse it. */
2512 jump = test_bb->end;
2513 cond = noce_get_condition (jump, &cond_earliest);
2514 if (! cond)
2515 return FALSE;
2516
2517 /* If the conditional jump is more than just a conditional jump, then
2518 we can not do if-conversion on this block. */
2519 if (! onlyjump_p (jump))
2520 return FALSE;
2521
2522 /* We must be comparing objects whose modes imply the size. */
2523 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2524 return FALSE;
2525
2526 /* Reverse the comparison code, if necessary. */
2527 code = GET_CODE (cond);
2528 if (then_bb == trap_bb)
2529 {
2530 code = reversed_comparison_code (cond, jump);
2531 if (code == UNKNOWN)
2532 return FALSE;
2533 }
2534
2535 /* Attempt to generate the conditional trap. */
2536 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2537 TRAP_CODE (PATTERN (trap)));
2538 if (seq == NULL)
2539 return FALSE;
2540
2541 /* Emit the new insns before cond_earliest. */
2542 emit_insn_before_scope (seq, cond_earliest, INSN_SCOPE (trap));
2543
2544 /* Delete the trap block if possible. */
2545 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2546 if (trap_bb->pred == NULL)
2547 {
2548 if (post_dominators)
2549 delete_from_dominance_info (post_dominators, trap_bb);
2550 flow_delete_block (trap_bb);
2551 num_removed_blocks++;
2552 }
2553
2554 /* If the non-trap block and the test are now adjacent, merge them.
2555 Otherwise we must insert a direct branch. */
2556 if (test_bb->next_bb == other_bb)
2557 {
2558 struct ce_if_block new_ce_info;
2559 delete_insn (jump);
2560 memset ((PTR) &new_ce_info, '\0', sizeof (new_ce_info));
2561 new_ce_info.test_bb = test_bb;
2562 new_ce_info.then_bb = NULL;
2563 new_ce_info.else_bb = NULL;
2564 new_ce_info.join_bb = other_bb;
2565 merge_if_block (&new_ce_info);
2566 }
2567 else
2568 {
2569 rtx lab, newjump;
2570
2571 lab = JUMP_LABEL (jump);
2572 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2573 LABEL_NUSES (lab) += 1;
2574 JUMP_LABEL (newjump) = lab;
2575 emit_barrier_after (newjump);
2576
2577 delete_insn (jump);
2578 }
2579
2580 return TRUE;
2581 }
2582
2583 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2584 return it. */
2585
2586 static rtx
2587 block_has_only_trap (bb)
2588 basic_block bb;
2589 {
2590 rtx trap;
2591
2592 /* We're not the exit block. */
2593 if (bb == EXIT_BLOCK_PTR)
2594 return NULL_RTX;
2595
2596 /* The block must have no successors. */
2597 if (bb->succ)
2598 return NULL_RTX;
2599
2600 /* The only instruction in the block must be the trap. */
2601 trap = first_active_insn (bb);
2602 if (! (trap == bb->end
2603 && GET_CODE (PATTERN (trap)) == TRAP_IF
2604 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2605 return NULL_RTX;
2606
2607 return trap;
2608 }
2609
2610 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2611 transformable, but not necessarily the other. There need be no
2612 JOIN block.
2613
2614 Return TRUE if we were successful at converting the block.
2615
2616 Cases we'd like to look at:
2617
2618 (1)
2619 if (test) goto over; // x not live
2620 x = a;
2621 goto label;
2622 over:
2623
2624 becomes
2625
2626 x = a;
2627 if (! test) goto label;
2628
2629 (2)
2630 if (test) goto E; // x not live
2631 x = big();
2632 goto L;
2633 E:
2634 x = b;
2635 goto M;
2636
2637 becomes
2638
2639 x = b;
2640 if (test) goto M;
2641 x = big();
2642 goto L;
2643
2644 (3) // This one's really only interesting for targets that can do
2645 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2646 // it results in multiple branches on a cache line, which often
2647 // does not sit well with predictors.
2648
2649 if (test1) goto E; // predicted not taken
2650 x = a;
2651 if (test2) goto F;
2652 ...
2653 E:
2654 x = b;
2655 J:
2656
2657 becomes
2658
2659 x = a;
2660 if (test1) goto E;
2661 if (test2) goto F;
2662
2663 Notes:
2664
2665 (A) Don't do (2) if the branch is predicted against the block we're
2666 eliminating. Do it anyway if we can eliminate a branch; this requires
2667 that the sole successor of the eliminated block postdominate the other
2668 side of the if.
2669
2670 (B) With CE, on (3) we can steal from both sides of the if, creating
2671
2672 if (test1) x = a;
2673 if (!test1) x = b;
2674 if (test1) goto J;
2675 if (test2) goto F;
2676 ...
2677 J:
2678
2679 Again, this is most useful if J postdominates.
2680
2681 (C) CE substitutes for helpful life information.
2682
2683 (D) These heuristics need a lot of work. */
2684
2685 /* Tests for case 1 above. */
2686
2687 static int
2688 find_if_case_1 (test_bb, then_edge, else_edge)
2689 basic_block test_bb;
2690 edge then_edge, else_edge;
2691 {
2692 basic_block then_bb = then_edge->dest;
2693 basic_block else_bb = else_edge->dest, new_bb;
2694 edge then_succ = then_bb->succ;
2695 int then_bb_index;
2696
2697 /* THEN has one successor. */
2698 if (!then_succ || then_succ->succ_next != NULL)
2699 return FALSE;
2700
2701 /* THEN does not fall through, but is not strange either. */
2702 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2703 return FALSE;
2704
2705 /* THEN has one predecessor. */
2706 if (then_bb->pred->pred_next != NULL)
2707 return FALSE;
2708
2709 /* THEN must do something. */
2710 if (forwarder_block_p (then_bb))
2711 return FALSE;
2712
2713 num_possible_if_blocks++;
2714 if (rtl_dump_file)
2715 fprintf (rtl_dump_file,
2716 "\nIF-CASE-1 found, start %d, then %d\n",
2717 test_bb->index, then_bb->index);
2718
2719 /* THEN is small. */
2720 if (count_bb_insns (then_bb) > BRANCH_COST)
2721 return FALSE;
2722
2723 /* Registers set are dead, or are predicable. */
2724 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2725 then_bb->succ->dest, 1))
2726 return FALSE;
2727
2728 /* Conversion went ok, including moving the insns and fixing up the
2729 jump. Adjust the CFG to match. */
2730
2731 bitmap_operation (test_bb->global_live_at_end,
2732 else_bb->global_live_at_start,
2733 then_bb->global_live_at_end, BITMAP_IOR);
2734
2735 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2736 then_bb_index = then_bb->index;
2737 if (post_dominators)
2738 delete_from_dominance_info (post_dominators, then_bb);
2739 flow_delete_block (then_bb);
2740
2741 /* Make the rest of the code believe that the newly created block is the THEN_BB
2742 block we removed. */
2743 if (new_bb)
2744 {
2745 new_bb->index = then_bb_index;
2746 BASIC_BLOCK (then_bb_index) = new_bb;
2747 if (post_dominators)
2748 add_to_dominance_info (post_dominators, new_bb);
2749 }
2750 /* We've possibly created a jump to the next insn; cleanup_cfg will solve
2751 that later. */
2752
2753 num_removed_blocks++;
2754 num_updated_if_blocks++;
2755
2756 return TRUE;
2757 }
2758
2759 /* Test for case 2 above. */
2760
2761 static int
2762 find_if_case_2 (test_bb, then_edge, else_edge)
2763 basic_block test_bb;
2764 edge then_edge, else_edge;
2765 {
2766 basic_block then_bb = then_edge->dest;
2767 basic_block else_bb = else_edge->dest;
2768 edge else_succ = else_bb->succ;
2769 rtx note;
2770
2771 /* ELSE has one successor. */
2772 if (!else_succ || else_succ->succ_next != NULL)
2773 return FALSE;
2774
2775 /* ELSE outgoing edge is not complex. */
2776 if (else_succ->flags & EDGE_COMPLEX)
2777 return FALSE;
2778
2779 /* ELSE has one predecessor. */
2780 if (else_bb->pred->pred_next != NULL)
2781 return FALSE;
2782
2783 /* THEN is not EXIT. */
2784 if (then_bb->index < 0)
2785 return FALSE;
2786
2787 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2788 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2789 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2790 ;
2791 else if (else_succ->dest->index < 0
2792 || dominated_by_p (post_dominators, then_bb,
2793 else_succ->dest))
2794 ;
2795 else
2796 return FALSE;
2797
2798 num_possible_if_blocks++;
2799 if (rtl_dump_file)
2800 fprintf (rtl_dump_file,
2801 "\nIF-CASE-2 found, start %d, else %d\n",
2802 test_bb->index, else_bb->index);
2803
2804 /* ELSE is small. */
2805 if (count_bb_insns (else_bb) > BRANCH_COST)
2806 return FALSE;
2807
2808 /* Registers set are dead, or are predicable. */
2809 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2810 return FALSE;
2811
2812 /* Conversion went ok, including moving the insns and fixing up the
2813 jump. Adjust the CFG to match. */
2814
2815 bitmap_operation (test_bb->global_live_at_end,
2816 then_bb->global_live_at_start,
2817 else_bb->global_live_at_end, BITMAP_IOR);
2818
2819 if (post_dominators)
2820 delete_from_dominance_info (post_dominators, else_bb);
2821 flow_delete_block (else_bb);
2822
2823 num_removed_blocks++;
2824 num_updated_if_blocks++;
2825
2826 /* ??? We may now fallthru from one of THEN's successors into a join
2827 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2828
2829 return TRUE;
2830 }
2831
2832 /* A subroutine of dead_or_predicable called through for_each_rtx.
2833 Return 1 if a memory reference is found. */
2834
2835 static int
2836 find_memory (px, data)
2837 rtx *px;
2838 void *data ATTRIBUTE_UNUSED;
2839 {
2840 return GET_CODE (*px) == MEM;
2841 }
2842
2843 /* Used by the code above to perform the actual rtl transformations.
2844 Return TRUE if successful.
2845
2846 TEST_BB is the block containing the conditional branch. MERGE_BB
2847 is the block containing the code to manipulate. NEW_DEST is the
2848 label TEST_BB should be branching to after the conversion.
2849 REVERSEP is true if the sense of the branch should be reversed. */
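/* As an illustration, for case (1) above --

	if (test) goto over;  x = a;  goto label;  over:

   -- TEST_BB is the block ending in the conditional branch, MERGE_BB is
   the "x = a" block whose code is made unconditional, NEW_DEST is the
   block at "label", and REVERSEP is nonzero so that the branch becomes
   "if (! test) goto label".  */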
2850
2851 static int
2852 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2853 basic_block test_bb, merge_bb, other_bb;
2854 basic_block new_dest;
2855 int reversep;
2856 {
2857 rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2858
2859 jump = test_bb->end;
2860
2861 /* Find the extent of the real code in the merge block. */
2862 head = merge_bb->head;
2863 end = merge_bb->end;
2864
2865 if (GET_CODE (head) == CODE_LABEL)
2866 head = NEXT_INSN (head);
2867 if (GET_CODE (head) == NOTE)
2868 {
2869 if (head == end)
2870 {
2871 head = end = NULL_RTX;
2872 goto no_body;
2873 }
2874 head = NEXT_INSN (head);
2875 }
2876
2877 if (GET_CODE (end) == JUMP_INSN)
2878 {
2879 if (head == end)
2880 {
2881 head = end = NULL_RTX;
2882 goto no_body;
2883 }
2884 end = PREV_INSN (end);
2885 }
2886
2887 /* Disable handling dead code by conditional execution if the machine needs
2888 to do anything funny with the tests, etc. */
2889 #ifndef IFCVT_MODIFY_TESTS
2890 if (HAVE_conditional_execution)
2891 {
2892 /* In the conditional execution case, we have things easy. We know
2893 the condition is reversible. We don't have to check life info,
2894 because we're going to conditionally execute the code anyway.
2895 All that's left is making sure the insns involved can actually
2896 be predicated. */
2897
2898 rtx cond, prob_val;
2899
2900 cond = cond_exec_get_condition (jump);
2901 if (! cond)
2902 return FALSE;
2903
2904 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2905 if (prob_val)
2906 prob_val = XEXP (prob_val, 0);
2907
2908 if (reversep)
2909 {
2910 enum rtx_code rev = reversed_comparison_code (cond, jump);
2911 if (rev == UNKNOWN)
2912 return FALSE;
2913 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2914 XEXP (cond, 1));
2915 if (prob_val)
2916 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2917 }
2918
2919 if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
2920 prob_val, 0))
2921 goto cancel;
2922
2923 earliest = jump;
2924 }
2925 else
2926 #endif
2927 {
2928 /* In the non-conditional execution case, we have to verify that there
2929 are no trapping operations, no calls, no references to memory, and
2930 that any registers modified are dead at the branch site. */
2931
2932 rtx insn, cond, prev;
2933 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2934 regset merge_set, tmp, test_live, test_set;
2935 struct propagate_block_info *pbi;
2936 int i, fail = 0;
2937
2938 /* Check for no calls or trapping operations. */
2939 for (insn = head; ; insn = NEXT_INSN (insn))
2940 {
2941 if (GET_CODE (insn) == CALL_INSN)
2942 return FALSE;
2943 if (INSN_P (insn))
2944 {
2945 if (may_trap_p (PATTERN (insn)))
2946 return FALSE;
2947
2948 /* ??? Even non-trapping memories such as stack frame
2949 references must be avoided. For stores, we collect
2950 no lifetime info; for reads, we'd have to assert
2951 true_dependence false against every store in the
2952 TEST range. */
2953 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2954 return FALSE;
2955 }
2956 if (insn == end)
2957 break;
2958 }
2959
2960 if (! any_condjump_p (jump))
2961 return FALSE;
2962
2963 /* Find the extent of the conditional. */
2964 cond = noce_get_condition (jump, &earliest);
2965 if (! cond)
2966 return FALSE;
2967
2968 /* Collect:
2969 MERGE_SET = set of registers set in MERGE_BB
2970 TEST_LIVE = set of registers live at EARLIEST
2971 TEST_SET = set of registers set between EARLIEST and the
2972 end of the block. */
2973
2974 tmp = INITIALIZE_REG_SET (tmp_head);
2975 merge_set = INITIALIZE_REG_SET (merge_set_head);
2976 test_live = INITIALIZE_REG_SET (test_live_head);
2977 test_set = INITIALIZE_REG_SET (test_set_head);
2978
2979 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2980 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2981 since we've already asserted that MERGE_BB is small. */
2982 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2983
2984 /* For small register class machines, don't lengthen lifetimes of
2985 hard registers before reload. */
2986 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2987 {
2988 EXECUTE_IF_SET_IN_BITMAP
2989 (merge_set, 0, i,
2990 {
2991 if (i < FIRST_PSEUDO_REGISTER
2992 && ! fixed_regs[i]
2993 && ! global_regs[i])
2994 fail = 1;
2995 });
2996 }
2997
2998 /* For TEST, we're interested in a range of insns, not a whole block.
2999 Moreover, we're interested in the insns live from OTHER_BB. */
3000
3001 COPY_REG_SET (test_live, other_bb->global_live_at_start);
3002 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
3003 0);
3004
3005 for (insn = jump; ; insn = prev)
3006 {
3007 prev = propagate_one_insn (pbi, insn);
3008 if (insn == earliest)
3009 break;
3010 }
3011
3012 free_propagate_block_info (pbi);
3013
3014 /* We can perform the transformation if
3015 MERGE_SET & (TEST_SET | TEST_LIVE)
3016 and
3017 TEST_SET & merge_bb->global_live_at_start
3018 are empty. */
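      /* Roughly: hoisting MERGE_BB's insns above EARLIEST must not clobber
	 anything the test insns set or use, nor anything live into OTHER_BB;
	 and the insns from EARLIEST to the jump must not set anything that is
	 live on entry to MERGE_BB, since the moved code will now run before
	 them rather than after.  */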
3019
3020 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
3021 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
3022 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3023
3024 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
3025 BITMAP_AND);
3026 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3027
3028 FREE_REG_SET (tmp);
3029 FREE_REG_SET (merge_set);
3030 FREE_REG_SET (test_live);
3031 FREE_REG_SET (test_set);
3032
3033 if (fail)
3034 return FALSE;
3035 }
3036
3037 no_body:
3038 /* We don't want to use normal invert_jump or redirect_jump because
3039 we don't want delete_insn called.  Also, we want to do our own
3040 change group management. */
3041
3042 old_dest = JUMP_LABEL (jump);
3043 if (other_bb != new_dest)
3044 {
3045 new_label = block_label (new_dest);
3046 if (reversep
3047 ? ! invert_jump_1 (jump, new_label)
3048 : ! redirect_jump_1 (jump, new_label))
3049 goto cancel;
3050 }
3051
3052 if (! apply_change_group ())
3053 return FALSE;
3054
3055 if (other_bb != new_dest)
3056 {
3057 if (old_dest)
3058 LABEL_NUSES (old_dest) -= 1;
3059 if (new_label)
3060 LABEL_NUSES (new_label) += 1;
3061 JUMP_LABEL (jump) = new_label;
3062 if (reversep)
3063 invert_br_probabilities (jump);
3064
3065 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
3066 if (reversep)
3067 {
3068 gcov_type count, probability;
3069 count = BRANCH_EDGE (test_bb)->count;
3070 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
3071 FALLTHRU_EDGE (test_bb)->count = count;
3072 probability = BRANCH_EDGE (test_bb)->probability;
3073 BRANCH_EDGE (test_bb)->probability
3074 = FALLTHRU_EDGE (test_bb)->probability;
3075 FALLTHRU_EDGE (test_bb)->probability = probability;
3076 update_br_prob_note (test_bb);
3077 }
3078 }
3079
3080 /* Move the insns out of MERGE_BB to before the branch. */
3081 if (head != NULL)
3082 {
3083 if (end == merge_bb->end)
3084 merge_bb->end = PREV_INSN (head);
3085
3086 if (squeeze_notes (&head, &end))
3087 return TRUE;
3088
3089 reorder_insns (head, end, PREV_INSN (earliest));
3090 }
3091
3092 /* Remove the jump and edge if we can. */
3093 if (other_bb == new_dest)
3094 {
3095 delete_insn (jump);
3096 remove_edge (BRANCH_EDGE (test_bb));
3097 /* ??? Can't merge blocks here, as then_bb is still in use.
3098 At minimum, the merge will get done just before bb-reorder. */
3099 }
3100
3101 return TRUE;
3102
3103 cancel:
3104 cancel_changes (0);
3105 return FALSE;
3106 }
3107 \f
3108 /* Main entry point for all if-conversion. */
3109
3110 void
3111 if_convert (x_life_data_ok)
3112 int x_life_data_ok;
3113 {
3114 basic_block bb;
3115 int pass;
3116
3117 num_possible_if_blocks = 0;
3118 num_updated_if_blocks = 0;
3119 num_removed_blocks = 0;
3120 life_data_ok = (x_life_data_ok != 0);
3121
3122 /* Free up basic_block_for_insn so that we don't have to keep it
3123 up to date, either here or in merge_blocks_nomove. */
3124 free_basic_block_vars (1);
3125
3126 /* Compute postdominators if we think we'll use them. */
3127 post_dominators = NULL;
3128 if (HAVE_conditional_execution || life_data_ok)
3129 {
3130 post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
3131 }
3132 if (life_data_ok)
3133 clear_bb_flags ();
3134
3135 /* Go through each of the basic blocks looking for things to convert. If we
3136 have conditional execution, we make multiple passes to allow us to handle
3137 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
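  /* E.g. (purely illustrative): for nested conditions such as

	if (a) { if (b) x = 1; }

     one pass may turn the inner IF into predicated code, leaving an outer
     IF-THEN block that only a later pass can convert; hence the loop
     repeats while cond_exec_changed_p is set.  */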
3138 pass = 0;
3139 do
3140 {
3141 cond_exec_changed_p = FALSE;
3142 pass++;
3143
3144 #ifdef IFCVT_MULTIPLE_DUMPS
3145 if (rtl_dump_file && pass > 1)
3146 fprintf (rtl_dump_file, "\n\n========== Pass %d ==========\n", pass);
3147 #endif
3148
3149 FOR_EACH_BB (bb)
3150 {
3151 basic_block new_bb;
3152 while ((new_bb = find_if_header (bb, pass)))
3153 bb = new_bb;
3154 }
3155
3156 #ifdef IFCVT_MULTIPLE_DUMPS
3157 if (rtl_dump_file && cond_exec_changed_p)
3158 print_rtl_with_bb (rtl_dump_file, get_insns ());
3159 #endif
3160 }
3161 while (cond_exec_changed_p);
3162
3163 #ifdef IFCVT_MULTIPLE_DUMPS
3164 if (rtl_dump_file)
3165 fprintf (rtl_dump_file, "\n\n========== no more changes\n");
3166 #endif
3167
3168 if (post_dominators)
3169 free_dominance_info (post_dominators);
3170
3171 if (rtl_dump_file)
3172 fflush (rtl_dump_file);
3173
3174 clear_aux_for_blocks ();
3175
3176 /* Rebuild life info for basic blocks that require it. */
3177 if (num_removed_blocks && life_data_ok)
3178 {
3179 /* If we allocated new pseudos, we must resize the array for sched1. */
3180 if (max_regno < max_reg_num ())
3181 {
3182 max_regno = max_reg_num ();
3183 allocate_reg_info (max_regno, FALSE, FALSE);
3184 }
3185 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3186 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3187 | PROP_KILL_DEAD_CODE);
3188 }
3189
3190 /* Write the final stats. */
3191 if (rtl_dump_file && num_possible_if_blocks > 0)
3192 {
3193 fprintf (rtl_dump_file,
3194 "\n%d possible IF blocks searched.\n",
3195 num_possible_if_blocks);
3196 fprintf (rtl_dump_file,
3197 "%d IF blocks converted.\n",
3198 num_updated_if_blocks);
3199 fprintf (rtl_dump_file,
3200 "%d basic blocks deleted.\n\n\n",
3201 num_removed_blocks);
3202 }
3203
3204 #ifdef ENABLE_CHECKING
3205 verify_flow_info ();
3206 #endif
3207 }