1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "optabs.h"
39 #include "toplev.h"
40 #include "tm_p.h"
41
42
43 #ifndef HAVE_conditional_execution
44 #define HAVE_conditional_execution 0
45 #endif
46 #ifndef HAVE_conditional_move
47 #define HAVE_conditional_move 0
48 #endif
49 #ifndef HAVE_incscc
50 #define HAVE_incscc 0
51 #endif
52 #ifndef HAVE_decscc
53 #define HAVE_decscc 0
54 #endif
55 #ifndef HAVE_trap
56 #define HAVE_trap 0
57 #endif
58 #ifndef HAVE_conditional_trap
59 #define HAVE_conditional_trap 0
60 #endif
61
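/* MAX_CONDITIONAL_EXECUTE bounds the number of insns we are willing to
   predicate for one if-block; by default it scales with the cost of the
   branch being removed, and the limit is doubled in
   cond_exec_process_if_block when an ELSE block is present.  */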
62 #ifndef MAX_CONDITIONAL_EXECUTE
63 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
64 #endif
65
66 #define NULL_EDGE ((struct edge_def *)NULL)
67 #define NULL_BLOCK ((struct basic_block_def *)NULL)
68
69 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
70 static int num_possible_if_blocks;
71
72 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
73 execution. */
74 static int num_updated_if_blocks;
75
76 /* # of basic blocks that were removed. */
77 static int num_removed_blocks;
78
79 /* Whether conditional execution changes were made. */
80 static int cond_exec_changed_p;
81
82 /* True if life data ok at present. */
83 static bool life_data_ok;
84
85 /* The post-dominator relation on the original block numbers. */
86 static dominance_info post_dominators;
87
88 /* Forward references. */
89 static int count_bb_insns (basic_block);
90 static rtx first_active_insn (basic_block);
91 static rtx last_active_insn (basic_block, int);
92 static int seq_contains_jump (rtx);
93 static basic_block block_fallthru (basic_block);
94 static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
95 static rtx cond_exec_get_condition (rtx);
96 static int cond_exec_process_if_block (ce_if_block_t *, int);
97 static rtx noce_get_condition (rtx, rtx *);
98 static int noce_operand_ok (rtx);
99 static int noce_process_if_block (ce_if_block_t *);
100 static int process_if_block (ce_if_block_t *);
101 static void merge_if_block (ce_if_block_t *);
102 static int find_cond_trap (basic_block, edge, edge);
103 static basic_block find_if_header (basic_block, int);
104 static int block_jumps_and_fallthru_p (basic_block, basic_block);
105 static int find_if_block (ce_if_block_t *);
106 static int find_if_case_1 (basic_block, edge, edge);
107 static int find_if_case_2 (basic_block, edge, edge);
108 static int find_memory (rtx *, void *);
109 static int dead_or_predicable (basic_block, basic_block, basic_block,
110 basic_block, int);
111 static void noce_emit_move_insn (rtx, rtx);
112 static rtx block_has_only_trap (basic_block);
113 \f
114 /* Count the number of non-jump active insns in BB. */
115
116 static int
117 count_bb_insns (basic_block bb)
118 {
119 int count = 0;
120 rtx insn = bb->head;
121
122 while (1)
123 {
124 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
125 count++;
126
127 if (insn == bb->end)
128 break;
129 insn = NEXT_INSN (insn);
130 }
131
132 return count;
133 }
134
135 /* Return the first non-jump active insn in the basic block. */
136
137 static rtx
138 first_active_insn (basic_block bb)
139 {
140 rtx insn = bb->head;
141
142 if (GET_CODE (insn) == CODE_LABEL)
143 {
144 if (insn == bb->end)
145 return NULL_RTX;
146 insn = NEXT_INSN (insn);
147 }
148
149 while (GET_CODE (insn) == NOTE)
150 {
151 if (insn == bb->end)
152 return NULL_RTX;
153 insn = NEXT_INSN (insn);
154 }
155
156 if (GET_CODE (insn) == JUMP_INSN)
157 return NULL_RTX;
158
159 return insn;
160 }
161
162 /* Return the last non-jump active insn in the basic block, skipping USE insns when SKIP_USE_P is nonzero. */
163
164 static rtx
165 last_active_insn (basic_block bb, int skip_use_p)
166 {
167 rtx insn = bb->end;
168 rtx head = bb->head;
169
170 while (GET_CODE (insn) == NOTE
171 || GET_CODE (insn) == JUMP_INSN
172 || (skip_use_p
173 && GET_CODE (insn) == INSN
174 && GET_CODE (PATTERN (insn)) == USE))
175 {
176 if (insn == head)
177 return NULL_RTX;
178 insn = PREV_INSN (insn);
179 }
180
181 if (GET_CODE (insn) == CODE_LABEL)
182 return NULL_RTX;
183
184 return insn;
185 }
186
187 /* It is possible, especially after expanding multi-word
188 arithmetic, for the expanders to have emitted jumps. Search
189 through the sequence and return TRUE if a jump exists so that
190 we can abort the conversion. */
191
192 static int
193 seq_contains_jump (rtx insn)
194 {
195 while (insn)
196 {
197 if (GET_CODE (insn) == JUMP_INSN)
198 return 1;
199 insn = NEXT_INSN (insn);
200 }
201 return 0;
202 }
203
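/* Return the basic block reached by falling through BB, or NULL_BLOCK
   if BB has no fallthru edge.  */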
204 static basic_block
205 block_fallthru (basic_block bb)
206 {
207 edge e;
208
209 for (e = bb->succ;
210 e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
211 e = e->succ_next)
212 ;
213
214 return (e) ? e->dest : NULL_BLOCK;
215 }
216 \f
217 /* Go through the insns from START to END, converting them to conditional
218 execution format if possible. Return TRUE if all of the non-note
219 insns were processed. */
220
221 static int
222 cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED, /* if block information */
223                          rtx start,     /* first insn to look at */
224                          rtx end,       /* last insn to look at */
225                          rtx test,      /* conditional execution test */
226                          rtx prob_val,  /* probability of branch taken */
227                          int mod_ok)    /* nonzero if insns may modify the test */
228 {
229 int must_be_last = FALSE;
230 rtx insn;
231 rtx xtest;
232 rtx pattern;
233
234 if (!start || !end)
235 return FALSE;
236
237 for (insn = start; ; insn = NEXT_INSN (insn))
238 {
239 if (GET_CODE (insn) == NOTE)
240 goto insn_done;
241
242 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
243 abort ();
244
245 /* Remove USE insns that get in the way. */
246 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
247 {
248 /* ??? Ug. Actually unlinking the thing is problematic,
249 given what we'd have to coordinate with our callers. */
250 PUT_CODE (insn, NOTE);
251 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
252 NOTE_SOURCE_FILE (insn) = 0;
253 goto insn_done;
254 }
255
256 /* Last insn wasn't last? */
257 if (must_be_last)
258 return FALSE;
259
260 if (modified_in_p (test, insn))
261 {
262 if (!mod_ok)
263 return FALSE;
264 must_be_last = TRUE;
265 }
266
267 /* Now build the conditional form of the instruction. */
268 pattern = PATTERN (insn);
269 xtest = copy_rtx (test);
270
271 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
272 two conditions. */
273 if (GET_CODE (pattern) == COND_EXEC)
274 {
275 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
276 return FALSE;
277
278 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
279 COND_EXEC_TEST (pattern));
280 pattern = COND_EXEC_CODE (pattern);
281 }
282
283 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
284
285 /* If the machine needs to modify the insn being conditionally executed,
286 say for example to force a constant integer operand into a temp
287 register, do so here. */
288 #ifdef IFCVT_MODIFY_INSN
289 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
290 if (! pattern)
291 return FALSE;
292 #endif
293
294 validate_change (insn, &PATTERN (insn), pattern, 1);
295
296 if (GET_CODE (insn) == CALL_INSN && prob_val)
297 validate_change (insn, &REG_NOTES (insn),
298 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
299 REG_NOTES (insn)), 1);
300
301 insn_done:
302 if (insn == end)
303 break;
304 }
305
306 return TRUE;
307 }
308
309 /* Return the condition for a jump. Do not do any special processing. */
310
311 static rtx
312 cond_exec_get_condition (rtx jump)
313 {
314 rtx test_if, cond;
315
316 if (any_condjump_p (jump))
317 test_if = SET_SRC (pc_set (jump));
318 else
319 return NULL_RTX;
320 cond = XEXP (test_if, 0);
321
322 /* If this branches to JUMP_LABEL when the condition is false,
323 reverse the condition. */
324 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
325 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
326 {
327 enum rtx_code rev = reversed_comparison_code (cond, jump);
328 if (rev == UNKNOWN)
329 return NULL_RTX;
330
331 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
332 XEXP (cond, 1));
333 }
334
335 return cond;
336 }
337
338 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
339 to conditional execution. Return TRUE if we were successful at
340 converting the block. */
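/* As a rough illustration (not tied to any particular target): given

       if (test) x = y;

   the single set in the THEN block is rewritten as a predicated insn,

       (cond_exec (test) (set (reg x) (reg y)))

   after which the conditional branch and the separate THEN block can
   be removed.  */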
341
342 static int
343 cond_exec_process_if_block (ce_if_block_t * ce_info,
344 /* if block information */int do_multiple_p)
345 {
346 basic_block test_bb = ce_info->test_bb; /* last test block */
347 basic_block then_bb = ce_info->then_bb; /* THEN */
348 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
349 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
350 rtx then_start; /* first insn in THEN block */
351 rtx then_end; /* last insn + 1 in THEN block */
352 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
353 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
354 int max; /* max # of insns to convert. */
355 int then_mod_ok; /* whether conditional mods are ok in THEN */
356 rtx true_expr; /* test for else block insns */
357 rtx false_expr; /* test for then block insns */
358 rtx true_prob_val; /* probability of else block */
359 rtx false_prob_val; /* probability of then block */
360 int n_insns;
361 enum rtx_code false_code;
362
363 /* If test is comprised of && or || elements, and we've failed at handling
364 all of them together, just use the last test if it is the special case of
365 && elements without an ELSE block. */
366 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
367 {
368 if (else_bb || ! ce_info->and_and_p)
369 return FALSE;
370
371 ce_info->test_bb = test_bb = ce_info->last_test_bb;
372 ce_info->num_multiple_test_blocks = 0;
373 ce_info->num_and_and_blocks = 0;
374 ce_info->num_or_or_blocks = 0;
375 }
376
377 /* Find the conditional jump to the ELSE or JOIN part, and isolate
378 the test. */
379 test_expr = cond_exec_get_condition (test_bb->end);
380 if (! test_expr)
381 return FALSE;
382
383 /* If the conditional jump is more than just a conditional jump,
384 then we can not do conditional execution conversion on this block. */
385 if (! onlyjump_p (test_bb->end))
386 return FALSE;
387
388 /* Collect the bounds of where we're to search, skipping any labels, jumps
389 and notes at the beginning and end of the block. Then count the total
390 number of insns and see if it is small enough to convert. */
391 then_start = first_active_insn (then_bb);
392 then_end = last_active_insn (then_bb, TRUE);
393 n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
394 max = MAX_CONDITIONAL_EXECUTE;
395
396 if (else_bb)
397 {
398 max *= 2;
399 else_start = first_active_insn (else_bb);
400 else_end = last_active_insn (else_bb, TRUE);
401 n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
402 }
403
404 if (n_insns > max)
405 return FALSE;
406
407 /* Map test_expr/test_jump into the appropriate MD tests to use on
408 the conditionally executed code. */
409
410 true_expr = test_expr;
411
412 false_code = reversed_comparison_code (true_expr, test_bb->end);
413 if (false_code != UNKNOWN)
414 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
415 XEXP (true_expr, 0), XEXP (true_expr, 1));
416 else
417 false_expr = NULL_RTX;
418
419 #ifdef IFCVT_MODIFY_TESTS
420 /* If the machine description needs to modify the tests, such as setting a
421 conditional execution register from a comparison, it can do so here. */
422 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
423
424 /* See if the conversion failed */
425 if (!true_expr || !false_expr)
426 goto fail;
427 #endif
428
429 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
430 if (true_prob_val)
431 {
432 true_prob_val = XEXP (true_prob_val, 0);
433 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
434 }
435 else
436 false_prob_val = NULL_RTX;
437
438 /* If we have && or || tests, do them here. These tests are in the adjacent
439 blocks after the first block containing the test. */
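/* For illustration, "if (a && b) x = y;" places the test of B in a
   separate block reached by falling through the test of A; the loop
   below predicates that block's insns and folds its test into the
   combined TRUE_EXPR/FALSE_EXPR used later for the THEN and ELSE
   insns.  */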
440 if (ce_info->num_multiple_test_blocks > 0)
441 {
442 basic_block bb = test_bb;
443 basic_block last_test_bb = ce_info->last_test_bb;
444
445 if (! false_expr)
446 goto fail;
447
448 do
449 {
450 rtx start, end;
451 rtx t, f;
452
453 bb = block_fallthru (bb);
454 start = first_active_insn (bb);
455 end = last_active_insn (bb, TRUE);
456 if (start
457 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
458 false_prob_val, FALSE))
459 goto fail;
460
461 /* If the conditional jump is more than just a conditional jump, then
462 we can not do conditional execution conversion on this block. */
463 if (! onlyjump_p (bb->end))
464 goto fail;
465
466 /* Find the conditional jump and isolate the test. */
467 t = cond_exec_get_condition (bb->end);
468 if (! t)
469 goto fail;
470
471 f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
472 GET_MODE (t),
473 XEXP (t, 0),
474 XEXP (t, 1));
475
476 if (ce_info->and_and_p)
477 {
478 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
479 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
480 }
481 else
482 {
483 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
484 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
485 }
486
487 /* If the machine description needs to modify the tests, such as
488 setting a conditional execution register from a comparison, it can
489 do so here. */
490 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
491 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
492
493 /* See if the conversion failed */
494 if (!t || !f)
495 goto fail;
496 #endif
497
498 true_expr = t;
499 false_expr = f;
500 }
501 while (bb != last_test_bb);
502 }
503
504 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
505 in the THEN block. */
506 then_mod_ok = (else_bb == NULL_BLOCK);
507
508 /* Go through the THEN and ELSE blocks converting the insns if possible
509 to conditional execution. */
510
511 if (then_end
512 && (! false_expr
513 || ! cond_exec_process_insns (ce_info, then_start, then_end,
514 false_expr, false_prob_val,
515 then_mod_ok)))
516 goto fail;
517
518 if (else_bb && else_end
519 && ! cond_exec_process_insns (ce_info, else_start, else_end,
520 true_expr, true_prob_val, TRUE))
521 goto fail;
522
523 /* If we cannot apply the changes, fail. Do not go through the normal fail
524 processing, since apply_change_group will call cancel_changes. */
525 if (! apply_change_group ())
526 {
527 #ifdef IFCVT_MODIFY_CANCEL
528 /* Cancel any machine dependent changes. */
529 IFCVT_MODIFY_CANCEL (ce_info);
530 #endif
531 return FALSE;
532 }
533
534 #ifdef IFCVT_MODIFY_FINAL
535 /* Do any machine dependent final modifications */
536 IFCVT_MODIFY_FINAL (ce_info);
537 #endif
538
539 /* Conversion succeeded. */
540 if (rtl_dump_file)
541 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
542 n_insns, (n_insns == 1) ? " was" : "s were");
543
544 /* Merge the blocks! */
545 merge_if_block (ce_info);
546 cond_exec_changed_p = TRUE;
547 return TRUE;
548
549 fail:
550 #ifdef IFCVT_MODIFY_CANCEL
551 /* Cancel any machine dependent changes. */
552 IFCVT_MODIFY_CANCEL (ce_info);
553 #endif
554
555 cancel_changes (0);
556 return FALSE;
557 }
558 \f
559 /* Used by noce_process_if_block to communicate with its subroutines.
560
561 The subroutines know that A and B may be evaluated freely. They
562 know that X is a register. They should insert new instructions
563 before cond_earliest. */
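/* As a concrete example, for

       if (test) x = a; else x = b;

   JUMP is the conditional branch on TEST, COND is the test expression
   extracted from it, X is the destination both arms assign to, and A
   and B are the values assigned in the THEN and ELSE arms (B is X
   itself when there is no ELSE arm).  */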
564
565 struct noce_if_info
566 {
567 basic_block test_bb;
568 rtx insn_a, insn_b;
569 rtx x, a, b;
570 rtx jump, cond, cond_earliest;
571 };
572
573 static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
574 static int noce_try_store_flag (struct noce_if_info *);
575 static int noce_try_addcc (struct noce_if_info *);
576 static int noce_try_store_flag_constants (struct noce_if_info *);
577 static int noce_try_store_flag_mask (struct noce_if_info *);
578 static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
579 rtx, rtx, rtx);
580 static int noce_try_cmove (struct noce_if_info *);
581 static int noce_try_cmove_arith (struct noce_if_info *);
582 static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
583 static int noce_try_minmax (struct noce_if_info *);
584 static int noce_try_abs (struct noce_if_info *);
585
586 /* Helper function for noce_try_store_flag*. */
587
588 static rtx
589 noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
590 int normalize)
591 {
592 rtx cond = if_info->cond;
593 int cond_complex;
594 enum rtx_code code;
595
596 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
597 || ! general_operand (XEXP (cond, 1), VOIDmode));
598
599 /* If earliest == jump, or when the condition is complex, try to
600 build the store_flag insn directly. */
601
602 if (cond_complex)
603 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
604
605 if (reversep)
606 code = reversed_comparison_code (cond, if_info->jump);
607 else
608 code = GET_CODE (cond);
609
610 if ((if_info->cond_earliest == if_info->jump || cond_complex)
611 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
612 {
613 rtx tmp;
614
615 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
616 XEXP (cond, 1));
617 tmp = gen_rtx_SET (VOIDmode, x, tmp);
618
619 start_sequence ();
620 tmp = emit_insn (tmp);
621
622 if (recog_memoized (tmp) >= 0)
623 {
624 tmp = get_insns ();
625 end_sequence ();
626 emit_insn (tmp);
627
628 if_info->cond_earliest = if_info->jump;
629
630 return x;
631 }
632
633 end_sequence ();
634 }
635
636 /* Don't even try if the comparison operands or the mode of X are weird. */
637 if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
638 return NULL_RTX;
639
640 return emit_store_flag (x, code, XEXP (cond, 0),
641 XEXP (cond, 1), VOIDmode,
642 (code == LTU || code == LEU
643 || code == GEU || code == GTU), normalize);
644 }
645
646 /* Emit an insn to copy Y into X, handling the case where X is a STRICT_LOW_PART. */
647 static void
648 noce_emit_move_insn (rtx x, rtx y)
649 {
650 enum machine_mode outmode, inmode;
651 rtx outer, inner;
652 int bitpos;
653
654 if (GET_CODE (x) != STRICT_LOW_PART)
655 {
656 emit_move_insn (x, y);
657 return;
658 }
659
660 outer = XEXP (x, 0);
661 inner = XEXP (outer, 0);
662 outmode = GET_MODE (outer);
663 inmode = GET_MODE (inner);
664 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
665 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
666 GET_MODE_BITSIZE (inmode));
667 }
668
669 /* Convert "if (test) x = 1; else x = 0".
670
671 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
672 tried in noce_try_store_flag_constants after noce_try_cmove has had
673 a go at the conversion. */
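/* A rough sketch, assuming STORE_FLAG_VALUE == 1 and a condition of the
   form (a < b):

       if (a < b) x = 1; else x = 0;
   =>
       x = (a < b);    (a single store-flag insn)  */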
674
675 static int
676 noce_try_store_flag (struct noce_if_info *if_info)
677 {
678 int reversep;
679 rtx target, seq;
680
681 if (GET_CODE (if_info->b) == CONST_INT
682 && INTVAL (if_info->b) == STORE_FLAG_VALUE
683 && if_info->a == const0_rtx)
684 reversep = 0;
685 else if (if_info->b == const0_rtx
686 && GET_CODE (if_info->a) == CONST_INT
687 && INTVAL (if_info->a) == STORE_FLAG_VALUE
688 && (reversed_comparison_code (if_info->cond, if_info->jump)
689 != UNKNOWN))
690 reversep = 1;
691 else
692 return FALSE;
693
694 start_sequence ();
695
696 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
697 if (target)
698 {
699 if (target != if_info->x)
700 noce_emit_move_insn (if_info->x, target);
701
702 seq = get_insns ();
703 end_sequence ();
704 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
705
706 return TRUE;
707 }
708 else
709 {
710 end_sequence ();
711 return FALSE;
712 }
713 }
714
715 /* Convert "if (test) x = a; else x = b", for A and B constant. */
716
717 static int
718 noce_try_store_flag_constants (struct noce_if_info *if_info)
719 {
720 rtx target, seq;
721 int reversep;
722 HOST_WIDE_INT itrue, ifalse, diff, tmp;
723 int normalize, can_reverse;
724 enum machine_mode mode;
725
726 if (! no_new_pseudos
727 && GET_CODE (if_info->a) == CONST_INT
728 && GET_CODE (if_info->b) == CONST_INT)
729 {
730 mode = GET_MODE (if_info->x);
731 ifalse = INTVAL (if_info->a);
732 itrue = INTVAL (if_info->b);
733
734 /* Make sure we can represent the difference between the two values. */
735 if ((itrue - ifalse > 0)
736 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
737 return FALSE;
738
739 diff = trunc_int_for_mode (itrue - ifalse, mode);
740
741 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
742 != UNKNOWN);
743
744 reversep = 0;
745 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
746 normalize = 0;
747 else if (ifalse == 0 && exact_log2 (itrue) >= 0
748 && (STORE_FLAG_VALUE == 1
749 || BRANCH_COST >= 2))
750 normalize = 1;
751 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
752 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
753 normalize = 1, reversep = 1;
754 else if (itrue == -1
755 && (STORE_FLAG_VALUE == -1
756 || BRANCH_COST >= 2))
757 normalize = -1;
758 else if (ifalse == -1 && can_reverse
759 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
760 normalize = -1, reversep = 1;
761 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
762 || BRANCH_COST >= 3)
763 normalize = -1;
764 else
765 return FALSE;
766
767 if (reversep)
768 {
769 tmp = itrue; itrue = ifalse; ifalse = tmp;
770 diff = trunc_int_for_mode (-diff, mode);
771 }
772
773 start_sequence ();
774 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
775 if (! target)
776 {
777 end_sequence ();
778 return FALSE;
779 }
780
781 /* if (test) x = 3; else x = 4;
782 => x = 3 + (test == 0); */
783 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
784 {
785 target = expand_simple_binop (mode,
786 (diff == STORE_FLAG_VALUE
787 ? PLUS : MINUS),
788 GEN_INT (ifalse), target, if_info->x, 0,
789 OPTAB_WIDEN);
790 }
791
792 /* if (test) x = 8; else x = 0;
793 => x = (test != 0) << 3; */
794 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
795 {
796 target = expand_simple_binop (mode, ASHIFT,
797 target, GEN_INT (tmp), if_info->x, 0,
798 OPTAB_WIDEN);
799 }
800
801 /* if (test) x = -1; else x = b;
802 => x = -(test != 0) | b; */
803 else if (itrue == -1)
804 {
805 target = expand_simple_binop (mode, IOR,
806 target, GEN_INT (ifalse), if_info->x, 0,
807 OPTAB_WIDEN);
808 }
809
810 /* if (test) x = a; else x = b;
811 => x = (-(test != 0) & (b - a)) + a; */
812 else
813 {
814 target = expand_simple_binop (mode, AND,
815 target, GEN_INT (diff), if_info->x, 0,
816 OPTAB_WIDEN);
817 if (target)
818 target = expand_simple_binop (mode, PLUS,
819 target, GEN_INT (ifalse),
820 if_info->x, 0, OPTAB_WIDEN);
821 }
822
823 if (! target)
824 {
825 end_sequence ();
826 return FALSE;
827 }
828
829 if (target != if_info->x)
830 noce_emit_move_insn (if_info->x, target);
831
832 seq = get_insns ();
833 end_sequence ();
834
835 if (seq_contains_jump (seq))
836 return FALSE;
837
838 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
839
840 return TRUE;
841 }
842
843 return FALSE;
844 }
845
846 /* Convert "if (test) foo++" into "foo += (test != 0)", and
847 similarly for "foo--". */
848
849 static int
850 noce_try_addcc (struct noce_if_info *if_info)
851 {
852 rtx target, seq;
853 int subtract, normalize;
854
855 if (! no_new_pseudos
856 /* Should be no `else' case to worry about. */
857 && if_info->b == if_info->x
858 && GET_CODE (if_info->a) == PLUS
859 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
860 && (reversed_comparison_code (if_info->cond, if_info->jump)
861 != UNKNOWN))
862 {
863 rtx cond = if_info->cond;
864 enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
865
866 /* First try to use addcc pattern. */
867 if (general_operand (XEXP (cond, 0), VOIDmode)
868 && general_operand (XEXP (cond, 1), VOIDmode))
869 {
870 start_sequence ();
871 target = emit_conditional_add (if_info->x, code,
872 XEXP (cond, 0), XEXP (cond, 1),
873 VOIDmode,
874 if_info->b, XEXP (if_info->a, 1),
875 GET_MODE (if_info->x),
876 (code == LTU || code == GEU
877 || code == LEU || code == GTU));
878 if (target)
879 {
880 if (target != if_info->x)
881 noce_emit_move_insn (if_info->x, target);
882
883 seq = get_insns ();
884 end_sequence ();
885 emit_insn_before_setloc (seq, if_info->jump,
886 INSN_LOCATOR (if_info->insn_a));
887 return TRUE;
888 }
889 end_sequence ();
890 }
891
892 /* If that fails, construct conditional increment or decrement using
893 setcc. */
894 if (BRANCH_COST >= 2
895 && (XEXP (if_info->a, 1) == const1_rtx
896 || XEXP (if_info->a, 1) == constm1_rtx))
897 {
898 start_sequence ();
899 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
900 subtract = 0, normalize = 0;
901 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
902 subtract = 1, normalize = 0;
903 else
904 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
905
906
907 target = noce_emit_store_flag (if_info,
908 gen_reg_rtx (GET_MODE (if_info->x)),
909 1, normalize);
910
911 if (target)
912 target = expand_simple_binop (GET_MODE (if_info->x),
913 subtract ? MINUS : PLUS,
914 if_info->x, target, if_info->x,
915 0, OPTAB_WIDEN);
916 if (target)
917 {
918 if (target != if_info->x)
919 noce_emit_move_insn (if_info->x, target);
920
921 seq = get_insns ();
922 end_sequence ();
923
924 if (seq_contains_jump (seq))
925 return FALSE;
926
927 emit_insn_before_setloc (seq, if_info->jump,
928 INSN_LOCATOR (if_info->insn_a));
929
930 return TRUE;
931 }
932 end_sequence ();
933 }
934 }
935
936 return FALSE;
937 }
938
939 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
940
941 static int
942 noce_try_store_flag_mask (struct noce_if_info *if_info)
943 {
944 rtx target, seq;
945 int reversep;
946
947 reversep = 0;
948 if (! no_new_pseudos
949 && (BRANCH_COST >= 2
950 || STORE_FLAG_VALUE == -1)
951 && ((if_info->a == const0_rtx
952 && rtx_equal_p (if_info->b, if_info->x))
953 || ((reversep = (reversed_comparison_code (if_info->cond,
954 if_info->jump)
955 != UNKNOWN))
956 && if_info->b == const0_rtx
957 && rtx_equal_p (if_info->a, if_info->x))))
958 {
959 start_sequence ();
960 target = noce_emit_store_flag (if_info,
961 gen_reg_rtx (GET_MODE (if_info->x)),
962 reversep, -1);
963 if (target)
964 target = expand_simple_binop (GET_MODE (if_info->x), AND,
965 if_info->x, target, if_info->x, 0,
966 OPTAB_WIDEN);
967
968 if (target)
969 {
970 if (target != if_info->x)
971 noce_emit_move_insn (if_info->x, target);
972
973 seq = get_insns ();
974 end_sequence ();
975
976 if (seq_contains_jump (seq))
977 return FALSE;
978
979 emit_insn_before_setloc (seq, if_info->jump,
980 INSN_LOCATOR (if_info->insn_a));
981
982 return TRUE;
983 }
984
985 end_sequence ();
986 }
987
988 return FALSE;
989 }
990
991 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
992
993 static rtx
994 noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
995 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
996 {
997 /* If earliest == jump, try to build the cmove insn directly.
998 This is helpful when combine has created some complex condition
999 (like for alpha's cmovlbs) that we can't hope to regenerate
1000 through the normal interface. */
1001
1002 if (if_info->cond_earliest == if_info->jump)
1003 {
1004 rtx tmp;
1005
1006 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1007 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1008 tmp = gen_rtx_SET (VOIDmode, x, tmp);
1009
1010 start_sequence ();
1011 tmp = emit_insn (tmp);
1012
1013 if (recog_memoized (tmp) >= 0)
1014 {
1015 tmp = get_insns ();
1016 end_sequence ();
1017 emit_insn (tmp);
1018
1019 return x;
1020 }
1021
1022 end_sequence ();
1023 }
1024
1025 /* Don't even try if the comparison operands are weird. */
1026 if (! general_operand (cmp_a, GET_MODE (cmp_a))
1027 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1028 return NULL_RTX;
1029
1030 #if HAVE_conditional_move
1031 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1032 vtrue, vfalse, GET_MODE (x),
1033 (code == LTU || code == GEU
1034 || code == LEU || code == GTU));
1035 #else
1036 /* We'll never get here, as noce_process_if_block doesn't call the
1037 functions involved. Ifdef code is normally discouraged because it
1038 leads to typos in the code not selected, but in this case
1039 emit_conditional_move would not exist either. */
1040 return NULL_RTX;
1041 #endif
1042 }
1043
1044 /* Try only simple constants and registers here. More complex cases
1045 are handled in noce_try_cmove_arith after the noce_try_store_flag*
1046 variants have had a go at the conversion. */
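/* A sketch of the intended transformation, when the target provides a
   conditional-move pattern:

       if (test) x = a; else x = b;
   =>
       x = test ? a : b;  */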
1047
1048 static int
1049 noce_try_cmove (struct noce_if_info *if_info)
1050 {
1051 enum rtx_code code;
1052 rtx target, seq;
1053
1054 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1055 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1056 {
1057 start_sequence ();
1058
1059 code = GET_CODE (if_info->cond);
1060 target = noce_emit_cmove (if_info, if_info->x, code,
1061 XEXP (if_info->cond, 0),
1062 XEXP (if_info->cond, 1),
1063 if_info->a, if_info->b);
1064
1065 if (target)
1066 {
1067 if (target != if_info->x)
1068 noce_emit_move_insn (if_info->x, target);
1069
1070 seq = get_insns ();
1071 end_sequence ();
1072 emit_insn_before_setloc (seq, if_info->jump,
1073 INSN_LOCATOR (if_info->insn_a));
1074 return TRUE;
1075 }
1076 else
1077 {
1078 end_sequence ();
1079 return FALSE;
1080 }
1081 }
1082
1083 return FALSE;
1084 }
1085
1086 /* Try more complex cases involving conditional_move. */
1087
1088 static int
1089 noce_try_cmove_arith (struct noce_if_info *if_info)
1090 {
1091 rtx a = if_info->a;
1092 rtx b = if_info->b;
1093 rtx x = if_info->x;
1094 rtx insn_a, insn_b;
1095 rtx tmp, target;
1096 int is_mem = 0;
1097 enum rtx_code code;
1098
1099 /* A conditional move from two memory sources is equivalent to a
1100 conditional on their addresses followed by a load. Don't do this
1101 early because it'll screw alias analysis. Note that we've
1102 already checked for no side effects. */
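/* A rough sketch of the IS_MEM transformation below (ignoring the cost
   and aliasing details):

       if (test) x = *p; else x = *q;
   =>
       tmp = test ? p : q;
       x = *tmp;  */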
1103 if (! no_new_pseudos && cse_not_expected
1104 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
1105 && BRANCH_COST >= 5)
1106 {
1107 a = XEXP (a, 0);
1108 b = XEXP (b, 0);
1109 x = gen_reg_rtx (Pmode);
1110 is_mem = 1;
1111 }
1112
1113 /* ??? We could handle this if we knew that a load from A or B could
1114 not fault. This is also true if we've already loaded
1115 from the address along the path from ENTRY. */
1116 else if (may_trap_p (a) || may_trap_p (b))
1117 return FALSE;
1118
1119 /* if (test) x = a + b; else x = c - d;
1120 => y = a + b;
1121 x = c - d;
1122 if (test)
1123 x = y;
1124 */
1125
1126 code = GET_CODE (if_info->cond);
1127 insn_a = if_info->insn_a;
1128 insn_b = if_info->insn_b;
1129
1130 /* Possibly rearrange operands to make things come out more natural. */
1131 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1132 {
1133 int reversep = 0;
1134 if (rtx_equal_p (b, x))
1135 reversep = 1;
1136 else if (general_operand (b, GET_MODE (b)))
1137 reversep = 1;
1138
1139 if (reversep)
1140 {
1141 code = reversed_comparison_code (if_info->cond, if_info->jump);
1142 tmp = a, a = b, b = tmp;
1143 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1144 }
1145 }
1146
1147 start_sequence ();
1148
1149 /* If either operand is complex, load it into a register first.
1150 The best way to do this is to copy the original insn. In this
1151 way we preserve any clobbers etc that the insn may have had.
1152 This is of course not possible in the IS_MEM case. */
1153 if (! general_operand (a, GET_MODE (a)))
1154 {
1155 rtx set;
1156
1157 if (no_new_pseudos)
1158 goto end_seq_and_fail;
1159
1160 if (is_mem)
1161 {
1162 tmp = gen_reg_rtx (GET_MODE (a));
1163 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1164 }
1165 else if (! insn_a)
1166 goto end_seq_and_fail;
1167 else
1168 {
1169 a = gen_reg_rtx (GET_MODE (a));
1170 tmp = copy_rtx (insn_a);
1171 set = single_set (tmp);
1172 SET_DEST (set) = a;
1173 tmp = emit_insn (PATTERN (tmp));
1174 }
1175 if (recog_memoized (tmp) < 0)
1176 goto end_seq_and_fail;
1177 }
1178 if (! general_operand (b, GET_MODE (b)))
1179 {
1180 rtx set;
1181
1182 if (no_new_pseudos)
1183 goto end_seq_and_fail;
1184
1185 if (is_mem)
1186 {
1187 tmp = gen_reg_rtx (GET_MODE (b));
1188 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1189 }
1190 else if (! insn_b)
1191 goto end_seq_and_fail;
1192 else
1193 {
1194 b = gen_reg_rtx (GET_MODE (b));
1195 tmp = copy_rtx (insn_b);
1196 set = single_set (tmp);
1197 SET_DEST (set) = b;
1198 tmp = emit_insn (PATTERN (tmp));
1199 }
1200 if (recog_memoized (tmp) < 0)
1201 goto end_seq_and_fail;
1202 }
1203
1204 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1205 XEXP (if_info->cond, 1), a, b);
1206
1207 if (! target)
1208 goto end_seq_and_fail;
1209
1210 /* If we're handling the memory case from above, emit the load now. */
1211 if (is_mem)
1212 {
1213 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1214
1215 /* Copy over flags as appropriate. */
1216 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1217 MEM_VOLATILE_P (tmp) = 1;
1218 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1219 MEM_IN_STRUCT_P (tmp) = 1;
1220 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1221 MEM_SCALAR_P (tmp) = 1;
1222 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1223 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1224 set_mem_align (tmp,
1225 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1226
1227 noce_emit_move_insn (if_info->x, tmp);
1228 }
1229 else if (target != x)
1230 noce_emit_move_insn (x, target);
1231
1232 tmp = get_insns ();
1233 end_sequence ();
1234 emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1235 return TRUE;
1236
1237 end_seq_and_fail:
1238 end_sequence ();
1239 return FALSE;
1240 }
1241
1242 /* For most cases, the simplified condition we found is the best
1243 choice, but this is not the case for the min/max/abs transforms.
1244 For these we wish to know that it is A or B in the condition. */
1245
1246 static rtx
1247 noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
1248 rtx *earliest)
1249 {
1250 rtx cond, set, insn;
1251 int reverse;
1252
1253 /* If target is already mentioned in the known condition, return it. */
1254 if (reg_mentioned_p (target, if_info->cond))
1255 {
1256 *earliest = if_info->cond_earliest;
1257 return if_info->cond;
1258 }
1259
1260 set = pc_set (if_info->jump);
1261 cond = XEXP (SET_SRC (set), 0);
1262 reverse
1263 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1264 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1265
1266 /* If we're looking for a constant, try to make the conditional
1267 have that constant in it. There are two reasons why it may
1268 not have the constant we want:
1269
1270 1. GCC may have needed to put the constant in a register, because
1271 the target can't compare directly against that constant. For
1272 this case, we look for a SET immediately before the comparison
1273 that puts a constant in that register.
1274
1275 2. GCC may have canonicalized the conditional, for example
1276 replacing "if x < 4" with "if x <= 3". We can undo that (or
1277 make equivalent types of changes) to get the constants we need
1278 if they're off by one in the right direction. */
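/* For case 1, roughly: given

       (set (reg R) (const_int 4))
       ... (lt (reg X) (reg R)) ...

   we can treat the comparison as if it had been written "X < 4".  */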
1279
1280 if (GET_CODE (target) == CONST_INT)
1281 {
1282 enum rtx_code code = GET_CODE (if_info->cond);
1283 rtx op_a = XEXP (if_info->cond, 0);
1284 rtx op_b = XEXP (if_info->cond, 1);
1285 rtx prev_insn;
1286
1287 /* First, look to see if we put a constant in a register. */
1288 prev_insn = PREV_INSN (if_info->cond_earliest);
1289 if (prev_insn
1290 && INSN_P (prev_insn)
1291 && GET_CODE (PATTERN (prev_insn)) == SET)
1292 {
1293 rtx src = find_reg_equal_equiv_note (prev_insn);
1294 if (!src)
1295 src = SET_SRC (PATTERN (prev_insn));
1296 if (GET_CODE (src) == CONST_INT)
1297 {
1298 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1299 op_a = src;
1300 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1301 op_b = src;
1302
1303 if (GET_CODE (op_a) == CONST_INT)
1304 {
1305 rtx tmp = op_a;
1306 op_a = op_b;
1307 op_b = tmp;
1308 code = swap_condition (code);
1309 }
1310 }
1311 }
1312
1313 /* Now, look to see if we can get the right constant by
1314 adjusting the conditional. */
1315 if (GET_CODE (op_b) == CONST_INT)
1316 {
1317 HOST_WIDE_INT desired_val = INTVAL (target);
1318 HOST_WIDE_INT actual_val = INTVAL (op_b);
1319
1320 switch (code)
1321 {
1322 case LT:
1323 if (actual_val == desired_val + 1)
1324 {
1325 code = LE;
1326 op_b = GEN_INT (desired_val);
1327 }
1328 break;
1329 case LE:
1330 if (actual_val == desired_val - 1)
1331 {
1332 code = LT;
1333 op_b = GEN_INT (desired_val);
1334 }
1335 break;
1336 case GT:
1337 if (actual_val == desired_val - 1)
1338 {
1339 code = GE;
1340 op_b = GEN_INT (desired_val);
1341 }
1342 break;
1343 case GE:
1344 if (actual_val == desired_val + 1)
1345 {
1346 code = GT;
1347 op_b = GEN_INT (desired_val);
1348 }
1349 break;
1350 default:
1351 break;
1352 }
1353 }
1354
1355 /* If we made any changes, generate a new conditional that is
1356 equivalent to what we started with, but has the right
1357 constants in it. */
1358 if (code != GET_CODE (if_info->cond)
1359 || op_a != XEXP (if_info->cond, 0)
1360 || op_b != XEXP (if_info->cond, 1))
1361 {
1362 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1363 *earliest = if_info->cond_earliest;
1364 return cond;
1365 }
1366 }
1367
1368 cond = canonicalize_condition (if_info->jump, cond, reverse,
1369 earliest, target);
1370 if (! cond || ! reg_mentioned_p (target, cond))
1371 return NULL;
1372
1373 /* We almost certainly searched back to a different place.
1374 Need to re-verify correct lifetimes. */
1375
1376 /* X may not be mentioned in the range (cond_earliest, jump]. */
1377 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1378 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1379 return NULL;
1380
1381 /* A and B may not be modified in the range [cond_earliest, jump). */
1382 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1383 if (INSN_P (insn)
1384 && (modified_in_p (if_info->a, insn)
1385 || modified_in_p (if_info->b, insn)))
1386 return NULL;
1387
1388 return cond;
1389 }
1390
1391 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1392
1393 static int
1394 noce_try_minmax (struct noce_if_info *if_info)
1395 {
1396 rtx cond, earliest, target, seq;
1397 enum rtx_code code, op;
1398 int unsignedp;
1399
1400 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1401 if (no_new_pseudos)
1402 return FALSE;
1403
1404 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1405 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1406 to get the target to tell us... */
1407 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1408 || HONOR_NANS (GET_MODE (if_info->x)))
1409 return FALSE;
1410
1411 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1412 if (!cond)
1413 return FALSE;
1414
1415 /* Verify the condition is of the form we expect, and canonicalize
1416 the comparison code. */
1417 code = GET_CODE (cond);
1418 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1419 {
1420 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1421 return FALSE;
1422 }
1423 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1424 {
1425 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1426 return FALSE;
1427 code = swap_condition (code);
1428 }
1429 else
1430 return FALSE;
1431
1432 /* Determine what sort of operation this is. Note that the code is for
1433 a taken branch, so the code->operation mapping appears backwards. */
1434 switch (code)
1435 {
1436 case LT:
1437 case LE:
1438 case UNLT:
1439 case UNLE:
1440 op = SMAX;
1441 unsignedp = 0;
1442 break;
1443 case GT:
1444 case GE:
1445 case UNGT:
1446 case UNGE:
1447 op = SMIN;
1448 unsignedp = 0;
1449 break;
1450 case LTU:
1451 case LEU:
1452 op = UMAX;
1453 unsignedp = 1;
1454 break;
1455 case GTU:
1456 case GEU:
1457 op = UMIN;
1458 unsignedp = 1;
1459 break;
1460 default:
1461 return FALSE;
1462 }
1463
1464 start_sequence ();
1465
1466 target = expand_simple_binop (GET_MODE (if_info->x), op,
1467 if_info->a, if_info->b,
1468 if_info->x, unsignedp, OPTAB_WIDEN);
1469 if (! target)
1470 {
1471 end_sequence ();
1472 return FALSE;
1473 }
1474 if (target != if_info->x)
1475 noce_emit_move_insn (if_info->x, target);
1476
1477 seq = get_insns ();
1478 end_sequence ();
1479
1480 if (seq_contains_jump (seq))
1481 return FALSE;
1482
1483 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1484 if_info->cond = cond;
1485 if_info->cond_earliest = earliest;
1486
1487 return TRUE;
1488 }
1489
1490 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1491
1492 static int
1493 noce_try_abs (struct noce_if_info *if_info)
1494 {
1495 rtx cond, earliest, target, seq, a, b, c;
1496 int negate;
1497
1498 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1499 if (no_new_pseudos)
1500 return FALSE;
1501
1502 /* Recognize A and B as constituting an ABS or NABS. */
1503 a = if_info->a;
1504 b = if_info->b;
1505 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1506 negate = 0;
1507 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1508 {
1509 c = a; a = b; b = c;
1510 negate = 1;
1511 }
1512 else
1513 return FALSE;
1514
1515 cond = noce_get_alt_condition (if_info, b, &earliest);
1516 if (!cond)
1517 return FALSE;
1518
1519 /* Verify the condition is of the form we expect. */
1520 if (rtx_equal_p (XEXP (cond, 0), b))
1521 c = XEXP (cond, 1);
1522 else if (rtx_equal_p (XEXP (cond, 1), b))
1523 c = XEXP (cond, 0);
1524 else
1525 return FALSE;
1526
1527 /* Verify that C is zero. Search backward through the block for
1528 a REG_EQUAL note if necessary. */
1529 if (REG_P (c))
1530 {
1531 rtx insn, note = NULL;
1532 for (insn = earliest;
1533 insn != if_info->test_bb->head;
1534 insn = PREV_INSN (insn))
1535 if (INSN_P (insn)
1536 && ((note = find_reg_note (insn, REG_EQUAL, c))
1537 || (note = find_reg_note (insn, REG_EQUIV, c))))
1538 break;
1539 if (! note)
1540 return FALSE;
1541 c = XEXP (note, 0);
1542 }
1543 if (GET_CODE (c) == MEM
1544 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1545 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1546 c = get_pool_constant (XEXP (c, 0));
1547
1548 /* Work around funny ideas get_condition has wrt canonicalization.
1549 Note that these rtx constants are known to be CONST_INT, and
1550 therefore imply integer comparisons. */
1551 if (c == constm1_rtx && GET_CODE (cond) == GT)
1552 ;
1553 else if (c == const1_rtx && GET_CODE (cond) == LT)
1554 ;
1555 else if (c != CONST0_RTX (GET_MODE (b)))
1556 return FALSE;
1557
1558 /* Determine what sort of operation this is. */
1559 switch (GET_CODE (cond))
1560 {
1561 case LT:
1562 case LE:
1563 case UNLT:
1564 case UNLE:
1565 negate = !negate;
1566 break;
1567 case GT:
1568 case GE:
1569 case UNGT:
1570 case UNGE:
1571 break;
1572 default:
1573 return FALSE;
1574 }
1575
1576 start_sequence ();
1577
1578 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
1579
1580 /* ??? It's a quandary whether cmove would be better here, especially
1581 for integers. Perhaps combine will clean things up. */
1582 if (target && negate)
1583 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1584
1585 if (! target)
1586 {
1587 end_sequence ();
1588 return FALSE;
1589 }
1590
1591 if (target != if_info->x)
1592 noce_emit_move_insn (if_info->x, target);
1593
1594 seq = get_insns ();
1595 end_sequence ();
1596
1597 if (seq_contains_jump (seq))
1598 return FALSE;
1599
1600 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1601 if_info->cond = cond;
1602 if_info->cond_earliest = earliest;
1603
1604 return TRUE;
1605 }
1606
1607 /* Similar to get_condition, only the resulting condition must be
1608 valid at JUMP, instead of at EARLIEST. */
1609
1610 static rtx
1611 noce_get_condition (rtx jump, rtx *earliest)
1612 {
1613 rtx cond, set, tmp, insn;
1614 bool reverse;
1615
1616 if (! any_condjump_p (jump))
1617 return NULL_RTX;
1618
1619 set = pc_set (jump);
1620
1621 /* If this branches to JUMP_LABEL when the condition is false,
1622 reverse the condition. */
1623 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1624 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
1625
1626 /* If the condition variable is a register and is MODE_INT, accept it. */
1627
1628 cond = XEXP (SET_SRC (set), 0);
1629 tmp = XEXP (cond, 0);
1630 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
1631 {
1632 *earliest = jump;
1633
1634 if (reverse)
1635 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1636 GET_MODE (cond), tmp, XEXP (cond, 1));
1637 return cond;
1638 }
1639
1640 /* Otherwise, fall back on canonicalize_condition to do the dirty
1641 work of manipulating MODE_CC values and COMPARE rtx codes. */
1642
1643 tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX);
1644 if (!tmp)
1645 return NULL_RTX;
1646
1647 /* We are going to insert code before JUMP, not before EARLIEST.
1648 We must therefore be certain that the given condition is valid
1649 at JUMP by virtue of not having been modified since. */
1650 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1651 if (INSN_P (insn) && modified_in_p (tmp, insn))
1652 break;
1653 if (insn == jump)
1654 return tmp;
1655
1656 /* The condition was modified. See if we can get a partial result
1657 that doesn't follow all the reversals. Perhaps combine can fold
1658 them together later. */
1659 tmp = XEXP (tmp, 0);
1660 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
1661 return NULL_RTX;
1662 tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp);
1663 if (!tmp)
1664 return NULL_RTX;
1665
1666 /* For sanity's sake, re-validate the new result. */
1667 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1668 if (INSN_P (insn) && modified_in_p (tmp, insn))
1669 return NULL_RTX;
1670
1671 return tmp;
1672 }
1673
1674 /* Return true if OP is ok for if-then-else processing. */
1675
1676 static int
1677 noce_operand_ok (rtx op)
1678 {
1679 /* We special-case memories, so handle any of them with
1680 no address side effects. */
1681 if (GET_CODE (op) == MEM)
1682 return ! side_effects_p (XEXP (op, 0));
1683
1684 if (side_effects_p (op))
1685 return FALSE;
1686
1687 return ! may_trap_p (op);
1688 }
1689
1690 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1691 without using conditional execution. Return TRUE if we were
1692 successful at converting the block. */
1693
1694 static int
1695 noce_process_if_block (struct ce_if_block * ce_info)
1696 {
1697 basic_block test_bb = ce_info->test_bb; /* test block */
1698 basic_block then_bb = ce_info->then_bb; /* THEN */
1699 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1700 struct noce_if_info if_info;
1701 rtx insn_a, insn_b;
1702 rtx set_a, set_b;
1703 rtx orig_x, x, a, b;
1704 rtx jump, cond;
1705
1706 /* We're looking for patterns of the form
1707
1708 (1) if (...) x = a; else x = b;
1709 (2) x = b; if (...) x = a;
1710 (3) if (...) x = a; // as if with an initial x = x.
1711
1712 The latter two patterns require jumps to be more expensive.
1713
1714 ??? For future expansion, look for multiple X in such patterns. */
1715
1716 /* If test is comprised of && or || elements, don't handle it unless it is
1717 the special case of && elements without an ELSE block. */
1718 if (ce_info->num_multiple_test_blocks)
1719 {
1720 if (else_bb || ! ce_info->and_and_p)
1721 return FALSE;
1722
1723 ce_info->test_bb = test_bb = ce_info->last_test_bb;
1724 ce_info->num_multiple_test_blocks = 0;
1725 ce_info->num_and_and_blocks = 0;
1726 ce_info->num_or_or_blocks = 0;
1727 }
1728
1729 /* If this is not a standard conditional jump, we can't parse it. */
1730 jump = test_bb->end;
1731 cond = noce_get_condition (jump, &if_info.cond_earliest);
1732 if (! cond)
1733 return FALSE;
1734
1735 /* If the conditional jump is more than just a conditional
1736 jump, then we can not do if-conversion on this block. */
1737 if (! onlyjump_p (jump))
1738 return FALSE;
1739
1740 /* We must be comparing objects whose modes imply the size. */
1741 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1742 return FALSE;
1743
1744 /* Look for one of the potential sets. */
1745 insn_a = first_active_insn (then_bb);
1746 if (! insn_a
1747 || insn_a != last_active_insn (then_bb, FALSE)
1748 || (set_a = single_set (insn_a)) == NULL_RTX)
1749 return FALSE;
1750
1751 x = SET_DEST (set_a);
1752 a = SET_SRC (set_a);
1753
1754 /* Look for the other potential set. Make sure we've got equivalent
1755 destinations. */
1756 /* ??? This is overconservative. Storing to two different mems is
1757 as easy as conditionally computing the address. Storing to a
1758 single mem merely requires a scratch memory to use as one of the
1759 destination addresses; often the memory immediately below the
1760 stack pointer is available for this. */
1761 set_b = NULL_RTX;
1762 if (else_bb)
1763 {
1764 insn_b = first_active_insn (else_bb);
1765 if (! insn_b
1766 || insn_b != last_active_insn (else_bb, FALSE)
1767 || (set_b = single_set (insn_b)) == NULL_RTX
1768 || ! rtx_equal_p (x, SET_DEST (set_b)))
1769 return FALSE;
1770 }
1771 else
1772 {
1773 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1774 /* We're going to be moving the evaluation of B down from above
1775 COND_EARLIEST to JUMP. Make sure the relevant data is still
1776 intact. */
1777 if (! insn_b
1778 || GET_CODE (insn_b) != INSN
1779 || (set_b = single_set (insn_b)) == NULL_RTX
1780 || ! rtx_equal_p (x, SET_DEST (set_b))
1781 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
1782 || modified_between_p (SET_SRC (set_b),
1783 PREV_INSN (if_info.cond_earliest), jump)
1784 /* Likewise with X. In particular this can happen when
1785 noce_get_condition looks farther back in the instruction
1786 stream than one might expect. */
1787 || reg_overlap_mentioned_p (x, cond)
1788 || reg_overlap_mentioned_p (x, a)
1789 || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
1790 insn_b = set_b = NULL_RTX;
1791 }
1792
1793 /* If x has side effects then only the if-then-else form is safe to
1794 convert. But even in that case we would need to restore any notes
1795 (such as REG_INC) at the end. That can be tricky if
1796 noce_emit_move_insn expands to more than one insn, so disable the
1797 optimization entirely for now if there are side effects. */
1798 if (side_effects_p (x))
1799 return FALSE;
1800
1801 b = (set_b ? SET_SRC (set_b) : x);
1802
1803 /* Only operate on register destinations, and even then avoid extending
1804 the lifetime of hard registers on small register class machines. */
1805 orig_x = x;
1806 if (GET_CODE (x) != REG
1807 || (SMALL_REGISTER_CLASSES
1808 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1809 {
1810 if (no_new_pseudos || GET_MODE (x) == BLKmode)
1811 return FALSE;
1812 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1813 ? XEXP (x, 0) : x));
1814 }
1815
1816 /* Don't operate on sources that may trap or are volatile. */
1817 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1818 return FALSE;
1819
1820 /* Set up the info block for our subroutines. */
1821 if_info.test_bb = test_bb;
1822 if_info.cond = cond;
1823 if_info.jump = jump;
1824 if_info.insn_a = insn_a;
1825 if_info.insn_b = insn_b;
1826 if_info.x = x;
1827 if_info.a = a;
1828 if_info.b = b;
1829
1830 /* Try optimizations in some approximation of a useful order. */
1831 /* ??? Should first look to see if X is live incoming at all. If it
1832 isn't, we don't need anything but an unconditional set. */
1833
1834 /* Look and see if A and B are really the same. Avoid creating silly
1835 cmove constructs that no one will fix up later. */
1836 if (rtx_equal_p (a, b))
1837 {
1838 /* If we have an INSN_B, we don't have to create any new rtl. Just
1839 move the instruction that we already have. If we don't have an
1840 INSN_B, that means that A == X, and we've got a noop move. In
1841 that case don't do anything and let the code below delete INSN_A. */
1842 if (insn_b && else_bb)
1843 {
1844 rtx note;
1845
1846 if (else_bb && insn_b == else_bb->end)
1847 else_bb->end = PREV_INSN (insn_b);
1848 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
1849
1850 /* If there was a REG_EQUAL note, delete it since it may have been
1851 true due to this insn being after a jump. */
1852 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1853 remove_note (insn_b, note);
1854
1855 insn_b = NULL_RTX;
1856 }
1857 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1858 x must be executed twice. */
1859 else if (insn_b && side_effects_p (orig_x))
1860 return FALSE;
1861
1862 x = orig_x;
1863 goto success;
1864 }
1865
1866 if (noce_try_store_flag (&if_info))
1867 goto success;
1868 if (noce_try_minmax (&if_info))
1869 goto success;
1870 if (noce_try_abs (&if_info))
1871 goto success;
1872 if (HAVE_conditional_move
1873 && noce_try_cmove (&if_info))
1874 goto success;
1875 if (! HAVE_conditional_execution)
1876 {
1877 if (noce_try_store_flag_constants (&if_info))
1878 goto success;
1879 if (noce_try_addcc (&if_info))
1880 goto success;
1881 if (noce_try_store_flag_mask (&if_info))
1882 goto success;
1883 if (HAVE_conditional_move
1884 && noce_try_cmove_arith (&if_info))
1885 goto success;
1886 }
1887
1888 return FALSE;
1889
1890 success:
1891 /* The original sets may now be killed. */
1892 delete_insn (insn_a);
1893
1894 /* Several special cases here: First, we may have reused insn_b above,
1895 in which case insn_b is now NULL. Second, we want to delete insn_b
1896 if it came from the ELSE block, because it follows the now correct write
1897 write that appears in the TEST block. However, if we got insn_b from
1898 the TEST block, it may in fact be loading data needed for the comparison.
1899 We'll let life_analysis remove the insn if it's really dead. */
1900 if (insn_b && else_bb)
1901 delete_insn (insn_b);
1902
1903 /* The new insns will have been inserted immediately before the jump. We
1904 should be able to remove the jump with impunity, but the condition itself
1905 may have been modified by gcse to be shared across basic blocks. */
1906 delete_insn (jump);
1907
1908 /* If we used a temporary, fix it up now. */
1909 if (orig_x != x)
1910 {
1911 start_sequence ();
1912 noce_emit_move_insn (copy_rtx (orig_x), x);
1913 insn_b = get_insns ();
1914 end_sequence ();
1915
1916 emit_insn_after_setloc (insn_b, test_bb->end, INSN_LOCATOR (insn_a));
1917 }
1918
1919 /* Merge the blocks! */
1920 merge_if_block (ce_info);
1921
1922 return TRUE;
1923 }
1924 \f
1925 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1926 straight line code. Return true if successful. */
1927
1928 static int
1929 process_if_block (struct ce_if_block * ce_info)
1930 {
1931 if (! reload_completed
1932 && noce_process_if_block (ce_info))
1933 return TRUE;
1934
1935 if (HAVE_conditional_execution && reload_completed)
1936 {
1937 /* If we have && and || tests, try to first handle combining the && and
1938 || tests into the conditional code, and if that fails, go back and
1939 handle it without the && and ||, which at present handles the && case
1940 if there was no ELSE block. */
1941 if (cond_exec_process_if_block (ce_info, TRUE))
1942 return TRUE;
1943
1944 if (ce_info->num_multiple_test_blocks)
1945 {
1946 cancel_changes (0);
1947
1948 if (cond_exec_process_if_block (ce_info, FALSE))
1949 return TRUE;
1950 }
1951 }
1952
1953 return FALSE;
1954 }
1955
1956 /* Merge the blocks and mark for local life update. */
1957
1958 static void
1959 merge_if_block (struct ce_if_block * ce_info)
1960 {
1961 basic_block test_bb = ce_info->test_bb; /* last test block */
1962 basic_block then_bb = ce_info->then_bb; /* THEN */
1963 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1964 basic_block join_bb = ce_info->join_bb; /* join block */
1965 basic_block combo_bb;
1966
1967 /* All block merging is done into the lower block numbers. */
1968
1969 combo_bb = test_bb;
1970
1971 /* Merge any basic blocks to handle && and || subtests. Each of
1972 the blocks are on the fallthru path from the predecessor block. */
1973 if (ce_info->num_multiple_test_blocks > 0)
1974 {
1975 basic_block bb = test_bb;
1976 basic_block last_test_bb = ce_info->last_test_bb;
1977 basic_block fallthru = block_fallthru (bb);
1978
1979 do
1980 {
1981 bb = fallthru;
1982 fallthru = block_fallthru (bb);
1983 if (post_dominators)
1984 delete_from_dominance_info (post_dominators, bb);
1985 merge_blocks (combo_bb, bb);
1986 num_removed_blocks++;
1987 }
1988 while (bb != last_test_bb);
1989 }
1990
1991 /* Merge TEST block into THEN block. Normally the THEN block won't have a
1992 label, but it might if there were || tests. That label's count should be
1993 zero, and it normally should be removed. */
1994
1995 if (then_bb)
1996 {
1997 if (combo_bb->global_live_at_end)
1998 COPY_REG_SET (combo_bb->global_live_at_end,
1999 then_bb->global_live_at_end);
2000 if (post_dominators)
2001 delete_from_dominance_info (post_dominators, then_bb);
2002 merge_blocks (combo_bb, then_bb);
2003 num_removed_blocks++;
2004 }
2005
2006 /* The ELSE block, if it existed, had a label. That label's count
2007 will almost always be zero, but odd things can happen when labels
2008 get their addresses taken. */
2009 if (else_bb)
2010 {
2011 if (post_dominators)
2012 delete_from_dominance_info (post_dominators, else_bb);
2013 merge_blocks (combo_bb, else_bb);
2014 num_removed_blocks++;
2015 }
2016
2017 /* If there was no join block reported, that means it was not adjacent
2018 to the others, and so we cannot merge them. */
2019
2020 if (! join_bb)
2021 {
2022 rtx last = combo_bb->end;
2023
2024 /* The outgoing edge for the current COMBO block should already
2025 be correct. Verify this. */
2026 if (combo_bb->succ == NULL_EDGE)
2027 {
2028 if (find_reg_note (last, REG_NORETURN, NULL))
2029 ;
2030 else if (GET_CODE (last) == INSN
2031 && GET_CODE (PATTERN (last)) == TRAP_IF
2032 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
2033 ;
2034 else
2035 abort ();
2036 }
2037
2038 /* There should still be something at the end of the THEN or ELSE
2039 blocks taking us to our final destination. */
2040 else if (GET_CODE (last) == JUMP_INSN)
2041 ;
2042 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
2043 && GET_CODE (last) == CALL_INSN
2044 && SIBLING_CALL_P (last))
2045 ;
2046 else if ((combo_bb->succ->flags & EDGE_EH)
2047 && can_throw_internal (last))
2048 ;
2049 else
2050 abort ();
2051 }
2052
2053 /* The JOIN block may have had quite a number of other predecessors too.
2054 Since we've already merged the TEST, THEN and ELSE blocks, we should
2055 have only one remaining edge from our if-then-else diamond. If there
2056 is more than one remaining edge, it must come from elsewhere. There
2057 may be zero incoming edges if the THEN block didn't actually join
2058 back up (as with a call to abort). */
2059 else if ((join_bb->pred == NULL
2060 || join_bb->pred->pred_next == NULL)
2061 && join_bb != EXIT_BLOCK_PTR)
2062 {
2063 /* We can merge the JOIN. */
2064 if (combo_bb->global_live_at_end)
2065 COPY_REG_SET (combo_bb->global_live_at_end,
2066 join_bb->global_live_at_end);
2067
2068 if (post_dominators)
2069 delete_from_dominance_info (post_dominators, join_bb);
2070 merge_blocks (combo_bb, join_bb);
2071 num_removed_blocks++;
2072 }
2073 else
2074 {
2075 /* We cannot merge the JOIN. */
2076
2077 /* The outgoing edge for the current COMBO block should already
2078 be correct. Verify this. */
2079 if (combo_bb->succ->succ_next != NULL_EDGE
2080 || combo_bb->succ->dest != join_bb)
2081 abort ();
2082
2083 /* Remove the jump and cruft from the end of the COMBO block. */
2084 if (join_bb != EXIT_BLOCK_PTR)
2085 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
2086 }
2087
2088 num_updated_if_blocks++;
2089 }
2090 \f
2091 /* Find a block ending in a simple IF condition and try to transform it
2092 in some way. When converting a multi-block condition, put the new code
2093 in the first such block and delete the rest. Return a pointer to this
2094 first block if some transformation was done. Return NULL otherwise. */
2095
2096 static basic_block
2097 find_if_header (basic_block test_bb, int pass)
2098 {
2099 ce_if_block_t ce_info;
2100 edge then_edge;
2101 edge else_edge;
2102
2103 /* The kind of block we're looking for has exactly two successors. */
2104 if ((then_edge = test_bb->succ) == NULL_EDGE
2105 || (else_edge = then_edge->succ_next) == NULL_EDGE
2106 || else_edge->succ_next != NULL_EDGE)
2107 return NULL;
2108
2109 /* Neither edge should be abnormal. */
2110 if ((then_edge->flags & EDGE_COMPLEX)
2111 || (else_edge->flags & EDGE_COMPLEX))
2112 return NULL;
2113
2114 /* The THEN edge is canonically the one that falls through. */
2115 if (then_edge->flags & EDGE_FALLTHRU)
2116 ;
2117 else if (else_edge->flags & EDGE_FALLTHRU)
2118 {
2119 edge e = else_edge;
2120 else_edge = then_edge;
2121 then_edge = e;
2122 }
2123 else
2124 /* Otherwise this must be a multiway branch of some sort. */
2125 return NULL;
2126
2127 memset (&ce_info, '\0', sizeof (ce_info));
2128 ce_info.test_bb = test_bb;
2129 ce_info.then_bb = then_edge->dest;
2130 ce_info.else_bb = else_edge->dest;
2131 ce_info.pass = pass;
2132
2133 #ifdef IFCVT_INIT_EXTRA_FIELDS
2134 IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2135 #endif
2136
2137 if (find_if_block (&ce_info))
2138 goto success;
2139
2140 if (HAVE_trap && HAVE_conditional_trap
2141 && find_cond_trap (test_bb, then_edge, else_edge))
2142 goto success;
2143
2144 if (post_dominators
2145 && (! HAVE_conditional_execution || reload_completed))
2146 {
2147 if (find_if_case_1 (test_bb, then_edge, else_edge))
2148 goto success;
2149 if (find_if_case_2 (test_bb, then_edge, else_edge))
2150 goto success;
2151 }
2152
2153 return NULL;
2154
2155 success:
2156 if (rtl_dump_file)
2157 fprintf (rtl_dump_file, "Conversion succeeded on pass %d.\n", pass);
2158 return ce_info.test_bb;
2159 }
2160
2161 /* Determine whether a block has two edges, one of which falls through to
2162 the next block and the other of which jumps to TARGET_BB, so that we can
2163 tell if the block is part of an && test or an || test. Return -1 if it
2164 does not; otherwise return the number of non-note, non-jump, non-USE/CLOBBER insns in the block. */
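/* For example, given "if (a && b) x = y;" on a conditional execution
   target, the block that tests A falls through to the block that tests B
   and jumps to the ELSE (or join) block when A is false; for "a || b" it
   instead jumps to the THEN block when A is true.  This routine recognizes
   blocks of that shape.  */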
2165
2166 static int
2167 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
2168 {
2169 edge cur_edge;
2170 int fallthru_p = FALSE;
2171 int jump_p = FALSE;
2172 rtx insn;
2173 rtx end;
2174 int n_insns = 0;
2175
2176 if (!cur_bb || !target_bb)
2177 return -1;
2178
2179 /* If no edges, obviously it doesn't jump or fallthru. */
2180 if (cur_bb->succ == NULL_EDGE)
2181 return -1;
2182
2183 for (cur_edge = cur_bb->succ;
2184 cur_edge != NULL_EDGE;
2185 cur_edge = cur_edge->succ_next)
2186 {
2187 if (cur_edge->flags & EDGE_COMPLEX)
2188 /* Anything complex isn't what we want. */
2189 return -1;
2190
2191 else if (cur_edge->flags & EDGE_FALLTHRU)
2192 fallthru_p = TRUE;
2193
2194 else if (cur_edge->dest == target_bb)
2195 jump_p = TRUE;
2196
2197 else
2198 return -1;
2199 }
2200
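/* The block must both fall through and jump to TARGET_BB.  */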
2201 if ((jump_p & fallthru_p) == 0)
2202 return -1;
2203
2204 /* Don't allow calls in the block, since this is used to group && and ||
2205 together for conditional execution support. ??? We should support
2206 conditional execution across calls for IA-64 some day, but for now
2207 it makes the code simpler. */
2208 end = cur_bb->end;
2209 insn = cur_bb->head;
2210
2211 while (insn != NULL_RTX)
2212 {
2213 if (GET_CODE (insn) == CALL_INSN)
2214 return -1;
2215
2216 if (INSN_P (insn)
2217 && GET_CODE (insn) != JUMP_INSN
2218 && GET_CODE (PATTERN (insn)) != USE
2219 && GET_CODE (PATTERN (insn)) != CLOBBER)
2220 n_insns++;
2221
2222 if (insn == end)
2223 break;
2224
2225 insn = NEXT_INSN (insn);
2226 }
2227
2228 return n_insns;
2229 }
2230
2231 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2232 block. If so, we'll try to convert the insns to not require the branch.
2233 Return TRUE if we were successful at converting the block. */
2234
2235 static int
2236 find_if_block (struct ce_if_block * ce_info)
2237 {
2238 basic_block test_bb = ce_info->test_bb;
2239 basic_block then_bb = ce_info->then_bb;
2240 basic_block else_bb = ce_info->else_bb;
2241 basic_block join_bb = NULL_BLOCK;
2242 edge then_succ = then_bb->succ;
2243 edge else_succ = else_bb->succ;
2244 int then_predecessors;
2245 int else_predecessors;
2246 edge cur_edge;
2247 basic_block next;
2248
2249 ce_info->last_test_bb = test_bb;
2250
2251 /* Discover if any fallthru predecessors of the current test basic block
2252 were && tests (which jump to the else block) or || tests (which jump to
2253 the then block). */
2254 if (HAVE_conditional_execution && reload_completed
2255 && test_bb->pred != NULL_EDGE
2256 && test_bb->pred->pred_next == NULL_EDGE
2257 && test_bb->pred->flags == EDGE_FALLTHRU)
2258 {
2259 basic_block bb = test_bb->pred->src;
2260 basic_block target_bb;
2261 int max_insns = MAX_CONDITIONAL_EXECUTE;
2262 int n_insns;
2263
2264 /* Determine if the preceding block is an && or || block. */
2265 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
2266 {
2267 ce_info->and_and_p = TRUE;
2268 target_bb = else_bb;
2269 }
2270 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
2271 {
2272 ce_info->and_and_p = FALSE;
2273 target_bb = then_bb;
2274 }
2275 else
2276 target_bb = NULL_BLOCK;
2277
2278 if (target_bb && n_insns <= max_insns)
2279 {
2280 int total_insns = 0;
2281 int blocks = 0;
2282
2283 ce_info->last_test_bb = test_bb;
2284
2285 /* Found at least one && or || block, look for more. */
2286 do
2287 {
2288 ce_info->test_bb = test_bb = bb;
2289 total_insns += n_insns;
2290 blocks++;
2291
2292 if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
2293 break;
2294
2295 bb = bb->pred->src;
2296 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
2297 }
2298 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
2299
2300 ce_info->num_multiple_test_blocks = blocks;
2301 ce_info->num_multiple_test_insns = total_insns;
2302
2303 if (ce_info->and_and_p)
2304 ce_info->num_and_and_blocks = blocks;
2305 else
2306 ce_info->num_or_or_blocks = blocks;
2307 }
2308 }
2309
2310 /* Count the number of incoming edges the THEN and ELSE blocks have. */
2311 then_predecessors = 0;
2312 for (cur_edge = then_bb->pred;
2313 cur_edge != NULL_EDGE;
2314 cur_edge = cur_edge->pred_next)
2315 {
2316 then_predecessors++;
2317 if (cur_edge->flags & EDGE_COMPLEX)
2318 return FALSE;
2319 }
2320
2321 else_predecessors = 0;
2322 for (cur_edge = else_bb->pred;
2323 cur_edge != NULL_EDGE;
2324 cur_edge = cur_edge->pred_next)
2325 {
2326 else_predecessors++;
2327 if (cur_edge->flags & EDGE_COMPLEX)
2328 return FALSE;
2329 }
2330
2331 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
2332 other than any || blocks which jump to the THEN block. */
2333 if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
2334 return FALSE;
2335
2336 /* The THEN block of an IF-THEN combo must have zero or one successors. */
2337 if (then_succ != NULL_EDGE
2338 && (then_succ->succ_next != NULL_EDGE
2339 || (then_succ->flags & EDGE_COMPLEX)
2340 || (flow2_completed && tablejump_p (then_bb->end, NULL, NULL))))
2341 return FALSE;
2342
2343 /* If the THEN block has no successors, conditional execution can still
2344 make a conditional call. Don't do this unless the ELSE block has
2345 only one incoming edge -- the CFG manipulation is too ugly otherwise.
2346 Check for the last insn of the THEN block being an indirect jump, which
2347 is listed as not having any successors, but confuses the rest of the CE
2348 code processing. ??? We should fix this in the future. */
2349 if (then_succ == NULL)
2350 {
2351 if (else_bb->pred->pred_next == NULL_EDGE)
2352 {
2353 rtx last_insn = then_bb->end;
2354
2355 while (last_insn
2356 && GET_CODE (last_insn) == NOTE
2357 && last_insn != then_bb->head)
2358 last_insn = PREV_INSN (last_insn);
2359
2360 if (last_insn
2361 && GET_CODE (last_insn) == JUMP_INSN
2362 && ! simplejump_p (last_insn))
2363 return FALSE;
2364
2365 join_bb = else_bb;
2366 else_bb = NULL_BLOCK;
2367 }
2368 else
2369 return FALSE;
2370 }
2371
2372 /* If the THEN block's successor is the other edge out of the TEST block,
2373 then we have an IF-THEN combo without an ELSE. */
2374 else if (then_succ->dest == else_bb)
2375 {
2376 join_bb = else_bb;
2377 else_bb = NULL_BLOCK;
2378 }
2379
2380 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
2381 has exactly one predecessor and one successor, and the outgoing edge
2382 is not complex, then we have an IF-THEN-ELSE combo. */
2383 else if (else_succ != NULL_EDGE
2384 && then_succ->dest == else_succ->dest
2385 && else_bb->pred->pred_next == NULL_EDGE
2386 && else_succ->succ_next == NULL_EDGE
2387 && ! (else_succ->flags & EDGE_COMPLEX)
2388 && ! (flow2_completed && tablejump_p (else_bb->end, NULL, NULL)))
2389 join_bb = else_succ->dest;
2390
2391 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2392 else
2393 return FALSE;
2394
2395 num_possible_if_blocks++;
2396
2397 if (rtl_dump_file)
2398 {
2399 fprintf (rtl_dump_file, "\nIF-THEN%s block found, pass %d, start block %d [insn %d], then %d [%d]",
2400 (else_bb) ? "-ELSE" : "",
2401 ce_info->pass,
2402 test_bb->index, (test_bb->head) ? (int)INSN_UID (test_bb->head) : -1,
2403 then_bb->index, (then_bb->head) ? (int)INSN_UID (then_bb->head) : -1);
2404
2405 if (else_bb)
2406 fprintf (rtl_dump_file, ", else %d [%d]",
2407 else_bb->index, (else_bb->head) ? (int)INSN_UID (else_bb->head) : -1);
2408
2409 fprintf (rtl_dump_file, ", join %d [%d]",
2410 join_bb->index, (join_bb->head) ? (int)INSN_UID (join_bb->head) : -1);
2411
2412 if (ce_info->num_multiple_test_blocks > 0)
2413 fprintf (rtl_dump_file, ", %d %s block%s last test %d [%d]",
2414 ce_info->num_multiple_test_blocks,
2415 (ce_info->and_and_p) ? "&&" : "||",
2416 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
2417 ce_info->last_test_bb->index,
2418 ((ce_info->last_test_bb->head)
2419 ? (int)INSN_UID (ce_info->last_test_bb->head)
2420 : -1));
2421
2422 fputc ('\n', rtl_dump_file);
2423 }
2424
2425 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
2426 first condition for free, since we've already asserted that there's a
2427 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
2428 we checked the FALLTHRU flag, those are already adjacent to the last IF
2429 block. */
2430 /* ??? As an enhancement, move the ELSE block. Have to deal with
2431 BLOCK notes, if by no other means than aborting the merge if they
2432 exist. Sticky enough I don't want to think about it now. */
2433 next = then_bb;
2434 if (else_bb && (next = next->next_bb) != else_bb)
2435 return FALSE;
2436 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2437 {
2438 if (else_bb)
2439 join_bb = NULL;
2440 else
2441 return FALSE;
2442 }
2443
2444 /* Do the real work. */
2445 ce_info->else_bb = else_bb;
2446 ce_info->join_bb = join_bb;
2447
2448 return process_if_block (ce_info);
2449 }
2450
2451 /* Convert a branch over a trap, or a branch
2452 to a trap, into a conditional trap. */
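/* For example,

	if (test) goto over;
	trap;
     over:

   becomes a conditional "trap if ! test", while a branch directly to a
   block containing only a trap becomes "trap if test".  */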
2453
2454 static int
2455 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
2456 {
2457 basic_block then_bb = then_edge->dest;
2458 basic_block else_bb = else_edge->dest;
2459 basic_block other_bb, trap_bb;
2460 rtx trap, jump, cond, cond_earliest, seq;
2461 enum rtx_code code;
2462
2463 /* Locate the block with the trap instruction. */
2464 /* ??? While we look for no successors, we really ought to allow
2465 EH successors. Need to fix merge_if_block for that to work. */
2466 if ((trap = block_has_only_trap (then_bb)) != NULL)
2467 trap_bb = then_bb, other_bb = else_bb;
2468 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2469 trap_bb = else_bb, other_bb = then_bb;
2470 else
2471 return FALSE;
2472
2473 if (rtl_dump_file)
2474 {
2475 fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2476 test_bb->index, trap_bb->index);
2477 }
2478
2479 /* If this is not a standard conditional jump, we can't parse it. */
2480 jump = test_bb->end;
2481 cond = noce_get_condition (jump, &cond_earliest);
2482 if (! cond)
2483 return FALSE;
2484
2485 /* If the conditional jump is more than just a conditional jump, then
2486 we cannot do if-conversion on this block. */
2487 if (! onlyjump_p (jump))
2488 return FALSE;
2489
2490 /* We must be comparing objects whose modes imply the size. */
2491 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2492 return FALSE;
2493
2494 /* Reverse the comparison code, if necessary. */
2495 code = GET_CODE (cond);
2496 if (then_bb == trap_bb)
2497 {
2498 code = reversed_comparison_code (cond, jump);
2499 if (code == UNKNOWN)
2500 return FALSE;
2501 }
2502
2503 /* Attempt to generate the conditional trap. */
2504 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2505 TRAP_CODE (PATTERN (trap)));
2506 if (seq == NULL)
2507 return FALSE;
2508
2509 /* Emit the new insns before cond_earliest. */
2510 emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));
2511
2512 /* Delete the trap block if possible. */
2513 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2514 if (trap_bb->pred == NULL)
2515 {
2516 if (post_dominators)
2517 delete_from_dominance_info (post_dominators, trap_bb);
2518 delete_block (trap_bb);
2519 num_removed_blocks++;
2520 }
2521
2522 /* If the non-trap block and the test are now adjacent, merge them.
2523 Otherwise we must insert a direct branch. */
2524 if (test_bb->next_bb == other_bb)
2525 {
2526 struct ce_if_block new_ce_info;
2527 delete_insn (jump);
2528 memset (&new_ce_info, '\0', sizeof (new_ce_info));
2529 new_ce_info.test_bb = test_bb;
2530 new_ce_info.then_bb = NULL;
2531 new_ce_info.else_bb = NULL;
2532 new_ce_info.join_bb = other_bb;
2533 merge_if_block (&new_ce_info);
2534 }
2535 else
2536 {
2537 rtx lab, newjump;
2538
2539 lab = JUMP_LABEL (jump);
2540 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2541 LABEL_NUSES (lab) += 1;
2542 JUMP_LABEL (newjump) = lab;
2543 emit_barrier_after (newjump);
2544
2545 delete_insn (jump);
2546 }
2547
2548 return TRUE;
2549 }
2550
2551 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2552 return it. */
2553
2554 static rtx
2555 block_has_only_trap (basic_block bb)
2556 {
2557 rtx trap;
2558
2559 /* We're not the exit block. */
2560 if (bb == EXIT_BLOCK_PTR)
2561 return NULL_RTX;
2562
2563 /* The block must have no successors. */
2564 if (bb->succ)
2565 return NULL_RTX;
2566
2567 /* The only active instruction in the block must be the trap. */
2568 trap = first_active_insn (bb);
2569 if (! (trap == bb->end
2570 && GET_CODE (PATTERN (trap)) == TRAP_IF
2571 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2572 return NULL_RTX;
2573
2574 return trap;
2575 }
2576
2577 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2578 transformable, but not necessarily the other. There need be no
2579 JOIN block.
2580
2581 Return TRUE if we were successful at converting the block.
2582
2583 Cases we'd like to look at:
2584
2585 (1)
2586 if (test) goto over; // x not live
2587 x = a;
2588 goto label;
2589 over:
2590
2591 becomes
2592
2593 x = a;
2594 if (! test) goto label;
2595
2596 (2)
2597 if (test) goto E; // x not live
2598 x = big();
2599 goto L;
2600 E:
2601 x = b;
2602 goto M;
2603
2604 becomes
2605
2606 x = b;
2607 if (test) goto M;
2608 x = big();
2609 goto L;
2610
2611 (3) // This one's really only interesting for targets that can do
2612 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2613 // it results in multiple branches on a cache line, which often
2614 // does not sit well with predictors.
2615
2616 if (test1) goto E; // predicted not taken
2617 x = a;
2618 if (test2) goto F;
2619 ...
2620 E:
2621 x = b;
2622 J:
2623
2624 becomes
2625
2626 x = a;
2627 if (test1) goto E;
2628 if (test2) goto F;
2629
2630 Notes:
2631
2632 (A) Don't do (2) if the branch is predicted against the block we're
2633 eliminating. Do it anyway if we can eliminate a branch; this requires
2634 that the sole successor of the eliminated block postdominate the other
2635 side of the if.
2636
2637 (B) With CE, on (3) we can steal from both sides of the if, creating
2638
2639 if (test1) x = a;
2640 if (!test1) x = b;
2641 if (test1) goto J;
2642 if (test2) goto F;
2643 ...
2644 J:
2645
2646 Again, this is most useful if J postdominates.
2647
2648 (C) CE substitutes for helpful life information.
2649
2650 (D) These heuristics need a lot of work. */
2651
2652 /* Tests for case 1 above. */
2653
2654 static int
2655 find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
2656 {
2657 basic_block then_bb = then_edge->dest;
2658 basic_block else_bb = else_edge->dest, new_bb;
2659 edge then_succ = then_bb->succ;
2660 int then_bb_index;
2661
2662 /* THEN has one successor. */
2663 if (!then_succ || then_succ->succ_next != NULL)
2664 return FALSE;
2665
2666 /* THEN does not fall through, but is not strange either. */
2667 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2668 return FALSE;
2669
2670 /* THEN has one predecessor. */
2671 if (then_bb->pred->pred_next != NULL)
2672 return FALSE;
2673
2674 /* THEN must do something. */
2675 if (forwarder_block_p (then_bb))
2676 return FALSE;
2677
2678 num_possible_if_blocks++;
2679 if (rtl_dump_file)
2680 fprintf (rtl_dump_file,
2681 "\nIF-CASE-1 found, start %d, then %d\n",
2682 test_bb->index, then_bb->index);
2683
2684 /* THEN is small. */
2685 if (count_bb_insns (then_bb) > BRANCH_COST)
2686 return FALSE;
2687
2688 /* Registers set are dead, or are predicable. */
2689 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2690 then_bb->succ->dest, 1))
2691 return FALSE;
2692
2693 /* Conversion went ok, including moving the insns and fixing up the
2694 jump. Adjust the CFG to match. */
2695
2696 bitmap_operation (test_bb->global_live_at_end,
2697 else_bb->global_live_at_start,
2698 then_bb->global_live_at_end, BITMAP_IOR);
2699
2700 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2701 then_bb_index = then_bb->index;
2702 if (post_dominators)
2703 delete_from_dominance_info (post_dominators, then_bb);
2704 delete_block (then_bb);
2705
2706 /* Make the rest of the code believe that the newly created block is the
2707 THEN_BB block we removed. */
2708 if (new_bb)
2709 {
2710 new_bb->index = then_bb_index;
2711 BASIC_BLOCK (then_bb_index) = new_bb;
2712 if (post_dominators)
2713 add_to_dominance_info (post_dominators, new_bb);
2714 }
2715 /* We've possibly created a jump to the next insn; cleanup_cfg will fix
2716 that up later. */
2717
2718 num_removed_blocks++;
2719 num_updated_if_blocks++;
2720
2721 return TRUE;
2722 }
2723
2724 /* Test for case 2 above. */
2725
2726 static int
2727 find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
2728 {
2729 basic_block then_bb = then_edge->dest;
2730 basic_block else_bb = else_edge->dest;
2731 edge else_succ = else_bb->succ;
2732 rtx note;
2733
2734 /* ELSE has one successor. */
2735 if (!else_succ || else_succ->succ_next != NULL)
2736 return FALSE;
2737
2738 /* ELSE outgoing edge is not complex. */
2739 if (else_succ->flags & EDGE_COMPLEX)
2740 return FALSE;
2741
2742 /* ELSE has one predecessor. */
2743 if (else_bb->pred->pred_next != NULL)
2744 return FALSE;
2745
2746 /* THEN is not EXIT. */
2747 if (then_bb->index < 0)
2748 return FALSE;
2749
2750 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
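/* The REG_BR_PROB note gives the probability of the branch being taken,
   and the branch goes to ELSE; so a value of at least REG_BR_PROB_BASE / 2
   means ELSE is the predicted path.  */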
2751 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2752 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2753 ;
2754 else if (else_succ->dest->index < 0
2755 || dominated_by_p (post_dominators, then_bb,
2756 else_succ->dest))
2757 ;
2758 else
2759 return FALSE;
2760
2761 num_possible_if_blocks++;
2762 if (rtl_dump_file)
2763 fprintf (rtl_dump_file,
2764 "\nIF-CASE-2 found, start %d, else %d\n",
2765 test_bb->index, else_bb->index);
2766
2767 /* ELSE is small. */
2768 if (count_bb_insns (else_bb) > BRANCH_COST)
2769 return FALSE;
2770
2771 /* Registers set are dead, or are predicable. */
2772 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2773 return FALSE;
2774
2775 /* Conversion went ok, including moving the insns and fixing up the
2776 jump. Adjust the CFG to match. */
2777
2778 bitmap_operation (test_bb->global_live_at_end,
2779 then_bb->global_live_at_start,
2780 else_bb->global_live_at_end, BITMAP_IOR);
2781
2782 if (post_dominators)
2783 delete_from_dominance_info (post_dominators, else_bb);
2784 delete_block (else_bb);
2785
2786 num_removed_blocks++;
2787 num_updated_if_blocks++;
2788
2789 /* ??? We may now fallthru from one of THEN's successors into a join
2790 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2791
2792 return TRUE;
2793 }
2794
2795 /* A subroutine of dead_or_predicable called through for_each_rtx.
2796 Return 1 if a memory is found. */
2797
2798 static int
2799 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
2800 {
2801 return GET_CODE (*px) == MEM;
2802 }
2803
2804 /* Used by the code above to perform the actual rtl transformations.
2805 Return TRUE if successful.
2806
2807 TEST_BB is the block containing the conditional branch. MERGE_BB
2808 is the block containing the code to manipulate. NEW_DEST is the
2809 label TEST_BB should be branching to after the conversion.
2810 REVERSEP is true if the sense of the branch should be reversed. */
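/* OTHER_BB is TEST_BB's other successor (the block that is not being
   merged); in the non-predicated case the registers live at its start
   must not be clobbered by the code being moved.  */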
2811
2812 static int
2813 dead_or_predicable (basic_block test_bb, basic_block merge_bb,
2814 basic_block other_bb, basic_block new_dest, int reversep)
2815 {
2816 rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2817
2818 jump = test_bb->end;
2819
2820 /* Find the extent of the real code in the merge block. */
2821 head = merge_bb->head;
2822 end = merge_bb->end;
2823
2824 if (GET_CODE (head) == CODE_LABEL)
2825 head = NEXT_INSN (head);
2826 if (GET_CODE (head) == NOTE)
2827 {
2828 if (head == end)
2829 {
2830 head = end = NULL_RTX;
2831 goto no_body;
2832 }
2833 head = NEXT_INSN (head);
2834 }
2835
2836 if (GET_CODE (end) == JUMP_INSN)
2837 {
2838 if (head == end)
2839 {
2840 head = end = NULL_RTX;
2841 goto no_body;
2842 }
2843 end = PREV_INSN (end);
2844 }
2845
2846 /* Disable handling dead code by conditional execution if the machine needs
2847 to do anything funny with the tests, etc., i.e. if IFCVT_MODIFY_TESTS is defined. */
2848 #ifndef IFCVT_MODIFY_TESTS
2849 if (HAVE_conditional_execution)
2850 {
2851 /* In the conditional execution case, we have things easy. We know
2852 the condition is reversible. We don't have to check life info,
2853 because we're going to conditionally execute the code anyway.
2854 All that's left is making sure the insns involved can actually
2855 be predicated. */
2856
2857 rtx cond, prob_val;
2858
2859 cond = cond_exec_get_condition (jump);
2860 if (! cond)
2861 return FALSE;
2862
2863 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2864 if (prob_val)
2865 prob_val = XEXP (prob_val, 0);
2866
2867 if (reversep)
2868 {
2869 enum rtx_code rev = reversed_comparison_code (cond, jump);
2870 if (rev == UNKNOWN)
2871 return FALSE;
2872 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2873 XEXP (cond, 1));
2874 if (prob_val)
2875 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2876 }
2877
2878 if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
2879 prob_val, 0))
2880 goto cancel;
2881
2882 earliest = jump;
2883 }
2884 else
2885 #endif
2886 {
2887 /* In the non-conditional execution case, we have to verify that there
2888 are no trapping operations, no calls, no references to memory, and
2889 that any registers modified are dead at the branch site. */
2890
2891 rtx insn, cond, prev;
2892 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2893 regset merge_set, tmp, test_live, test_set;
2894 struct propagate_block_info *pbi;
2895 int i, fail = 0;
2896
2897 /* Check for no calls or trapping operations. */
2898 for (insn = head; ; insn = NEXT_INSN (insn))
2899 {
2900 if (GET_CODE (insn) == CALL_INSN)
2901 return FALSE;
2902 if (INSN_P (insn))
2903 {
2904 if (may_trap_p (PATTERN (insn)))
2905 return FALSE;
2906
2907 /* ??? Even non-trapping memories such as stack frame
2908 references must be avoided. For stores, we collect
2909 no lifetime info; for reads, we'd have to assert
2910 true_dependence false against every store in the
2911 TEST range. */
2912 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2913 return FALSE;
2914 }
2915 if (insn == end)
2916 break;
2917 }
2918
2919 if (! any_condjump_p (jump))
2920 return FALSE;
2921
2922 /* Find the extent of the conditional. */
2923 cond = noce_get_condition (jump, &earliest);
2924 if (! cond)
2925 return FALSE;
2926
2927 /* Collect:
2928 MERGE_SET = set of registers set in MERGE_BB
2929 TEST_LIVE = set of registers live at EARLIEST
2930 TEST_SET = set of registers set between EARLIEST and the
2931 end of the block. */
2932
2933 tmp = INITIALIZE_REG_SET (tmp_head);
2934 merge_set = INITIALIZE_REG_SET (merge_set_head);
2935 test_live = INITIALIZE_REG_SET (test_live_head);
2936 test_set = INITIALIZE_REG_SET (test_set_head);
2937
2938 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2939 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2940 since we've already asserted that MERGE_BB is small. */
2941 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2942
2943 /* For small register class machines, don't lengthen lifetimes of
2944 hard registers before reload. */
2945 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2946 {
2947 EXECUTE_IF_SET_IN_BITMAP
2948 (merge_set, 0, i,
2949 {
2950 if (i < FIRST_PSEUDO_REGISTER
2951 && ! fixed_regs[i]
2952 && ! global_regs[i])
2953 fail = 1;
2954 });
2955 }
2956
2957 /* For TEST, we're interested in a range of insns, not a whole block.
2958 Moreover, we're interested in the registers live from OTHER_BB. */
2959
2960 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2961 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2962 0);
2963
2964 for (insn = jump; ; insn = prev)
2965 {
2966 prev = propagate_one_insn (pbi, insn);
2967 if (insn == earliest)
2968 break;
2969 }
2970
2971 free_propagate_block_info (pbi);
2972
2973 /* We can perform the transformation if
2974 MERGE_SET & (TEST_SET | TEST_LIVE)
2975 and
2976 TEST_SET & merge_bb->global_live_at_start
2977 are empty. */
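/* In other words, the moved insns must not set a register that is also
   set in the TEST range or live at EARLIEST, and the TEST range must not
   set a register that is live on entry to MERGE_BB.  */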
2978
2979 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2980 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2981 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, fail = 1);
2982
2983 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2984 BITMAP_AND);
2985 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, fail = 1);
2986
2987 FREE_REG_SET (tmp);
2988 FREE_REG_SET (merge_set);
2989 FREE_REG_SET (test_live);
2990 FREE_REG_SET (test_set);
2991
2992 if (fail)
2993 return FALSE;
2994 }
2995
2996 no_body:
2997 /* We don't want to use normal invert_jump or redirect_jump because
2998 we don't want delete_insn to be called. Also, we want to do our own
2999 change group management. */
3000
3001 old_dest = JUMP_LABEL (jump);
3002 if (other_bb != new_dest)
3003 {
3004 new_label = block_label (new_dest);
3005 if (reversep
3006 ? ! invert_jump_1 (jump, new_label)
3007 : ! redirect_jump_1 (jump, new_label))
3008 goto cancel;
3009 }
3010
3011 if (! apply_change_group ())
3012 return FALSE;
3013
3014 if (other_bb != new_dest)
3015 {
3016 if (old_dest)
3017 LABEL_NUSES (old_dest) -= 1;
3018 if (new_label)
3019 LABEL_NUSES (new_label) += 1;
3020 JUMP_LABEL (jump) = new_label;
3021 if (reversep)
3022 invert_br_probabilities (jump);
3023
3024 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
3025 if (reversep)
3026 {
3027 gcov_type count, probability;
3028 count = BRANCH_EDGE (test_bb)->count;
3029 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
3030 FALLTHRU_EDGE (test_bb)->count = count;
3031 probability = BRANCH_EDGE (test_bb)->probability;
3032 BRANCH_EDGE (test_bb)->probability
3033 = FALLTHRU_EDGE (test_bb)->probability;
3034 FALLTHRU_EDGE (test_bb)->probability = probability;
3035 update_br_prob_note (test_bb);
3036 }
3037 }
3038
3039 /* Move the insns out of MERGE_BB to before the branch. */
3040 if (head != NULL)
3041 {
3042 if (end == merge_bb->end)
3043 merge_bb->end = PREV_INSN (head);
3044
3045 if (squeeze_notes (&head, &end))
3046 return TRUE;
3047
3048 reorder_insns (head, end, PREV_INSN (earliest));
3049 }
3050
3051 /* Remove the jump and edge if we can. */
3052 if (other_bb == new_dest)
3053 {
3054 delete_insn (jump);
3055 remove_edge (BRANCH_EDGE (test_bb));
3056 /* ??? Can't merge blocks here, as then_bb is still in use.
3057 At minimum, the merge will get done just before bb-reorder. */
3058 }
3059
3060 return TRUE;
3061
3062 cancel:
3063 cancel_changes (0);
3064 return FALSE;
3065 }
3066 \f
3067 /* Main entry point for all if-conversion. */
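/* X_LIFE_DATA_OK is nonzero when global life information is up to date
   and may be used and updated here.  */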
3068
3069 void
3070 if_convert (int x_life_data_ok)
3071 {
3072 basic_block bb;
3073 int pass;
3074
3075 num_possible_if_blocks = 0;
3076 num_updated_if_blocks = 0;
3077 num_removed_blocks = 0;
3078 life_data_ok = (x_life_data_ok != 0);
3079
3080 /* Free up basic_block_for_insn so that we don't have to keep it
3081 up to date, either here or in merge_blocks. */
3082 free_basic_block_vars (1);
3083
3084 /* Compute postdominators if we think we'll use them. */
3085 post_dominators = NULL;
3086 if (HAVE_conditional_execution || life_data_ok)
3087 {
3088 post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
3089 }
3090 if (life_data_ok)
3091 clear_bb_flags ();
3092
3093 /* Go through each of the basic blocks looking for things to convert. If we
3094 have conditional execution, we make multiple passes to allow us to handle
3095 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
3096 pass = 0;
3097 do
3098 {
3099 cond_exec_changed_p = FALSE;
3100 pass++;
3101
3102 #ifdef IFCVT_MULTIPLE_DUMPS
3103 if (rtl_dump_file && pass > 1)
3104 fprintf (rtl_dump_file, "\n\n========== Pass %d ==========\n", pass);
3105 #endif
3106
3107 FOR_EACH_BB (bb)
3108 {
3109 basic_block new_bb;
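/* find_if_header returns the first block of the converted region when it
   succeeds; keep retrying from that block until nothing more converts.  */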
3110 while ((new_bb = find_if_header (bb, pass)))
3111 bb = new_bb;
3112 }
3113
3114 #ifdef IFCVT_MULTIPLE_DUMPS
3115 if (rtl_dump_file && cond_exec_changed_p)
3116 print_rtl_with_bb (rtl_dump_file, get_insns ());
3117 #endif
3118 }
3119 while (cond_exec_changed_p);
3120
3121 #ifdef IFCVT_MULTIPLE_DUMPS
3122 if (rtl_dump_file)
3123 fprintf (rtl_dump_file, "\n\n========== no more changes\n");
3124 #endif
3125
3126 if (post_dominators)
3127 free_dominance_info (post_dominators);
3128
3129 if (rtl_dump_file)
3130 fflush (rtl_dump_file);
3131
3132 clear_aux_for_blocks ();
3133
3134 /* Rebuild life info for basic blocks that require it. */
3135 if (num_removed_blocks && life_data_ok)
3136 {
3137 /* If we allocated new pseudos, we must resize the array for sched1. */
3138 if (max_regno < max_reg_num ())
3139 {
3140 max_regno = max_reg_num ();
3141 allocate_reg_info (max_regno, FALSE, FALSE);
3142 }
3143 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3144 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3145 | PROP_KILL_DEAD_CODE);
3146 }
3147
3148 /* Write the final stats. */
3149 if (rtl_dump_file && num_possible_if_blocks > 0)
3150 {
3151 fprintf (rtl_dump_file,
3152 "\n%d possible IF blocks searched.\n",
3153 num_possible_if_blocks);
3154 fprintf (rtl_dump_file,
3155 "%d IF blocks converted.\n",
3156 num_updated_if_blocks);
3157 fprintf (rtl_dump_file,
3158 "%d basic blocks deleted.\n\n\n",
3159 num_removed_blocks);
3160 }
3161
3162 #ifdef ENABLE_CHECKING
3163 verify_flow_info ();
3164 #endif
3165 }