Enhancements for the if-conversion pass
[gcc.git] / gcc / ifcvt.c
1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23
24 #include "rtl.h"
25 #include "regs.h"
26 #include "function.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "except.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "expr.h"
34 #include "real.h"
35 #include "output.h"
36 #include "toplev.h"
37 #include "tm_p.h"
38
39
40 #ifndef HAVE_conditional_execution
41 #define HAVE_conditional_execution 0
42 #endif
43 #ifndef HAVE_conditional_move
44 #define HAVE_conditional_move 0
45 #endif
46 #ifndef HAVE_incscc
47 #define HAVE_incscc 0
48 #endif
49 #ifndef HAVE_decscc
50 #define HAVE_decscc 0
51 #endif
52 #ifndef HAVE_trap
53 #define HAVE_trap 0
54 #endif
55 #ifndef HAVE_conditional_trap
56 #define HAVE_conditional_trap 0
57 #endif
58
59 #ifndef MAX_CONDITIONAL_EXECUTE
60 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
61 #endif
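/* The default above is only a rough heuristic: converting up to
   BRANCH_COST + 1 insns per arm is assumed to be profitable because the
   conversion removes a (potentially mispredicted) jump.  A target that
   knows better can define MAX_CONDITIONAL_EXECUTE itself.  */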
62
63 #define NULL_EDGE ((struct edge_def *)NULL)
64 #define NULL_BLOCK ((struct basic_block_def *)NULL)
65
66 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
67 static int num_possible_if_blocks;
68
69 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
70 execution. */
71 static int num_updated_if_blocks;
72
73 /* # of basic blocks that were removed. */
74 static int num_removed_blocks;
75
76 /* Whether conditional execution changes were made. */
77 static int cond_exec_changed_p;
78
79 /* True if life data ok at present. */
80 static bool life_data_ok;
81
82 /* The post-dominator relation on the original block numbers. */
83 static dominance_info post_dominators;
84
85 /* Forward references. */
86 static int count_bb_insns PARAMS ((basic_block));
87 static rtx first_active_insn PARAMS ((basic_block));
88 static rtx last_active_insn PARAMS ((basic_block, int));
89 static int seq_contains_jump PARAMS ((rtx));
90 static basic_block block_fallthru PARAMS ((basic_block));
91 static int cond_exec_process_insns PARAMS ((ce_if_block_t *,
92 rtx, rtx, rtx, rtx, int));
93 static rtx cond_exec_get_condition PARAMS ((rtx));
94 static int cond_exec_process_if_block PARAMS ((ce_if_block_t *, int));
95 static rtx noce_get_condition PARAMS ((rtx, rtx *));
96 static int noce_operand_ok PARAMS ((rtx));
97 static int noce_process_if_block PARAMS ((ce_if_block_t *));
98 static int process_if_block PARAMS ((ce_if_block_t *));
99 static void merge_if_block PARAMS ((ce_if_block_t *));
100 static int find_cond_trap PARAMS ((basic_block, edge, edge));
101 static basic_block find_if_header PARAMS ((basic_block, int));
102 static int block_jumps_and_fallthru_p PARAMS ((basic_block, basic_block));
103 static int find_if_block PARAMS ((ce_if_block_t *));
104 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
105 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
106 static int find_memory PARAMS ((rtx *, void *));
107 static int dead_or_predicable PARAMS ((basic_block, basic_block,
108 basic_block, basic_block, int));
109 static void noce_emit_move_insn PARAMS ((rtx, rtx));
110 static rtx block_has_only_trap PARAMS ((basic_block));
111 \f
112 /* Count the number of non-jump active insns in BB. */
113
114 static int
115 count_bb_insns (bb)
116 basic_block bb;
117 {
118 int count = 0;
119 rtx insn = bb->head;
120
121 while (1)
122 {
123 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
124 count++;
125
126 if (insn == bb->end)
127 break;
128 insn = NEXT_INSN (insn);
129 }
130
131 return count;
132 }
133
134 /* Return the first non-jump active insn in the basic block. */
135
136 static rtx
137 first_active_insn (bb)
138 basic_block bb;
139 {
140 rtx insn = bb->head;
141
142 if (GET_CODE (insn) == CODE_LABEL)
143 {
144 if (insn == bb->end)
145 return NULL_RTX;
146 insn = NEXT_INSN (insn);
147 }
148
149 while (GET_CODE (insn) == NOTE)
150 {
151 if (insn == bb->end)
152 return NULL_RTX;
153 insn = NEXT_INSN (insn);
154 }
155
156 if (GET_CODE (insn) == JUMP_INSN)
157 return NULL_RTX;
158
159 return insn;
160 }
161
162 /* Return the last active non-jump insn in the basic block. */
163
164 static rtx
165 last_active_insn (bb, skip_use_p)
166 basic_block bb;
167 int skip_use_p;
168 {
169 rtx insn = bb->end;
170 rtx head = bb->head;
171
172 while (GET_CODE (insn) == NOTE
173 || GET_CODE (insn) == JUMP_INSN
174 || (skip_use_p
175 && GET_CODE (insn) == INSN
176 && GET_CODE (PATTERN (insn)) == USE))
177 {
178 if (insn == head)
179 return NULL_RTX;
180 insn = PREV_INSN (insn);
181 }
182
183 if (GET_CODE (insn) == CODE_LABEL)
184 return NULL_RTX;
185
186 return insn;
187 }
188
189 /* It is possible, especially when having dealt with multi-word
190 arithmetic, for the expanders to have emitted jumps. Search
191 through the sequence and return TRUE if a jump exists so that
192 we can abort the conversion. */
193
194 static int
195 seq_contains_jump (insn)
196 rtx insn;
197 {
198 while (insn)
199 {
200 if (GET_CODE (insn) == JUMP_INSN)
201 return 1;
202 insn = NEXT_INSN (insn);
203 }
204 return 0;
205 }
206
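/* Return the basic block reached by falling through BB, or NULL_BLOCK
   if BB has no fallthru successor edge.  */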
207 static basic_block
208 block_fallthru (bb)
209 basic_block bb;
210 {
211 edge e;
212
213 for (e = bb->succ;
214 e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
215 e = e->succ_next)
216 ;
217
218 return (e) ? e->dest : NULL_BLOCK;
219 }
220 \f
221 /* Go through a bunch of insns, converting them to conditional
222 execution format if possible. Return TRUE if all of the non-note
223 insns were processed. */
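/* For example, an insn whose pattern is roughly

	(set (reg:SI 100) (reg:SI 101))

   is rewritten below as something like

	(cond_exec (ne (reg:SI 102) (const_int 0))
		   (set (reg:SI 100) (reg:SI 101)))

   where the test comes from the conditional jump ending the test block
   (the register numbers here are only illustrative).  */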
224
225 static int
226 cond_exec_process_insns (ce_info, start, end, test, prob_val, mod_ok)
227 ce_if_block_t *ce_info; /* if block information */
228 rtx start; /* first insn to look at */
229 rtx end; /* last insn to look at */
230 rtx test; /* conditional execution test */
231 rtx prob_val; /* probability of branch taken. */
232 int mod_ok; /* true if an insn may modify the test (it must then be last). */
233 {
234 int must_be_last = FALSE;
235 rtx insn;
236 rtx xtest;
237 rtx pattern;
238
239 if (!start || !end)
240 return FALSE;
241
242 for (insn = start; ; insn = NEXT_INSN (insn))
243 {
244 if (GET_CODE (insn) == NOTE)
245 goto insn_done;
246
247 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
248 abort ();
249
250 /* Remove USE insns that get in the way. */
251 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
252 {
253 /* ??? Ug. Actually unlinking the thing is problematic,
254 given what we'd have to coordinate with our callers. */
255 PUT_CODE (insn, NOTE);
256 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
257 NOTE_SOURCE_FILE (insn) = 0;
258 goto insn_done;
259 }
260
261 /* Last insn wasn't last? */
262 if (must_be_last)
263 return FALSE;
264
265 if (modified_in_p (test, insn))
266 {
267 if (!mod_ok)
268 return FALSE;
269 must_be_last = TRUE;
270 }
271
272 /* Now build the conditional form of the instruction. */
273 pattern = PATTERN (insn);
274 xtest = copy_rtx (test);
275
276 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
277 two conditions. */
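/* E.g. an insn that was already predicated as
	(cond_exec C1 (set X Y))
   and is now being put under test C2 becomes roughly
	(cond_exec (and C2 C1) (set X Y))
   provided the two tests have the same machine mode.  */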
278 if (GET_CODE (pattern) == COND_EXEC)
279 {
280 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
281 return FALSE;
282
283 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
284 COND_EXEC_TEST (pattern));
285 pattern = COND_EXEC_CODE (pattern);
286 }
287
288 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
289
290 /* If the machine needs to modify the insn being conditionally executed,
291 say for example to force a constant integer operand into a temp
292 register, do so here. */
293 #ifdef IFCVT_MODIFY_INSN
294 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
295 if (! pattern)
296 return FALSE;
297 #endif
298
299 validate_change (insn, &PATTERN (insn), pattern, 1);
300
301 if (GET_CODE (insn) == CALL_INSN && prob_val)
302 validate_change (insn, &REG_NOTES (insn),
303 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
304 REG_NOTES (insn)), 1);
305
306 insn_done:
307 if (insn == end)
308 break;
309 }
310
311 return TRUE;
312 }
313
314 /* Return the condition for a jump. Do not do any special processing. */
315
316 static rtx
317 cond_exec_get_condition (jump)
318 rtx jump;
319 {
320 rtx test_if, cond;
321
322 if (any_condjump_p (jump))
323 test_if = SET_SRC (pc_set (jump));
324 else
325 return NULL_RTX;
326 cond = XEXP (test_if, 0);
327
328 /* If this branches to JUMP_LABEL when the condition is false,
329 reverse the condition. */
330 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
331 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
332 {
333 enum rtx_code rev = reversed_comparison_code (cond, jump);
334 if (rev == UNKNOWN)
335 return NULL_RTX;
336
337 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
338 XEXP (cond, 1));
339 }
340
341 return cond;
342 }
343
344 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
345 to conditional execution. Return TRUE if we were successful at
346 converting the block. */
347
348 static int
349 cond_exec_process_if_block (ce_info, do_multiple_p)
350 ce_if_block_t * ce_info; /* if block information */
351 int do_multiple_p; /* != 0 if we should handle && and || blocks */
352 {
353 basic_block test_bb = ce_info->test_bb; /* last test block */
354 basic_block then_bb = ce_info->then_bb; /* THEN */
355 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
356 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
357 rtx then_start; /* first insn in THEN block */
358 rtx then_end; /* last insn + 1 in THEN block */
359 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
360 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
361 int max; /* max # of insns to convert. */
362 int then_mod_ok; /* whether conditional mods are ok in THEN */
363 rtx true_expr; /* test for else block insns */
364 rtx false_expr; /* test for then block insns */
365 rtx true_prob_val; /* probability of else block */
366 rtx false_prob_val; /* probability of then block */
367 int n_insns;
368 enum rtx_code false_code;
369
370 /* If test is comprised of && or || elements, and we've failed at handling
371 all of them together, just use the last test if it is the special case of
372 && elements without an ELSE block. */
373 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
374 {
375 if (else_bb || ! ce_info->and_and_p)
376 return FALSE;
377
378 ce_info->test_bb = test_bb = ce_info->last_test_bb;
379 ce_info->num_multiple_test_blocks = 0;
380 ce_info->num_and_and_blocks = 0;
381 ce_info->num_or_or_blocks = 0;
382 }
383
384 /* Find the conditional jump to the ELSE or JOIN part, and isolate
385 the test. */
386 test_expr = cond_exec_get_condition (test_bb->end);
387 if (! test_expr)
388 return FALSE;
389
390 /* If the conditional jump is more than just a conditional jump,
391 then we can not do conditional execution conversion on this block. */
392 if (! onlyjump_p (test_bb->end))
393 return FALSE;
394
395 /* Collect the bounds of where we're to search, skipping any labels, jumps
396 and notes at the beginning and end of the block. Then count the total
397 number of insns and see if it is small enough to convert. */
398 then_start = first_active_insn (then_bb);
399 then_end = last_active_insn (then_bb, TRUE);
400 n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
401 max = MAX_CONDITIONAL_EXECUTE;
402
403 if (else_bb)
404 {
405 max *= 2;
406 else_start = first_active_insn (else_bb);
407 else_end = last_active_insn (else_bb, TRUE);
408 n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
409 }
410
411 if (n_insns > max)
412 return FALSE;
413
414 /* Map test_expr/test_jump into the appropriate MD tests to use on
415 the conditionally executed code. */
416
417 true_expr = test_expr;
418
419 false_code = reversed_comparison_code (true_expr, test_bb->end);
420 if (false_code != UNKNOWN)
421 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
422 XEXP (true_expr, 0), XEXP (true_expr, 1));
423 else
424 false_expr = NULL_RTX;
425
437 #ifdef IFCVT_MODIFY_TESTS
438 /* If the machine description needs to modify the tests, such as setting a
439 conditional execution register from a comparison, it can do so here. */
440 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
441
442 /* See if the conversion failed */
443 if (!true_expr || !false_expr)
444 goto fail;
445 #endif
446
447 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
448 if (true_prob_val)
449 {
450 true_prob_val = XEXP (true_prob_val, 0);
451 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
452 }
453 else
454 false_prob_val = NULL_RTX;
455
456 /* If we have && or || tests, do them here. These tests are in the adjacent
457 blocks after the first block containing the test. */
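/* For example, a source-level test such as "if (a && b) x = y;" is laid
   out as a chain of test blocks, one per sub-test.  Each later test
   block is itself conditionally executed below, and the accumulated
   TRUE/FALSE predicates are combined with AND/IOR so that the THEN and
   ELSE insns end up guarded by the complete condition.  */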
458 if (ce_info->num_multiple_test_blocks > 0)
459 {
460 basic_block bb = test_bb;
461 basic_block last_test_bb = ce_info->last_test_bb;
462
463 do
464 {
465 rtx start, end;
466 rtx t, f;
467
468 bb = block_fallthru (bb);
469 start = first_active_insn (bb);
470 end = last_active_insn (bb, TRUE);
471 if (start
472 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
473 false_prob_val, FALSE))
474 goto fail;
475
476 /* If the conditional jump is more than just a conditional jump, then
477 we can not do conditional execution conversion on this block. */
478 if (! onlyjump_p (bb->end))
479 goto fail;
480
481 /* Find the conditional jump and isolate the test. */
482 t = cond_exec_get_condition (bb->end);
483 if (! t)
484 goto fail;
485
486 f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
487 GET_MODE (t),
488 XEXP (t, 0),
489 XEXP (t, 1));
490
491 if (ce_info->and_and_p)
492 {
493 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
494 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
495 }
496 else
497 {
498 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
499 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
500 }
501
502 /* If the machine description needs to modify the tests, such as
503 setting a conditional execution register from a comparison, it can
504 do so here. */
505 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
506 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
507
508 /* See if the conversion failed */
509 if (!t || !f)
510 goto fail;
511 #endif
512
513 true_expr = t;
514 false_expr = f;
515 }
516 while (bb != last_test_bb);
517 }
518
519 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
520 in the THEN block. */
521 then_mod_ok = (else_bb == NULL_BLOCK);
522
523 /* Go through the THEN and ELSE blocks converting the insns if possible
524 to conditional execution. */
525
526 if (then_end
527 && (! false_expr
528 || ! cond_exec_process_insns (ce_info, then_start, then_end,
529 false_expr, false_prob_val,
530 then_mod_ok)))
531 goto fail;
532
533 if (else_bb && else_end
534 && ! cond_exec_process_insns (ce_info, else_start, else_end,
535 true_expr, true_prob_val, TRUE))
536 goto fail;
537
538 /* If we cannot apply the changes, fail. Do not go through the normal fail
539 processing, since apply_change_group will call cancel_changes. */
540 if (! apply_change_group ())
541 {
542 #ifdef IFCVT_MODIFY_CANCEL
543 /* Cancel any machine dependent changes. */
544 IFCVT_MODIFY_CANCEL (ce_info);
545 #endif
546 return FALSE;
547 }
548
549 #ifdef IFCVT_MODIFY_FINAL
550 /* Do any machine dependent final modifications */
551 IFCVT_MODIFY_FINAL (ce_info);
552 #endif
553
554 /* Conversion succeeded. */
555 if (rtl_dump_file)
556 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
557 n_insns, (n_insns == 1) ? " was" : "s were");
558
559 /* Merge the blocks! */
560 merge_if_block (ce_info);
561 cond_exec_changed_p = TRUE;
562 return TRUE;
563
564 fail:
565 #ifdef IFCVT_MODIFY_CANCEL
566 /* Cancel any machine dependent changes. */
567 IFCVT_MODIFY_CANCEL (ce_info);
568 #endif
569
570 cancel_changes (0);
571 return FALSE;
572 }
573 \f
574 /* Used by noce_process_if_block to communicate with its subroutines.
575
576 The subroutines know that A and B may be evaluated freely. They
577 know that X is a register. They should insert new instructions
578 before cond_earliest. */
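/* In terms of the patterns noce_process_if_block looks for, e.g.
   "if (test) x = a; else x = b;", X is the common destination register,
   A is the value from the THEN arm, B is the value from the ELSE arm
   (or X's previous value when there is no ELSE), and JUMP/COND describe
   the conditional branch that ends TEST_BB.  */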
579
580 struct noce_if_info
581 {
582 basic_block test_bb;
583 rtx insn_a, insn_b;
584 rtx x, a, b;
585 rtx jump, cond, cond_earliest;
586 };
587
588 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
589 rtx, int, int));
590 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
591 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
592 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
593 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
594 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
595 rtx, enum rtx_code, rtx,
596 rtx, rtx, rtx));
597 static int noce_try_cmove PARAMS ((struct noce_if_info *));
598 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
599 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
600 rtx, rtx *));
601 static int noce_try_minmax PARAMS ((struct noce_if_info *));
602 static int noce_try_abs PARAMS ((struct noce_if_info *));
603
604 /* Helper function for noce_try_store_flag*. */
605
606 static rtx
607 noce_emit_store_flag (if_info, x, reversep, normalize)
608 struct noce_if_info *if_info;
609 rtx x;
610 int reversep, normalize;
611 {
612 rtx cond = if_info->cond;
613 int cond_complex;
614 enum rtx_code code;
615
616 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
617 || ! general_operand (XEXP (cond, 1), VOIDmode));
618
619 /* If earliest == jump, or when the condition is complex, try to
620 build the store_flag insn directly. */
621
622 if (cond_complex)
623 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
624
625 if (reversep)
626 code = reversed_comparison_code (cond, if_info->jump);
627 else
628 code = GET_CODE (cond);
629
630 if ((if_info->cond_earliest == if_info->jump || cond_complex)
631 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
632 {
633 rtx tmp;
634
635 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
636 XEXP (cond, 1));
637 tmp = gen_rtx_SET (VOIDmode, x, tmp);
638
639 start_sequence ();
640 tmp = emit_insn (tmp);
641
642 if (recog_memoized (tmp) >= 0)
643 {
644 tmp = get_insns ();
645 end_sequence ();
646 emit_insn (tmp);
647
648 if_info->cond_earliest = if_info->jump;
649
650 return x;
651 }
652
653 end_sequence ();
654 }
655
656 /* Don't even try if the comparison operands are weird. */
657 if (cond_complex)
658 return NULL_RTX;
659
660 return emit_store_flag (x, code, XEXP (cond, 0),
661 XEXP (cond, 1), VOIDmode,
662 (code == LTU || code == LEU
663 || code == GEU || code == GTU), normalize);
664 }
665
666 /* Emit instruction to move an rtx into STRICT_LOW_PART. */
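/* If X is not a STRICT_LOW_PART this is an ordinary emit_move_insn;
   otherwise the value is written into the addressed low part of the
   enclosing register with store_bit_field, so the remaining bits are
   preserved.  */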
667 static void
668 noce_emit_move_insn (x, y)
669 rtx x, y;
670 {
671 enum machine_mode outmode, inmode;
672 rtx outer, inner;
673 int bitpos;
674
675 if (GET_CODE (x) != STRICT_LOW_PART)
676 {
677 emit_move_insn (x, y);
678 return;
679 }
680
681 outer = XEXP (x, 0);
682 inner = XEXP (outer, 0);
683 outmode = GET_MODE (outer);
684 inmode = GET_MODE (inner);
685 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
686 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
687 GET_MODE_BITSIZE (inmode));
688 }
689
690 /* Convert "if (test) x = 1; else x = 0".
691
692 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
693 tried in noce_try_store_flag_constants after noce_try_cmove has had
694 a go at the conversion. */
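/* The branch and both assignments collapse into a single store-flag
   operation, e.g. roughly "x = (a < b)" on a target where
   STORE_FLAG_VALUE is 1.  When the 0 and STORE_FLAG_VALUE are attached
   to the opposite arms, the comparison is reversed instead (if it is
   reversible at all).  */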
695
696 static int
697 noce_try_store_flag (if_info)
698 struct noce_if_info *if_info;
699 {
700 int reversep;
701 rtx target, seq;
702
703 if (GET_CODE (if_info->b) == CONST_INT
704 && INTVAL (if_info->b) == STORE_FLAG_VALUE
705 && if_info->a == const0_rtx)
706 reversep = 0;
707 else if (if_info->b == const0_rtx
708 && GET_CODE (if_info->a) == CONST_INT
709 && INTVAL (if_info->a) == STORE_FLAG_VALUE
710 && (reversed_comparison_code (if_info->cond, if_info->jump)
711 != UNKNOWN))
712 reversep = 1;
713 else
714 return FALSE;
715
716 start_sequence ();
717
718 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
719 if (target)
720 {
721 if (target != if_info->x)
722 noce_emit_move_insn (if_info->x, target);
723
724 seq = get_insns ();
725 end_sequence ();
726 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
727
728 return TRUE;
729 }
730 else
731 {
732 end_sequence ();
733 return FALSE;
734 }
735 }
736
737 /* Convert "if (test) x = a; else x = b", for A and B constant. */
738
739 static int
740 noce_try_store_flag_constants (if_info)
741 struct noce_if_info *if_info;
742 {
743 rtx target, seq;
744 int reversep;
745 HOST_WIDE_INT itrue, ifalse, diff, tmp;
746 int normalize, can_reverse;
747 enum machine_mode mode;
748
749 if (! no_new_pseudos
750 && GET_CODE (if_info->a) == CONST_INT
751 && GET_CODE (if_info->b) == CONST_INT)
752 {
753 mode = GET_MODE (if_info->x);
754 ifalse = INTVAL (if_info->a);
755 itrue = INTVAL (if_info->b);
756
757 /* Make sure we can represent the difference between the two values. */
758 if ((itrue - ifalse > 0)
759 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
760 return FALSE;
761
762 diff = trunc_int_for_mode (itrue - ifalse, mode);
763
764 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
765 != UNKNOWN);
766
767 reversep = 0;
768 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
769 normalize = 0;
770 else if (ifalse == 0 && exact_log2 (itrue) >= 0
771 && (STORE_FLAG_VALUE == 1
772 || BRANCH_COST >= 2))
773 normalize = 1;
774 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
775 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
776 normalize = 1, reversep = 1;
777 else if (itrue == -1
778 && (STORE_FLAG_VALUE == -1
779 || BRANCH_COST >= 2))
780 normalize = -1;
781 else if (ifalse == -1 && can_reverse
782 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
783 normalize = -1, reversep = 1;
784 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
785 || BRANCH_COST >= 3)
786 normalize = -1;
787 else
788 return FALSE;
789
790 if (reversep)
791 {
792 tmp = itrue; itrue = ifalse; ifalse = tmp;
793 diff = trunc_int_for_mode (-diff, mode);
794 }
795
796 start_sequence ();
797 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
798 if (! target)
799 {
800 end_sequence ();
801 return FALSE;
802 }
803
804 /* if (test) x = 3; else x = 4;
805 => x = 3 + (test == 0); */
806 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
807 {
808 target = expand_simple_binop (mode,
809 (diff == STORE_FLAG_VALUE
810 ? PLUS : MINUS),
811 GEN_INT (ifalse), target, if_info->x, 0,
812 OPTAB_WIDEN);
813 }
814
815 /* if (test) x = 8; else x = 0;
816 => x = (test != 0) << 3; */
817 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
818 {
819 target = expand_simple_binop (mode, ASHIFT,
820 target, GEN_INT (tmp), if_info->x, 0,
821 OPTAB_WIDEN);
822 }
823
824 /* if (test) x = -1; else x = b;
825 => x = -(test != 0) | b; */
826 else if (itrue == -1)
827 {
828 target = expand_simple_binop (mode, IOR,
829 target, GEN_INT (ifalse), if_info->x, 0,
830 OPTAB_WIDEN);
831 }
832
833 /* if (test) x = a; else x = b;
834 => x = (-(test != 0) & (b - a)) + a; */
835 else
836 {
837 target = expand_simple_binop (mode, AND,
838 target, GEN_INT (diff), if_info->x, 0,
839 OPTAB_WIDEN);
840 if (target)
841 target = expand_simple_binop (mode, PLUS,
842 target, GEN_INT (ifalse),
843 if_info->x, 0, OPTAB_WIDEN);
844 }
845
846 if (! target)
847 {
848 end_sequence ();
849 return FALSE;
850 }
851
852 if (target != if_info->x)
853 noce_emit_move_insn (if_info->x, target);
854
855 seq = get_insns ();
856 end_sequence ();
857
858 if (seq_contains_jump (seq))
859 return FALSE;
860
861 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
862
863 return TRUE;
864 }
865
866 return FALSE;
867 }
868
869 /* Convert "if (test) foo++" into "foo += (test != 0)", and
870 similarly for "foo--". */
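/* E.g. on a target where STORE_FLAG_VALUE is -1, the increment in
   "if (test) foo++;" is performed by subtracting the flag value (which
   is -1 when the test holds); the SUBTRACT flag below selects this.  */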
871
872 static int
873 noce_try_store_flag_inc (if_info)
874 struct noce_if_info *if_info;
875 {
876 rtx target, seq;
877 int subtract, normalize;
878
879 if (! no_new_pseudos
880 && (BRANCH_COST >= 2
881 || HAVE_incscc
882 || HAVE_decscc)
883 /* Should be no `else' case to worry about. */
884 && if_info->b == if_info->x
885 && GET_CODE (if_info->a) == PLUS
886 && (XEXP (if_info->a, 1) == const1_rtx
887 || XEXP (if_info->a, 1) == constm1_rtx)
888 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
889 && (reversed_comparison_code (if_info->cond, if_info->jump)
890 != UNKNOWN))
891 {
892 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
893 subtract = 0, normalize = 0;
894 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
895 subtract = 1, normalize = 0;
896 else
897 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
898
899 start_sequence ();
900
901 target = noce_emit_store_flag (if_info,
902 gen_reg_rtx (GET_MODE (if_info->x)),
903 1, normalize);
904
905 if (target)
906 target = expand_simple_binop (GET_MODE (if_info->x),
907 subtract ? MINUS : PLUS,
908 if_info->x, target, if_info->x,
909 0, OPTAB_WIDEN);
910 if (target)
911 {
912 if (target != if_info->x)
913 noce_emit_move_insn (if_info->x, target);
914
915 seq = get_insns ();
916 end_sequence ();
917
918 if (seq_contains_jump (seq))
919 return FALSE;
920
921 emit_insn_before_scope (seq, if_info->jump,
922 INSN_SCOPE (if_info->insn_a));
923
924 return TRUE;
925 }
926
927 end_sequence ();
928 }
929
930 return FALSE;
931 }
932
933 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
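/* The symmetric form, where X is zeroed in the other arm (i.e. X keeps
   its old value when the test holds), is handled as well by reversing
   the comparison when that is possible.  */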
934
935 static int
936 noce_try_store_flag_mask (if_info)
937 struct noce_if_info *if_info;
938 {
939 rtx target, seq;
940 int reversep;
941
942 reversep = 0;
943 if (! no_new_pseudos
944 && (BRANCH_COST >= 2
945 || STORE_FLAG_VALUE == -1)
946 && ((if_info->a == const0_rtx
947 && rtx_equal_p (if_info->b, if_info->x))
948 || ((reversep = (reversed_comparison_code (if_info->cond,
949 if_info->jump)
950 != UNKNOWN))
951 && if_info->b == const0_rtx
952 && rtx_equal_p (if_info->a, if_info->x))))
953 {
954 start_sequence ();
955 target = noce_emit_store_flag (if_info,
956 gen_reg_rtx (GET_MODE (if_info->x)),
957 reversep, -1);
958 if (target)
959 target = expand_simple_binop (GET_MODE (if_info->x), AND,
960 if_info->x, target, if_info->x, 0,
961 OPTAB_WIDEN);
962
963 if (target)
964 {
965 if (target != if_info->x)
966 noce_emit_move_insn (if_info->x, target);
967
968 seq = get_insns ();
969 end_sequence ();
970
971 if (seq_contains_jump (seq))
972 return FALSE;
973
974 emit_insn_before_scope (seq, if_info->jump,
975 INSN_SCOPE (if_info->insn_a));
976
977 return TRUE;
978 }
979
980 end_sequence ();
981 }
982
983 return FALSE;
984 }
985
986 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
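/* When it can, this builds the conditional move directly as

	(set X (if_then_else COND VTRUE VFALSE))

   and checks whether the target recognizes it; otherwise it falls back
   on emit_conditional_move.  */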
987
988 static rtx
989 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
990 struct noce_if_info *if_info;
991 rtx x, cmp_a, cmp_b, vfalse, vtrue;
992 enum rtx_code code;
993 {
994 /* If earliest == jump, try to build the cmove insn directly.
995 This is helpful when combine has created some complex condition
996 (like for alpha's cmovlbs) that we can't hope to regenerate
997 through the normal interface. */
998
999 if (if_info->cond_earliest == if_info->jump)
1000 {
1001 rtx tmp;
1002
1003 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1004 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1005 tmp = gen_rtx_SET (VOIDmode, x, tmp);
1006
1007 start_sequence ();
1008 tmp = emit_insn (tmp);
1009
1010 if (recog_memoized (tmp) >= 0)
1011 {
1012 tmp = get_insns ();
1013 end_sequence ();
1014 emit_insn (tmp);
1015
1016 return x;
1017 }
1018
1019 end_sequence ();
1020 }
1021
1022 /* Don't even try if the comparison operands are weird. */
1023 if (! general_operand (cmp_a, GET_MODE (cmp_a))
1024 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1025 return NULL_RTX;
1026
1027 #if HAVE_conditional_move
1028 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1029 vtrue, vfalse, GET_MODE (x),
1030 (code == LTU || code == GEU
1031 || code == LEU || code == GTU));
1032 #else
1033 /* We'll never get here, as noce_process_if_block doesn't call the
1034 functions involved. Ifdef code, however, should be discouraged
1035 because it leads to typos in the code not selected. However,
1036 emit_conditional_move won't exist either. */
1037 return NULL_RTX;
1038 #endif
1039 }
1040
1041 /* Try only simple constants and registers here. More complex cases
1042 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1043 has had a go at it. */
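/* I.e. "if (test) x = a; else x = b;" becomes a single conditional-move
   style instruction, roughly x = (test ? a : b), when the target
   provides one for this mode.  */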
1044
1045 static int
1046 noce_try_cmove (if_info)
1047 struct noce_if_info *if_info;
1048 {
1049 enum rtx_code code;
1050 rtx target, seq;
1051
1052 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1053 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1054 {
1055 start_sequence ();
1056
1057 code = GET_CODE (if_info->cond);
1058 target = noce_emit_cmove (if_info, if_info->x, code,
1059 XEXP (if_info->cond, 0),
1060 XEXP (if_info->cond, 1),
1061 if_info->a, if_info->b);
1062
1063 if (target)
1064 {
1065 if (target != if_info->x)
1066 noce_emit_move_insn (if_info->x, target);
1067
1068 seq = get_insns ();
1069 end_sequence ();
1070 emit_insn_before_scope (seq, if_info->jump,
1071 INSN_SCOPE (if_info->insn_a));
1072 return TRUE;
1073 }
1074 else
1075 {
1076 end_sequence ();
1077 return FALSE;
1078 }
1079 }
1080
1081 return FALSE;
1082 }
1083
1084 /* Try more complex cases involving conditional_move. */
1085
1086 static int
1087 noce_try_cmove_arith (if_info)
1088 struct noce_if_info *if_info;
1089 {
1090 rtx a = if_info->a;
1091 rtx b = if_info->b;
1092 rtx x = if_info->x;
1093 rtx insn_a, insn_b;
1094 rtx tmp, target;
1095 int is_mem = 0;
1096 enum rtx_code code;
1097
1098 /* A conditional move from two memory sources is equivalent to a
1099 conditional on their addresses followed by a load. Don't do this
1100 early because it'll screw alias analysis. Note that we've
1101 already checked for no side effects. */
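/* E.g. "if (test) x = *p; else x = *q;" can become roughly
   "tmp = (test ? p : q); x = *tmp;": one conditional move on the
   addresses followed by a single load.  */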
1102 if (! no_new_pseudos && cse_not_expected
1103 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
1104 && BRANCH_COST >= 5)
1105 {
1106 a = XEXP (a, 0);
1107 b = XEXP (b, 0);
1108 x = gen_reg_rtx (Pmode);
1109 is_mem = 1;
1110 }
1111
1112 /* ??? We could handle this if we knew that a load from A or B could
1113 not fault. This is also true if we've already loaded
1114 from the address along the path from ENTRY. */
1115 else if (may_trap_p (a) || may_trap_p (b))
1116 return FALSE;
1117
1118 /* if (test) x = a + b; else x = c - d;
1119 => y = a + b;
1120 x = c - d;
1121 if (test)
1122 x = y;
1123 */
1124
1125 code = GET_CODE (if_info->cond);
1126 insn_a = if_info->insn_a;
1127 insn_b = if_info->insn_b;
1128
1129 /* Possibly rearrange operands to make things come out more natural. */
1130 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1131 {
1132 int reversep = 0;
1133 if (rtx_equal_p (b, x))
1134 reversep = 1;
1135 else if (general_operand (b, GET_MODE (b)))
1136 reversep = 1;
1137
1138 if (reversep)
1139 {
1140 code = reversed_comparison_code (if_info->cond, if_info->jump);
1141 tmp = a, a = b, b = tmp;
1142 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1143 }
1144 }
1145
1146 start_sequence ();
1147
1148 /* If either operand is complex, load it into a register first.
1149 The best way to do this is to copy the original insn. In this
1150 way we preserve any clobbers etc that the insn may have had.
1151 This is of course not possible in the IS_MEM case. */
1152 if (! general_operand (a, GET_MODE (a)))
1153 {
1154 rtx set;
1155
1156 if (no_new_pseudos)
1157 goto end_seq_and_fail;
1158
1159 if (is_mem)
1160 {
1161 tmp = gen_reg_rtx (GET_MODE (a));
1162 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1163 }
1164 else if (! insn_a)
1165 goto end_seq_and_fail;
1166 else
1167 {
1168 a = gen_reg_rtx (GET_MODE (a));
1169 tmp = copy_rtx (insn_a);
1170 set = single_set (tmp);
1171 SET_DEST (set) = a;
1172 tmp = emit_insn (PATTERN (tmp));
1173 }
1174 if (recog_memoized (tmp) < 0)
1175 goto end_seq_and_fail;
1176 }
1177 if (! general_operand (b, GET_MODE (b)))
1178 {
1179 rtx set;
1180
1181 if (no_new_pseudos)
1182 goto end_seq_and_fail;
1183
1184 if (is_mem)
1185 {
1186 tmp = gen_reg_rtx (GET_MODE (b));
1187 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1188 }
1189 else if (! insn_b)
1190 goto end_seq_and_fail;
1191 else
1192 {
1193 b = gen_reg_rtx (GET_MODE (b));
1194 tmp = copy_rtx (insn_b);
1195 set = single_set (tmp);
1196 SET_DEST (set) = b;
1197 tmp = emit_insn (PATTERN (tmp));
1198 }
1199 if (recog_memoized (tmp) < 0)
1200 goto end_seq_and_fail;
1201 }
1202
1203 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1204 XEXP (if_info->cond, 1), a, b);
1205
1206 if (! target)
1207 goto end_seq_and_fail;
1208
1209 /* If we're handling the memory case from above, emit the load now. */
1210 if (is_mem)
1211 {
1212 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1213
1214 /* Copy over flags as appropriate. */
1215 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1216 MEM_VOLATILE_P (tmp) = 1;
1217 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1218 MEM_IN_STRUCT_P (tmp) = 1;
1219 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1220 MEM_SCALAR_P (tmp) = 1;
1221 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1222 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1223 set_mem_align (tmp,
1224 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1225
1226 noce_emit_move_insn (if_info->x, tmp);
1227 }
1228 else if (target != x)
1229 noce_emit_move_insn (x, target);
1230
1231 tmp = get_insns ();
1232 end_sequence ();
1233 emit_insn_before_scope (tmp, if_info->jump, INSN_SCOPE (if_info->insn_a));
1234 return TRUE;
1235
1236 end_seq_and_fail:
1237 end_sequence ();
1238 return FALSE;
1239 }
1240
1241 /* For most cases, the simplified condition we found is the best
1242 choice, but this is not the case for the min/max/abs transforms.
1243 For these we wish to know that it is A or B in the condition. */
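/* For instance, for "if (x < 4) ..." GCC may have canonicalized the
   comparison to "x <= 3", or forced the constant into a register, so
   the operand we care about no longer appears literally; the code below
   tries to recover an equivalent condition that does mention TARGET.  */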
1244
1245 static rtx
1246 noce_get_alt_condition (if_info, target, earliest)
1247 struct noce_if_info *if_info;
1248 rtx target;
1249 rtx *earliest;
1250 {
1251 rtx cond, set, insn;
1252 int reverse;
1253
1254 /* If target is already mentioned in the known condition, return it. */
1255 if (reg_mentioned_p (target, if_info->cond))
1256 {
1257 *earliest = if_info->cond_earliest;
1258 return if_info->cond;
1259 }
1260
1261 set = pc_set (if_info->jump);
1262 cond = XEXP (SET_SRC (set), 0);
1263 reverse
1264 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1265 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1266
1267 /* If we're looking for a constant, try to make the conditional
1268 have that constant in it. There are two reasons why it may
1269 not have the constant we want:
1270
1271 1. GCC may have needed to put the constant in a register, because
1272 the target can't compare directly against that constant. For
1273 this case, we look for a SET immediately before the comparison
1274 that puts a constant in that register.
1275
1276 2. GCC may have canonicalized the conditional, for example
1277 replacing "if x < 4" with "if x <= 3". We can undo that (or
1278 make equivalent types of changes) to get the constants we need
1279 if they're off by one in the right direction. */
1280
1281 if (GET_CODE (target) == CONST_INT)
1282 {
1283 enum rtx_code code = GET_CODE (if_info->cond);
1284 rtx op_a = XEXP (if_info->cond, 0);
1285 rtx op_b = XEXP (if_info->cond, 1);
1286 rtx prev_insn;
1287
1288 /* First, look to see if we put a constant in a register. */
1289 prev_insn = PREV_INSN (if_info->cond_earliest);
1290 if (prev_insn
1291 && INSN_P (prev_insn)
1292 && GET_CODE (PATTERN (prev_insn)) == SET)
1293 {
1294 rtx src = find_reg_equal_equiv_note (prev_insn);
1295 if (!src)
1296 src = SET_SRC (PATTERN (prev_insn));
1297 if (GET_CODE (src) == CONST_INT)
1298 {
1299 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1300 op_a = src;
1301 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1302 op_b = src;
1303
1304 if (GET_CODE (op_a) == CONST_INT)
1305 {
1306 rtx tmp = op_a;
1307 op_a = op_b;
1308 op_b = tmp;
1309 code = swap_condition (code);
1310 }
1311 }
1312 }
1313
1314 /* Now, look to see if we can get the right constant by
1315 adjusting the conditional. */
1316 if (GET_CODE (op_b) == CONST_INT)
1317 {
1318 HOST_WIDE_INT desired_val = INTVAL (target);
1319 HOST_WIDE_INT actual_val = INTVAL (op_b);
1320
1321 switch (code)
1322 {
1323 case LT:
1324 if (actual_val == desired_val + 1)
1325 {
1326 code = LE;
1327 op_b = GEN_INT (desired_val);
1328 }
1329 break;
1330 case LE:
1331 if (actual_val == desired_val - 1)
1332 {
1333 code = LT;
1334 op_b = GEN_INT (desired_val);
1335 }
1336 break;
1337 case GT:
1338 if (actual_val == desired_val - 1)
1339 {
1340 code = GE;
1341 op_b = GEN_INT (desired_val);
1342 }
1343 break;
1344 case GE:
1345 if (actual_val == desired_val + 1)
1346 {
1347 code = GT;
1348 op_b = GEN_INT (desired_val);
1349 }
1350 break;
1351 default:
1352 break;
1353 }
1354 }
1355
1356 /* If we made any changes, generate a new conditional that is
1357 equivalent to what we started with, but has the right
1358 constants in it. */
1359 if (code != GET_CODE (if_info->cond)
1360 || op_a != XEXP (if_info->cond, 0)
1361 || op_b != XEXP (if_info->cond, 1))
1362 {
1363 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1364 *earliest = if_info->cond_earliest;
1365 return cond;
1366 }
1367 }
1368
1369 cond = canonicalize_condition (if_info->jump, cond, reverse,
1370 earliest, target);
1371 if (! cond || ! reg_mentioned_p (target, cond))
1372 return NULL;
1373
1374 /* We almost certainly searched back to a different place.
1375 Need to re-verify correct lifetimes. */
1376
1377 /* X may not be mentioned in the range (cond_earliest, jump]. */
1378 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1379 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1380 return NULL;
1381
1382 /* A and B may not be modified in the range [cond_earliest, jump). */
1383 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1384 if (INSN_P (insn)
1385 && (modified_in_p (if_info->a, insn)
1386 || modified_in_p (if_info->b, insn)))
1387 return NULL;
1388
1389 return cond;
1390 }
1391
1392 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1393
1394 static int
1395 noce_try_minmax (if_info)
1396 struct noce_if_info *if_info;
1397 {
1398 rtx cond, earliest, target, seq;
1399 enum rtx_code code, op;
1400 int unsignedp;
1401
1402 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1403 if (no_new_pseudos)
1404 return FALSE;
1405
1406 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1407 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1408 to get the target to tell us... */
1409 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1410 || HONOR_NANS (GET_MODE (if_info->x)))
1411 return FALSE;
1412
1413 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1414 if (!cond)
1415 return FALSE;
1416
1417 /* Verify the condition is of the form we expect, and canonicalize
1418 the comparison code. */
1419 code = GET_CODE (cond);
1420 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1421 {
1422 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1423 return FALSE;
1424 }
1425 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1426 {
1427 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1428 return FALSE;
1429 code = swap_condition (code);
1430 }
1431 else
1432 return FALSE;
1433
1434 /* Determine what sort of operation this is. Note that the code is for
1435 a taken branch, so the code->operation mapping appears backwards. */
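/* E.g. if COND is (lt A B), the branch is taken to the arm that stores
   B, so X ends up as max (A, B); hence LT/LE map to SMAX below rather
   than SMIN.  */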
1436 switch (code)
1437 {
1438 case LT:
1439 case LE:
1440 case UNLT:
1441 case UNLE:
1442 op = SMAX;
1443 unsignedp = 0;
1444 break;
1445 case GT:
1446 case GE:
1447 case UNGT:
1448 case UNGE:
1449 op = SMIN;
1450 unsignedp = 0;
1451 break;
1452 case LTU:
1453 case LEU:
1454 op = UMAX;
1455 unsignedp = 1;
1456 break;
1457 case GTU:
1458 case GEU:
1459 op = UMIN;
1460 unsignedp = 1;
1461 break;
1462 default:
1463 return FALSE;
1464 }
1465
1466 start_sequence ();
1467
1468 target = expand_simple_binop (GET_MODE (if_info->x), op,
1469 if_info->a, if_info->b,
1470 if_info->x, unsignedp, OPTAB_WIDEN);
1471 if (! target)
1472 {
1473 end_sequence ();
1474 return FALSE;
1475 }
1476 if (target != if_info->x)
1477 noce_emit_move_insn (if_info->x, target);
1478
1479 seq = get_insns ();
1480 end_sequence ();
1481
1482 if (seq_contains_jump (seq))
1483 return FALSE;
1484
1485 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1486 if_info->cond = cond;
1487 if_info->cond_earliest = earliest;
1488
1489 return TRUE;
1490 }
1491
1492 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
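/* The mirrored form "if (a < 0) x = a; else x = -a;" is recognized too;
   it becomes roughly x = -abs(a) via the NEGATE flag below.  */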
1493
1494 static int
1495 noce_try_abs (if_info)
1496 struct noce_if_info *if_info;
1497 {
1498 rtx cond, earliest, target, seq, a, b, c;
1499 int negate;
1500
1501 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1502 if (no_new_pseudos)
1503 return FALSE;
1504
1505 /* Recognize A and B as constituting an ABS or NABS. */
1506 a = if_info->a;
1507 b = if_info->b;
1508 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1509 negate = 0;
1510 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1511 {
1512 c = a; a = b; b = c;
1513 negate = 1;
1514 }
1515 else
1516 return FALSE;
1517
1518 cond = noce_get_alt_condition (if_info, b, &earliest);
1519 if (!cond)
1520 return FALSE;
1521
1522 /* Verify the condition is of the form we expect. */
1523 if (rtx_equal_p (XEXP (cond, 0), b))
1524 c = XEXP (cond, 1);
1525 else if (rtx_equal_p (XEXP (cond, 1), b))
1526 c = XEXP (cond, 0);
1527 else
1528 return FALSE;
1529
1530 /* Verify that C is zero. Search backward through the block for
1531 a REG_EQUAL note if necessary. */
1532 if (REG_P (c))
1533 {
1534 rtx insn, note = NULL;
1535 for (insn = earliest;
1536 insn != if_info->test_bb->head;
1537 insn = PREV_INSN (insn))
1538 if (INSN_P (insn)
1539 && ((note = find_reg_note (insn, REG_EQUAL, c))
1540 || (note = find_reg_note (insn, REG_EQUIV, c))))
1541 break;
1542 if (! note)
1543 return FALSE;
1544 c = XEXP (note, 0);
1545 }
1546 if (GET_CODE (c) == MEM
1547 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1548 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1549 c = get_pool_constant (XEXP (c, 0));
1550
1551 /* Work around funny ideas get_condition has wrt canonicalization.
1552 Note that these rtx constants are known to be CONST_INT, and
1553 therefore imply integer comparisons. */
1554 if (c == constm1_rtx && GET_CODE (cond) == GT)
1555 ;
1556 else if (c == const1_rtx && GET_CODE (cond) == LT)
1557 ;
1558 else if (c != CONST0_RTX (GET_MODE (b)))
1559 return FALSE;
1560
1561 /* Determine what sort of operation this is. */
1562 switch (GET_CODE (cond))
1563 {
1564 case LT:
1565 case LE:
1566 case UNLT:
1567 case UNLE:
1568 negate = !negate;
1569 break;
1570 case GT:
1571 case GE:
1572 case UNGT:
1573 case UNGE:
1574 break;
1575 default:
1576 return FALSE;
1577 }
1578
1579 start_sequence ();
1580
1581 target = expand_simple_unop (GET_MODE (if_info->x), ABS, b, if_info->x, 0);
1582
1583 /* ??? It's a quandary whether cmove would be better here, especially
1584 for integers. Perhaps combine will clean things up. */
1585 if (target && negate)
1586 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1587
1588 if (! target)
1589 {
1590 end_sequence ();
1591 return FALSE;
1592 }
1593
1594 if (target != if_info->x)
1595 noce_emit_move_insn (if_info->x, target);
1596
1597 seq = get_insns ();
1598 end_sequence ();
1599
1600 if (seq_contains_jump (seq))
1601 return FALSE;
1602
1603 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1604 if_info->cond = cond;
1605 if_info->cond_earliest = earliest;
1606
1607 return TRUE;
1608 }
1609
1610 /* Similar to get_condition, only the resulting condition must be
1611 valid at JUMP, instead of at EARLIEST. */
1612
1613 static rtx
1614 noce_get_condition (jump, earliest)
1615 rtx jump;
1616 rtx *earliest;
1617 {
1618 rtx cond, set, tmp, insn;
1619 bool reverse;
1620
1621 if (! any_condjump_p (jump))
1622 return NULL_RTX;
1623
1624 set = pc_set (jump);
1625
1626 /* If this branches to JUMP_LABEL when the condition is false,
1627 reverse the condition. */
1628 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1629 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
1630
1631 /* If the condition variable is a register and is MODE_INT, accept it. */
1632
1633 cond = XEXP (SET_SRC (set), 0);
1634 tmp = XEXP (cond, 0);
1635 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
1636 {
1637 *earliest = jump;
1638
1639 if (reverse)
1640 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1641 GET_MODE (cond), tmp, XEXP (cond, 1));
1642 return cond;
1643 }
1644
1645 /* Otherwise, fall back on canonicalize_condition to do the dirty
1646 work of manipulating MODE_CC values and COMPARE rtx codes. */
1647
1648 tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX);
1649 if (!tmp)
1650 return NULL_RTX;
1651
1652 /* We are going to insert code before JUMP, not before EARLIEST.
1653 We must therefore be certain that the given condition is valid
1654 at JUMP by virtue of not having been modified since. */
1655 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1656 if (INSN_P (insn) && modified_in_p (tmp, insn))
1657 break;
1658 if (insn == jump)
1659 return tmp;
1660
1661 /* The condition was modified. See if we can get a partial result
1662 that doesn't follow all the reversals. Perhaps combine can fold
1663 them together later. */
1664 tmp = XEXP (tmp, 0);
1665 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
1666 return NULL_RTX;
1667 tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp);
1668 if (!tmp)
1669 return NULL_RTX;
1670
1671 /* For sanity's sake, re-validate the new result. */
1672 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1673 if (INSN_P (insn) && modified_in_p (tmp, insn))
1674 return NULL_RTX;
1675
1676 return tmp;
1677 }
1678
1679 /* Return true if OP is ok for if-then-else processing. */
1680
1681 static int
1682 noce_operand_ok (op)
1683 rtx op;
1684 {
1685 /* We special-case memories, so handle any of them with
1686 no address side effects. */
1687 if (GET_CODE (op) == MEM)
1688 return ! side_effects_p (XEXP (op, 0));
1689
1690 if (side_effects_p (op))
1691 return FALSE;
1692
1693 return ! may_trap_p (op);
1694 }
1695
1696 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1697 without using conditional execution. Return TRUE if we were
1698 successful at converting the block. */
1699
1700 static int
1701 noce_process_if_block (ce_info)
1702 struct ce_if_block * ce_info;
1703 {
1704 basic_block test_bb = ce_info->test_bb; /* test block */
1705 basic_block then_bb = ce_info->then_bb; /* THEN */
1706 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1707 struct noce_if_info if_info;
1708 rtx insn_a, insn_b;
1709 rtx set_a, set_b;
1710 rtx orig_x, x, a, b;
1711 rtx jump, cond, insn;
1712
1713 /* We're looking for patterns of the form
1714
1715 (1) if (...) x = a; else x = b;
1716 (2) x = b; if (...) x = a;
1717 (3) if (...) x = a; // as if with an initial x = x.
1718
1719 The latter patterns require jumps to be more expensive.
1720
1721 ??? For future expansion, look for multiple X in such patterns. */
1722
1723 /* If test is comprised of && or || elements, don't handle it unless it is
1724 the special case of && elements without an ELSE block. */
1725 if (ce_info->num_multiple_test_blocks)
1726 {
1727 if (else_bb || ! ce_info->and_and_p)
1728 return FALSE;
1729
1730 ce_info->test_bb = test_bb = ce_info->last_test_bb;
1731 ce_info->num_multiple_test_blocks = 0;
1732 ce_info->num_and_and_blocks = 0;
1733 ce_info->num_or_or_blocks = 0;
1734 }
1735
1736 /* If this is not a standard conditional jump, we can't parse it. */
1737 jump = test_bb->end;
1738 cond = noce_get_condition (jump, &if_info.cond_earliest);
1739 if (! cond)
1740 return FALSE;
1741
1742 /* If the conditional jump is more than just a conditional
1743 jump, then we can not do if-conversion on this block. */
1744 if (! onlyjump_p (jump))
1745 return FALSE;
1746
1747 /* We must be comparing objects whose modes imply the size. */
1748 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1749 return FALSE;
1750
1751 /* Look for one of the potential sets. */
1752 insn_a = first_active_insn (then_bb);
1753 if (! insn_a
1754 || insn_a != last_active_insn (then_bb, FALSE)
1755 || (set_a = single_set (insn_a)) == NULL_RTX)
1756 return FALSE;
1757
1758 x = SET_DEST (set_a);
1759 a = SET_SRC (set_a);
1760
1761 /* Look for the other potential set. Make sure we've got equivalent
1762 destinations. */
1763 /* ??? This is overconservative. Storing to two different mems is
1764 as easy as conditionally computing the address. Storing to a
1765 single mem merely requires a scratch memory to use as one of the
1766 destination addresses; often the memory immediately below the
1767 stack pointer is available for this. */
1768 set_b = NULL_RTX;
1769 if (else_bb)
1770 {
1771 insn_b = first_active_insn (else_bb);
1772 if (! insn_b
1773 || insn_b != last_active_insn (else_bb, FALSE)
1774 || (set_b = single_set (insn_b)) == NULL_RTX
1775 || ! rtx_equal_p (x, SET_DEST (set_b)))
1776 return FALSE;
1777 }
1778 else
1779 {
1780 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1781 if (! insn_b
1782 || GET_CODE (insn_b) != INSN
1783 || (set_b = single_set (insn_b)) == NULL_RTX
1784 || ! rtx_equal_p (x, SET_DEST (set_b))
1785 || reg_mentioned_p (x, cond)
1786 || reg_mentioned_p (x, a)
1787 || reg_mentioned_p (x, SET_SRC (set_b)))
1788 insn_b = set_b = NULL_RTX;
1789 }
1790 b = (set_b ? SET_SRC (set_b) : x);
1791
1792 /* X may not be mentioned in the range (cond_earliest, jump]. */
1793 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1794 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1795 return FALSE;
1796
1797 /* A and B may not be modified in the range [cond_earliest, jump). */
1798 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1799 if (INSN_P (insn)
1800 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1801 return FALSE;
1802
1803 /* Only operate on register destinations, and even then avoid extending
1804 the lifetime of hard registers on small register class machines. */
1805 orig_x = x;
1806 if (GET_CODE (x) != REG
1807 || (SMALL_REGISTER_CLASSES
1808 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1809 {
1810 if (no_new_pseudos)
1811 return FALSE;
1812 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1813 ? XEXP (x, 0) : x));
1814 }
1815
1816 /* Don't operate on sources that may trap or are volatile. */
1817 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1818 return FALSE;
1819
1820 /* Set up the info block for our subroutines. */
1821 if_info.test_bb = test_bb;
1822 if_info.cond = cond;
1823 if_info.jump = jump;
1824 if_info.insn_a = insn_a;
1825 if_info.insn_b = insn_b;
1826 if_info.x = x;
1827 if_info.a = a;
1828 if_info.b = b;
1829
1830 /* Try optimizations in some approximation of a useful order. */
1831 /* ??? Should first look to see if X is live incoming at all. If it
1832 isn't, we don't need anything but an unconditional set. */
1833
1834 /* Look and see if A and B are really the same. Avoid creating silly
1835 cmove constructs that no one will fix up later. */
1836 if (rtx_equal_p (a, b))
1837 {
1838 /* If we have an INSN_B, we don't have to create any new rtl. Just
1839 move the instruction that we already have. If we don't have an
1840 INSN_B, that means that A == X, and we've got a noop move. In
1841 that case don't do anything and let the code below delete INSN_A. */
1842 if (insn_b && else_bb)
1843 {
1844 rtx note;
1845
1846 if (else_bb && insn_b == else_bb->end)
1847 else_bb->end = PREV_INSN (insn_b);
1848 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1849
1850 /* If there was a REG_EQUAL note, delete it since it may have been
1851 true due to this insn being after a jump. */
1852 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1853 remove_note (insn_b, note);
1854
1855 insn_b = NULL_RTX;
1856 }
1857 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1858 x must be executed twice. */
1859 else if (insn_b && side_effects_p (orig_x))
1860 return FALSE;
1861
1862 x = orig_x;
1863 goto success;
1864 }
1865
1866 if (noce_try_store_flag (&if_info))
1867 goto success;
1868 if (noce_try_minmax (&if_info))
1869 goto success;
1870 if (noce_try_abs (&if_info))
1871 goto success;
1872 if (HAVE_conditional_move
1873 && noce_try_cmove (&if_info))
1874 goto success;
1875 if (! HAVE_conditional_execution)
1876 {
1877 if (noce_try_store_flag_constants (&if_info))
1878 goto success;
1879 if (noce_try_store_flag_inc (&if_info))
1880 goto success;
1881 if (noce_try_store_flag_mask (&if_info))
1882 goto success;
1883 if (HAVE_conditional_move
1884 && noce_try_cmove_arith (&if_info))
1885 goto success;
1886 }
1887
1888 return FALSE;
1889
1890 success:
1891 /* The original sets may now be killed. */
1892 delete_insn (insn_a);
1893
1894 /* Several special cases here: First, we may have reused insn_b above,
1895 in which case insn_b is now NULL. Second, we want to delete insn_b
1896 if it came from the ELSE block, because it follows the now correct
1897 write that appears in the TEST block. However, if we got insn_b from
1898 the TEST block, it may in fact be loading data needed for the comparison.
1899 We'll let life_analysis remove the insn if it's really dead. */
1900 if (insn_b && else_bb)
1901 delete_insn (insn_b);
1902
1903 /* The new insns will have been inserted before cond_earliest. We should
1904 be able to remove the jump with impunity, but the condition itself may
1905 have been modified by gcse to be shared across basic blocks. */
1906 delete_insn (jump);
1907
1908 /* If we used a temporary, fix it up now. */
1909 if (orig_x != x)
1910 {
1911 start_sequence ();
1912 noce_emit_move_insn (copy_rtx (orig_x), x);
1913 insn_b = get_insns ();
1914 end_sequence ();
1915
1916 emit_insn_after_scope (insn_b, test_bb->end, INSN_SCOPE (insn_a));
1917 }
1918
1919 /* Merge the blocks! */
1920 merge_if_block (ce_info);
1921
1922 return TRUE;
1923 }
1924 \f
1925 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1926 straight line code. Return true if successful. */
1927
1928 static int
1929 process_if_block (ce_info)
1930 struct ce_if_block * ce_info;
1931 {
1932 if (! reload_completed
1933 && noce_process_if_block (ce_info))
1934 return TRUE;
1935
1936 if (HAVE_conditional_execution && reload_completed)
1937 {
1938 /* If we have && and || tests, try to first handle combining the && and
1939 || tests into the conditional code, and if that fails, go back and
1940 handle it without the && and ||, which at present handles the && case
1941 if there was no ELSE block. */
1942 if (cond_exec_process_if_block (ce_info, TRUE))
1943 return TRUE;
1944
1945 if (ce_info->num_multiple_test_blocks)
1946 {
1947 cancel_changes (0);
1948
1949 if (cond_exec_process_if_block (ce_info, FALSE))
1950 return TRUE;
1951 }
1952 }
1953
1954 return FALSE;
1955 }
1956
1957 /* Merge the blocks and mark for local life update. */
1958
1959 static void
1960 merge_if_block (ce_info)
1961 struct ce_if_block * ce_info;
1962 {
1963 basic_block test_bb = ce_info->test_bb; /* last test block */
1964 basic_block then_bb = ce_info->then_bb; /* THEN */
1965 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1966 basic_block join_bb = ce_info->join_bb; /* join block */
1967 basic_block combo_bb;
1968
1969 /* All block merging is done into the lower block numbers. */
1970
1971 combo_bb = test_bb;
1972
1973 /* Merge any basic blocks to handle && and || subtests. Each of
1974 the blocks is on the fallthru path from the predecessor block. */
1975 if (ce_info->num_multiple_test_blocks > 0)
1976 {
1977 basic_block bb = test_bb;
1978 basic_block last_test_bb = ce_info->last_test_bb;
1979 basic_block fallthru = block_fallthru (bb);
1980
1981 do
1982 {
1983 bb = fallthru;
1984 fallthru = block_fallthru (bb);
1985 if (post_dominators)
1986 delete_from_dominance_info (post_dominators, bb);
1987 merge_blocks_nomove (combo_bb, bb);
1988 num_removed_blocks++;
1989 }
1990 while (bb != last_test_bb);
1991 }
1992
1993 /* Merge TEST block into THEN block. Normally the THEN block won't have a
1994 label, but it might if there were || tests. That label's count should be
1995 zero, and it normally should be removed. */
1996
1997 if (then_bb)
1998 {
1999 if (combo_bb->global_live_at_end)
2000 COPY_REG_SET (combo_bb->global_live_at_end,
2001 then_bb->global_live_at_end);
2002 if (post_dominators)
2003 delete_from_dominance_info (post_dominators, then_bb);
2004 merge_blocks_nomove (combo_bb, then_bb);
2005 num_removed_blocks++;
2006 }
2007
2008 /* The ELSE block, if it existed, had a label. That label count
2009 will almost always be zero, but odd things can happen when labels
2010 get their addresses taken. */
2011 if (else_bb)
2012 {
2013 if (post_dominators)
2014 delete_from_dominance_info (post_dominators, else_bb);
2015 merge_blocks_nomove (combo_bb, else_bb);
2016 num_removed_blocks++;
2017 }
2018
2019 /* If there was no join block reported, that means it was not adjacent
2020 to the others, and so we cannot merge them. */
2021
2022 if (! join_bb)
2023 {
2024 rtx last = combo_bb->end;
2025
2026 /* The outgoing edge for the current COMBO block should already
2027 be correct. Verify this. */
2028 if (combo_bb->succ == NULL_EDGE)
2029 {
2030 if (find_reg_note (last, REG_NORETURN, NULL))
2031 ;
2032 else if (GET_CODE (last) == INSN
2033 && GET_CODE (PATTERN (last)) == TRAP_IF
2034 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
2035 ;
2036 else
2037 abort ();
2038 }
2039
2040 /* There should still be something at the end of the THEN or ELSE
2041 blocks taking us to our final destination. */
2042 else if (GET_CODE (last) == JUMP_INSN)
2043 ;
2044 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
2045 && GET_CODE (last) == CALL_INSN
2046 && SIBLING_CALL_P (last))
2047 ;
2048 else if ((combo_bb->succ->flags & EDGE_EH)
2049 && can_throw_internal (last))
2050 ;
2051 else
2052 abort ();
2053 }
2054
2055 /* The JOIN block may have had quite a number of other predecessors too.
2056 Since we've already merged the TEST, THEN and ELSE blocks, we should
2057 have only one remaining edge from our if-then-else diamond. If there
2058 is more than one remaining edge, it must come from elsewhere. There
2059 may be zero incoming edges if the THEN block didn't actually join
2060 back up (as with a call to abort). */
2061 else if ((join_bb->pred == NULL
2062 || join_bb->pred->pred_next == NULL)
2063 && join_bb != EXIT_BLOCK_PTR)
2064 {
2065 /* We can merge the JOIN. */
2066 if (combo_bb->global_live_at_end)
2067 COPY_REG_SET (combo_bb->global_live_at_end,
2068 join_bb->global_live_at_end);
2069
2070 if (post_dominators)
2071 delete_from_dominance_info (post_dominators, join_bb);
2072 merge_blocks_nomove (combo_bb, join_bb);
2073 num_removed_blocks++;
2074 }
2075 else
2076 {
2077 /* We cannot merge the JOIN. */
2078
2079 /* The outgoing edge for the current COMBO block should already
2080 be correct. Verify this. */
2081 if (combo_bb->succ->succ_next != NULL_EDGE
2082 || combo_bb->succ->dest != join_bb)
2083 abort ();
2084
2085 /* Remove the jump and cruft from the end of the COMBO block. */
2086 if (join_bb != EXIT_BLOCK_PTR)
2087 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
2088 }
2089
2090 num_updated_if_blocks++;
2091 }
2092 \f
2093 /* Find a block ending in a simple IF condition and try to transform it
2094 in some way. When converting a multi-block condition, put the new code
2095 in the first such block and delete the rest. Return a pointer to this
2096 first block if some transformation was done. Return NULL otherwise. */
2097
2098 static basic_block
2099 find_if_header (test_bb, pass)
2100 basic_block test_bb;
2101 int pass;
2102 {
2103 ce_if_block_t ce_info;
2104 edge then_edge;
2105 edge else_edge;
2106
2107 /* The kind of block we're looking for has exactly two successors. */
2108 if ((then_edge = test_bb->succ) == NULL_EDGE
2109 || (else_edge = then_edge->succ_next) == NULL_EDGE
2110 || else_edge->succ_next != NULL_EDGE)
2111 return NULL;
2112
2113 /* Neither edge should be abnormal. */
2114 if ((then_edge->flags & EDGE_COMPLEX)
2115 || (else_edge->flags & EDGE_COMPLEX))
2116 return NULL;
2117
2118 /* The THEN edge is canonically the one that falls through. */
2119 if (then_edge->flags & EDGE_FALLTHRU)
2120 ;
2121 else if (else_edge->flags & EDGE_FALLTHRU)
2122 {
2123 edge e = else_edge;
2124 else_edge = then_edge;
2125 then_edge = e;
2126 }
2127 else
2128 /* Otherwise this must be a multiway branch of some sort. */
2129 return NULL;
2130
2131 memset ((PTR) &ce_info, '\0', sizeof (ce_info));
2132 ce_info.test_bb = test_bb;
2133 ce_info.then_bb = then_edge->dest;
2134 ce_info.else_bb = else_edge->dest;
2135 ce_info.pass = pass;
2136
2137 #ifdef IFCVT_INIT_EXTRA_FIELDS
2138 IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2139 #endif
2140
2141 if (find_if_block (&ce_info))
2142 goto success;
2143
2144 if (HAVE_trap && HAVE_conditional_trap
2145 && find_cond_trap (test_bb, then_edge, else_edge))
2146 goto success;
2147
2148 if (post_dominators
2149 && (! HAVE_conditional_execution || reload_completed))
2150 {
2151 if (find_if_case_1 (test_bb, then_edge, else_edge))
2152 goto success;
2153 if (find_if_case_2 (test_bb, then_edge, else_edge))
2154 goto success;
2155 }
2156
2157 return NULL;
2158
2159 success:
2160 if (rtl_dump_file)
2161 fprintf (rtl_dump_file, "Conversion succeeded on pass %d.\n", pass);
2162 return ce_info.test_bb;
2163 }
2164
2165 /* Check whether CUR_BB has two successor edges, one of which falls through
2166 to the next block and the other of which jumps to TARGET_BB, so that we
2167 can tell if the block is part of an && test or an || test. Return -1 if
2168 not, otherwise the number of non-note, non-jump, non-USE/CLOBBER insns in the block. */
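/* For instance, in the && case the block computing the first operand ends
   with a conditional jump to the ELSE (or join) block and falls through to
   the block computing the next operand; find_if_block below calls this
   predicate with that ELSE block as TARGET_BB to recognize the shape.  */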
2169
2170 static int
2171 block_jumps_and_fallthru_p (cur_bb, target_bb)
2172 basic_block cur_bb;
2173 basic_block target_bb;
2174 {
2175 edge cur_edge;
2176 int fallthru_p = FALSE;
2177 int jump_p = FALSE;
2178 rtx insn;
2179 rtx end;
2180 int n_insns = 0;
2181
2182 if (!cur_bb || !target_bb)
2183 return -1;
2184
2185 /* If no edges, obviously it doesn't jump or fallthru. */
2186 if (cur_bb->succ == NULL_EDGE)
2187 return FALSE;
2188
2189 for (cur_edge = cur_bb->succ;
2190 cur_edge != NULL_EDGE;
2191 cur_edge = cur_edge->succ_next)
2192 {
2193 if (cur_edge->flags & EDGE_COMPLEX)
2194 /* Anything complex isn't what we want. */
2195 return -1;
2196
2197 else if (cur_edge->flags & EDGE_FALLTHRU)
2198 fallthru_p = TRUE;
2199
2200 else if (cur_edge->dest == target_bb)
2201 jump_p = TRUE;
2202
2203 else
2204 return -1;
2205 }
2206
2207 if ((jump_p & fallthru_p) == 0)
2208 return -1;
2209
2210 /* Don't allow calls in the block, since this is used to group && and ||
2211 together for conditional execution support. ??? We should support
2212 conditional execution across calls for IA-64 some day, but for now
2213 disallowing them keeps the code simpler. */
2214 end = cur_bb->end;
2215 insn = cur_bb->head;
2216
2217 while (insn != NULL_RTX)
2218 {
2219 if (GET_CODE (insn) == CALL_INSN)
2220 return -1;
2221
2222 if (INSN_P (insn)
2223 && GET_CODE (insn) != JUMP_INSN
2224 && GET_CODE (PATTERN (insn)) != USE
2225 && GET_CODE (PATTERN (insn)) != CLOBBER)
2226 n_insns++;
2227
2228 if (insn == end)
2229 break;
2230
2231 insn = NEXT_INSN (insn);
2232 }
2233
2234 return n_insns;
2235 }
2236
2237 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2238 block. If so, we'll try to convert the insns to not require the branch.
2239 Return TRUE if we were successful at converting the block. */
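/* Roughly, the two shapes recognized are: a TEST block with one edge to a
   THEN block and one edge directly to the JOIN block (IF-THEN), or a TEST
   block branching to a THEN and an ELSE block that both rejoin at a common
   JOIN block (IF-THEN-ELSE), possibly preceded by a chain of && / || test
   blocks that fall through to the final TEST block.  */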
2240
2241 static int
2242 find_if_block (ce_info)
2243 struct ce_if_block * ce_info;
2244 {
2245 basic_block test_bb = ce_info->test_bb;
2246 basic_block then_bb = ce_info->then_bb;
2247 basic_block else_bb = ce_info->else_bb;
2248 basic_block join_bb = NULL_BLOCK;
2249 edge then_succ = then_bb->succ;
2250 edge else_succ = else_bb->succ;
2251 int then_predecessors;
2252 int else_predecessors;
2253 edge cur_edge;
2254 basic_block next;
2255
2256 ce_info->last_test_bb = test_bb;
2257
2258 /* Discover if any fall through predecessors of the current test basic block
2259 were && tests (which jump to the else block) or || tests (which jump to
2260 the then block). */
2261 if (HAVE_conditional_execution && reload_completed
2262 && test_bb->pred != NULL_EDGE
2263 && test_bb->pred->pred_next == NULL_EDGE
2264 && test_bb->pred->flags == EDGE_FALLTHRU)
2265 {
2266 basic_block bb = test_bb->pred->src;
2267 basic_block target_bb;
2268 int max_insns = MAX_CONDITIONAL_EXECUTE;
2269 int n_insns;
2270
2271 /* Determine if the preceding block is an && or || block. */
2272 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
2273 {
2274 ce_info->and_and_p = TRUE;
2275 target_bb = else_bb;
2276 }
2277 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
2278 {
2279 ce_info->and_and_p = FALSE;
2280 target_bb = then_bb;
2281 }
2282 else
2283 target_bb = NULL_BLOCK;
2284
2285 if (target_bb && n_insns <= max_insns)
2286 {
2287 int total_insns = 0;
2288 int blocks = 0;
2289
2290 ce_info->last_test_bb = test_bb;
2291
2292 /* Found at least one && or || block, look for more. */
2293 do
2294 {
2295 ce_info->test_bb = test_bb = bb;
2296 total_insns += n_insns;
2297 blocks++;
2298
2299 if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
2300 break;
2301
2302 bb = bb->pred->src;
2303 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
2304 }
2305 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
2306
2307 ce_info->num_multiple_test_blocks = blocks;
2308 ce_info->num_multiple_test_insns = total_insns;
2309
2310 if (ce_info->and_and_p)
2311 ce_info->num_and_and_blocks = blocks;
2312 else
2313 ce_info->num_or_or_blocks = blocks;
2314 }
2315 }
2316
2317 /* Count the number of edges the THEN and ELSE blocks have. */
2318 then_predecessors = 0;
2319 for (cur_edge = then_bb->pred;
2320 cur_edge != NULL_EDGE;
2321 cur_edge = cur_edge->pred_next)
2322 {
2323 then_predecessors++;
2324 if (cur_edge->flags & EDGE_COMPLEX)
2325 return FALSE;
2326 }
2327
2328 else_predecessors = 0;
2329 for (cur_edge = else_bb->pred;
2330 cur_edge != NULL_EDGE;
2331 cur_edge = cur_edge->pred_next)
2332 {
2333 else_predecessors++;
2334 if (cur_edge->flags & EDGE_COMPLEX)
2335 return FALSE;
2336 }
2337
2338 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
2339 other than any || blocks which jump to the THEN block. */
2340 if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
2341 return FALSE;
2342
2343 /* The THEN block of an IF-THEN combo must have zero or one successors. */
2344 if (then_succ != NULL_EDGE
2345 && (then_succ->succ_next != NULL_EDGE
2346 || (then_succ->flags & EDGE_COMPLEX)))
2347 return FALSE;
2348
2349 /* If the THEN block has no successors, conditional execution can still
2350 make a conditional call. Don't do this unless the ELSE block has
2351 only one incoming edge -- the CFG manipulation is too ugly otherwise.
2352 Also reject the case where the last insn of the THEN block is an indirect
2353 jump: such a block is listed as having no successors, but it confuses the
2354 rest of the CE code processing. ??? We should fix this in the future. */
2355 if (then_succ == NULL)
2356 {
2357 if (else_bb->pred->pred_next == NULL_EDGE)
2358 {
2359 rtx last_insn = then_bb->end;
2360
2361 while (last_insn
2362 && GET_CODE (last_insn) == NOTE
2363 && last_insn != then_bb->head)
2364 last_insn = PREV_INSN (last_insn);
2365
2366 if (last_insn
2367 && GET_CODE (last_insn) == JUMP_INSN
2368 && ! simplejump_p (last_insn))
2369 return FALSE;
2370
2371 join_bb = else_bb;
2372 else_bb = NULL_BLOCK;
2373 }
2374 else
2375 return FALSE;
2376 }
2377
2378 /* If the THEN block's successor is the other edge out of the TEST block,
2379 then we have an IF-THEN combo without an ELSE. */
2380 else if (then_succ->dest == else_bb)
2381 {
2382 join_bb = else_bb;
2383 else_bb = NULL_BLOCK;
2384 }
2385
2386 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
2387 has exactly one predecessor and one successor, and the outgoing edge
2388 is not complex, then we have an IF-THEN-ELSE combo. */
2389 else if (else_succ != NULL_EDGE
2390 && then_succ->dest == else_succ->dest
2391 && else_bb->pred->pred_next == NULL_EDGE
2392 && else_succ->succ_next == NULL_EDGE
2393 && ! (else_succ->flags & EDGE_COMPLEX))
2394 join_bb = else_succ->dest;
2395
2396 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2397 else
2398 return FALSE;
2399
2400 num_possible_if_blocks++;
2401
2402 if (rtl_dump_file)
2403 {
2404 fprintf (rtl_dump_file, "\nIF-THEN%s block found, pass %d, start block %d [insn %d], then %d [%d]",
2405 (else_bb) ? "-ELSE" : "",
2406 ce_info->pass,
2407 test_bb->index, (test_bb->head) ? (int)INSN_UID (test_bb->head) : -1,
2408 then_bb->index, (then_bb->head) ? (int)INSN_UID (then_bb->head) : -1);
2409
2410 if (else_bb)
2411 fprintf (rtl_dump_file, ", else %d [%d]",
2412 else_bb->index, (else_bb->head) ? (int)INSN_UID (else_bb->head) : -1);
2413
2414 fprintf (rtl_dump_file, ", join %d [%d]",
2415 join_bb->index, (join_bb->head) ? (int)INSN_UID (join_bb->head) : -1);
2416
2417 if (ce_info->num_multiple_test_blocks > 0)
2418 fprintf (rtl_dump_file, ", %d %s block%s last test %d [%d]",
2419 ce_info->num_multiple_test_blocks,
2420 (ce_info->and_and_p) ? "&&" : "||",
2421 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
2422 ce_info->last_test_bb->index,
2423 ((ce_info->last_test_bb->head)
2424 ? (int)INSN_UID (ce_info->last_test_bb->head)
2425 : -1));
2426
2427 fputc ('\n', rtl_dump_file);
2428 }
2429
2430 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
2431 first condition for free, since we've already asserted that there's a
2432 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
2433 we checked the FALLTHRU flag, those are already adjacent to the last IF
2434 block. */
2435 /* ??? As an enhancement, move the ELSE block. Have to deal with
2436 BLOCK notes, if by no other means than aborting the merge if they
2437 exist. Sticky enough I don't want to think about it now. */
2438 next = then_bb;
2439 if (else_bb && (next = next->next_bb) != else_bb)
2440 return FALSE;
2441 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2442 {
2443 if (else_bb)
2444 join_bb = NULL;
2445 else
2446 return FALSE;
2447 }
2448
2449 /* Do the real work. */
2450 ce_info->else_bb = else_bb;
2451 ce_info->join_bb = join_bb;
2452
2453 return process_if_block (ce_info);
2454 }
2455
2456 /* Convert a branch over a trap, or a branch
2457 to a trap, into a conditional trap. */
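/* For example, a test block ending in "if (x == 0)" whose taken (or
   fallthru) destination contains nothing but a trap insn can be collapsed
   into a single conditional trap on targets that provide a conditional
   trap pattern.  */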
2458
2459 static int
2460 find_cond_trap (test_bb, then_edge, else_edge)
2461 basic_block test_bb;
2462 edge then_edge, else_edge;
2463 {
2464 basic_block then_bb = then_edge->dest;
2465 basic_block else_bb = else_edge->dest;
2466 basic_block other_bb, trap_bb;
2467 rtx trap, jump, cond, cond_earliest, seq;
2468 enum rtx_code code;
2469
2470 /* Locate the block with the trap instruction. */
2471 /* ??? While we look for no successors, we really ought to allow
2472 EH successors. Need to fix merge_if_block for that to work. */
2473 if ((trap = block_has_only_trap (then_bb)) != NULL)
2474 trap_bb = then_bb, other_bb = else_bb;
2475 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2476 trap_bb = else_bb, other_bb = then_bb;
2477 else
2478 return FALSE;
2479
2480 if (rtl_dump_file)
2481 {
2482 fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2483 test_bb->index, trap_bb->index);
2484 }
2485
2486 /* If this is not a standard conditional jump, we can't parse it. */
2487 jump = test_bb->end;
2488 cond = noce_get_condition (jump, &cond_earliest);
2489 if (! cond)
2490 return FALSE;
2491
2492 /* If the conditional jump is more than just a conditional jump, then
2493 we can not do if-conversion on this block. */
2494 if (! onlyjump_p (jump))
2495 return FALSE;
2496
2497 /* We must be comparing objects whose modes imply the size. */
2498 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2499 return FALSE;
2500
2501 /* Reverse the comparison code, if necessary. */
2502 code = GET_CODE (cond);
2503 if (then_bb == trap_bb)
2504 {
2505 code = reversed_comparison_code (cond, jump);
2506 if (code == UNKNOWN)
2507 return FALSE;
2508 }
2509
2510 /* Attempt to generate the conditional trap. */
2511 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2512 TRAP_CODE (PATTERN (trap)));
2513 if (seq == NULL)
2514 return FALSE;
2515
2516 /* Emit the new insns before cond_earliest. */
2517 emit_insn_before_scope (seq, cond_earliest, INSN_SCOPE (trap));
2518
2519 /* Delete the trap block if possible. */
2520 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2521 if (trap_bb->pred == NULL)
2522 {
2523 if (post_dominators)
2524 delete_from_dominance_info (post_dominators, trap_bb);
2525 flow_delete_block (trap_bb);
2526 num_removed_blocks++;
2527 }
2528
2529 /* If the non-trap block and the test are now adjacent, merge them.
2530 Otherwise we must insert a direct branch. */
2531 if (test_bb->next_bb == other_bb)
2532 {
2533 struct ce_if_block new_ce_info;
2534 delete_insn (jump);
2535 memset ((PTR) &new_ce_info, '\0', sizeof (new_ce_info));
2536 new_ce_info.test_bb = test_bb;
2537 new_ce_info.then_bb = NULL;
2538 new_ce_info.else_bb = NULL;
2539 new_ce_info.join_bb = other_bb;
2540 merge_if_block (&new_ce_info);
2541 }
2542 else
2543 {
2544 rtx lab, newjump;
2545
2546 lab = JUMP_LABEL (jump);
2547 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2548 LABEL_NUSES (lab) += 1;
2549 JUMP_LABEL (newjump) = lab;
2550 emit_barrier_after (newjump);
2551
2552 delete_insn (jump);
2553 }
2554
2555 return TRUE;
2556 }
2557
2558 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2559 return it. */
2560
2561 static rtx
2562 block_has_only_trap (bb)
2563 basic_block bb;
2564 {
2565 rtx trap;
2566
2567 /* We're not the exit block. */
2568 if (bb == EXIT_BLOCK_PTR)
2569 return NULL_RTX;
2570
2571 /* The block must have no successors. */
2572 if (bb->succ)
2573 return NULL_RTX;
2574
2575 /* The only instruction in the block must be the trap. */
2576 trap = first_active_insn (bb);
2577 if (! (trap == bb->end
2578 && GET_CODE (PATTERN (trap)) == TRAP_IF
2579 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2580 return NULL_RTX;
2581
2582 return trap;
2583 }
2584
2585 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2586 transformable, but not necessarily the other. There need be no
2587 JOIN block.
2588
2589 Return TRUE if we were successful at converting the block.
2590
2591 Cases we'd like to look at:
2592
2593 (1)
2594 if (test) goto over; // x not live
2595 x = a;
2596 goto label;
2597 over:
2598
2599 becomes
2600
2601 x = a;
2602 if (! test) goto label;
2603
2604 (2)
2605 if (test) goto E; // x not live
2606 x = big();
2607 goto L;
2608 E:
2609 x = b;
2610 goto M;
2611
2612 becomes
2613
2614 x = b;
2615 if (test) goto M;
2616 x = big();
2617 goto L;
2618
2619 (3) // This one's really only interesting for targets that can do
2620 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2621 // it results in multiple branches on a cache line, which often
2622 // does not sit well with predictors.
2623
2624 if (test1) goto E; // predicted not taken
2625 x = a;
2626 if (test2) goto F;
2627 ...
2628 E:
2629 x = b;
2630 J:
2631
2632 becomes
2633
2634 x = a;
2635 if (test1) goto E;
2636 if (test2) goto F;
2637
2638 Notes:
2639
2640 (A) Don't do (2) if the branch is predicted against the block we're
2641 eliminating. Do it anyway if we can eliminate a branch; this requires
2642 that the sole successor of the eliminated block postdominate the other
2643 side of the if.
2644
2645 (B) With CE, on (3) we can steal from both sides of the if, creating
2646
2647 if (test1) x = a;
2648 if (!test1) x = b;
2649 if (test1) goto J;
2650 if (test2) goto F;
2651 ...
2652 J:
2653
2654 Again, this is most useful if J postdominates.
2655
2656 (C) CE substitutes for helpful life information.
2657
2658 (D) These heuristics need a lot of work. */
2659
2660 /* Tests for case 1 above. */
2661
2662 static int
2663 find_if_case_1 (test_bb, then_edge, else_edge)
2664 basic_block test_bb;
2665 edge then_edge, else_edge;
2666 {
2667 basic_block then_bb = then_edge->dest;
2668 basic_block else_bb = else_edge->dest, new_bb;
2669 edge then_succ = then_bb->succ;
2670 int then_bb_index;
2671
2672 /* THEN has one successor. */
2673 if (!then_succ || then_succ->succ_next != NULL)
2674 return FALSE;
2675
2676 /* THEN does not fall through, but is not strange either. */
2677 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2678 return FALSE;
2679
2680 /* THEN has one predecessor. */
2681 if (then_bb->pred->pred_next != NULL)
2682 return FALSE;
2683
2684 /* THEN must do something. */
2685 if (forwarder_block_p (then_bb))
2686 return FALSE;
2687
2688 num_possible_if_blocks++;
2689 if (rtl_dump_file)
2690 fprintf (rtl_dump_file,
2691 "\nIF-CASE-1 found, start %d, then %d\n",
2692 test_bb->index, then_bb->index);
2693
2694 /* THEN is small. */
2695 if (count_bb_insns (then_bb) > BRANCH_COST)
2696 return FALSE;
2697
2698 /* Registers set are dead, or are predicable. */
2699 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2700 then_bb->succ->dest, 1))
2701 return FALSE;
2702
2703 /* Conversion went ok, including moving the insns and fixing up the
2704 jump. Adjust the CFG to match. */
2705
2706 bitmap_operation (test_bb->global_live_at_end,
2707 else_bb->global_live_at_start,
2708 then_bb->global_live_at_end, BITMAP_IOR);
2709
2710 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2711 then_bb_index = then_bb->index;
2712 if (post_dominators)
2713 delete_from_dominance_info (post_dominators, then_bb);
2714 flow_delete_block (then_bb);
2715
2716 /* Make the rest of the code believe that the newly created block is the THEN_BB
2717 block we removed. */
2718 if (new_bb)
2719 {
2720 new_bb->index = then_bb_index;
2721 BASIC_BLOCK (then_bb_index) = new_bb;
2722 }
2723 /* We may have created a jump to the next insn; cleanup_cfg will
2724 clean that up later. */
2725
2726 num_removed_blocks++;
2727 num_updated_if_blocks++;
2728
2729 return TRUE;
2730 }
2731
2732 /* Test for case 2 above. */
2733
2734 static int
2735 find_if_case_2 (test_bb, then_edge, else_edge)
2736 basic_block test_bb;
2737 edge then_edge, else_edge;
2738 {
2739 basic_block then_bb = then_edge->dest;
2740 basic_block else_bb = else_edge->dest;
2741 edge else_succ = else_bb->succ;
2742 rtx note;
2743
2744 /* ELSE has one successor. */
2745 if (!else_succ || else_succ->succ_next != NULL)
2746 return FALSE;
2747
2748 /* ELSE outgoing edge is not complex. */
2749 if (else_succ->flags & EDGE_COMPLEX)
2750 return FALSE;
2751
2752 /* ELSE has one predecessor. */
2753 if (else_bb->pred->pred_next != NULL)
2754 return FALSE;
2755
2756 /* THEN is not EXIT. */
2757 if (then_bb->index < 0)
2758 return FALSE;
2759
2760 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
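  /* The REG_BR_PROB note holds the probability (scaled by REG_BR_PROB_BASE)
     that the branch is taken; since THEN is canonically the fallthru edge,
     the branch goes to ELSE, so half or more means ELSE is the predicted
     path.  */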
2761 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2762 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2763 ;
2764 else if (else_succ->dest->index < 0
2765 || dominated_by_p (post_dominators, then_bb,
2766 else_succ->dest))
2767 ;
2768 else
2769 return FALSE;
2770
2771 num_possible_if_blocks++;
2772 if (rtl_dump_file)
2773 fprintf (rtl_dump_file,
2774 "\nIF-CASE-2 found, start %d, else %d\n",
2775 test_bb->index, else_bb->index);
2776
2777 /* ELSE is small. */
2778 if (count_bb_insns (else_bb) > BRANCH_COST)
2779 return FALSE;
2780
2781 /* Registers set are dead, or are predicable. */
2782 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2783 return FALSE;
2784
2785 /* Conversion went ok, including moving the insns and fixing up the
2786 jump. Adjust the CFG to match. */
2787
2788 bitmap_operation (test_bb->global_live_at_end,
2789 then_bb->global_live_at_start,
2790 else_bb->global_live_at_end, BITMAP_IOR);
2791
2792 if (post_dominators)
2793 delete_from_dominance_info (post_dominators, else_bb);
2794 flow_delete_block (else_bb);
2795
2796 num_removed_blocks++;
2797 num_updated_if_blocks++;
2798
2799 /* ??? We may now fallthru from one of THEN's successors into a join
2800 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2801
2802 return TRUE;
2803 }
2804
2805 /* A subroutine of dead_or_predicable called through for_each_rtx.
2806 Return 1 if a memory reference is found. */
2807
2808 static int
2809 find_memory (px, data)
2810 rtx *px;
2811 void *data ATTRIBUTE_UNUSED;
2812 {
2813 return GET_CODE (*px) == MEM;
2814 }
2815
2816 /* Used by the code above to perform the actual rtl transformations.
2817 Return TRUE if successful.
2818
2819 TEST_BB is the block containing the conditional branch. MERGE_BB is
2820 the block containing the code to manipulate. OTHER_BB is the THEN or
2821 ELSE block that is not being moved. NEW_DEST is the label TEST_BB should
2822 be branching to after the conversion. REVERSEP is true if the sense of the branch should be reversed. */
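/* There are two modes of operation.  With conditional execution, the insns
   in MERGE_BB are predicated on the branch condition; otherwise they must
   be provably safe to execute unconditionally (no calls, no trapping
   operations, no memory references, and no register conflicts with the
   condition computation).  In both cases they end up moved ahead of the
   branch.  */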
2823
2824 static int
2825 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2826 basic_block test_bb, merge_bb, other_bb;
2827 basic_block new_dest;
2828 int reversep;
2829 {
2830 rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2831
2832 jump = test_bb->end;
2833
2834 /* Find the extent of the real code in the merge block. */
2835 head = merge_bb->head;
2836 end = merge_bb->end;
2837
2838 if (GET_CODE (head) == CODE_LABEL)
2839 head = NEXT_INSN (head);
2840 if (GET_CODE (head) == NOTE)
2841 {
2842 if (head == end)
2843 {
2844 head = end = NULL_RTX;
2845 goto no_body;
2846 }
2847 head = NEXT_INSN (head);
2848 }
2849
2850 if (GET_CODE (end) == JUMP_INSN)
2851 {
2852 if (head == end)
2853 {
2854 head = end = NULL_RTX;
2855 goto no_body;
2856 }
2857 end = PREV_INSN (end);
2858 }
2859
2860 /* Disable handling dead code by conditional execution if the machine needs
2861 to do anything funny with the tests, etc. */
2862 #ifndef IFCVT_MODIFY_TESTS
2863 if (HAVE_conditional_execution)
2864 {
2865 /* In the conditional execution case, we have things easy. We know
2866 the condition is reversible. We don't have to check life info,
2867 because we're going to conditionally execute the code anyway.
2868 All that's left is making sure the insns involved can actually
2869 be predicated. */
2870
2871 rtx cond, prob_val;
2872
2873 cond = cond_exec_get_condition (jump);
2874 if (! cond)
2875 return FALSE;
2876
2877 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2878 if (prob_val)
2879 prob_val = XEXP (prob_val, 0);
2880
2881 if (reversep)
2882 {
2883 enum rtx_code rev = reversed_comparison_code (cond, jump);
2884 if (rev == UNKNOWN)
2885 return FALSE;
2886 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2887 XEXP (cond, 1));
2888 if (prob_val)
2889 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2890 }
2891
2892 if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
2893 prob_val, 0))
2894 goto cancel;
2895
2896 earliest = jump;
2897 }
2898 else
2899 #endif
2900 {
2901 /* In the non-conditional execution case, we have to verify that there
2902 are no trapping operations, no calls, no references to memory, and
2903 that any registers modified are dead at the branch site. */
2904
2905 rtx insn, cond, prev;
2906 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2907 regset merge_set, tmp, test_live, test_set;
2908 struct propagate_block_info *pbi;
2909 int i, fail = 0;
2910
2911 /* Check for no calls or trapping operations. */
2912 for (insn = head; ; insn = NEXT_INSN (insn))
2913 {
2914 if (GET_CODE (insn) == CALL_INSN)
2915 return FALSE;
2916 if (INSN_P (insn))
2917 {
2918 if (may_trap_p (PATTERN (insn)))
2919 return FALSE;
2920
2921 /* ??? Even non-trapping memories such as stack frame
2922 references must be avoided. For stores, we collect
2923 no lifetime info; for reads, we'd have to assert
2924 true_dependence false against every store in the
2925 TEST range. */
2926 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2927 return FALSE;
2928 }
2929 if (insn == end)
2930 break;
2931 }
2932
2933 if (! any_condjump_p (jump))
2934 return FALSE;
2935
2936 /* Find the extent of the conditional. */
2937 cond = noce_get_condition (jump, &earliest);
2938 if (! cond)
2939 return FALSE;
2940
2941 /* Collect:
2942 MERGE_SET = set of registers set in MERGE_BB
2943 TEST_LIVE = set of registers live at EARLIEST
2944 TEST_SET = set of registers set between EARLIEST and the
2945 end of the block. */
2946
2947 tmp = INITIALIZE_REG_SET (tmp_head);
2948 merge_set = INITIALIZE_REG_SET (merge_set_head);
2949 test_live = INITIALIZE_REG_SET (test_live_head);
2950 test_set = INITIALIZE_REG_SET (test_set_head);
2951
2952 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2953 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2954 since we've already asserted that MERGE_BB is small. */
2955 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2956
2957 /* For small register class machines, don't lengthen lifetimes of
2958 hard registers before reload. */
2959 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2960 {
2961 EXECUTE_IF_SET_IN_BITMAP
2962 (merge_set, 0, i,
2963 {
2964 if (i < FIRST_PSEUDO_REGISTER
2965 && ! fixed_regs[i]
2966 && ! global_regs[i])
2967 fail = 1;
2968 });
2969 }
2970
2971 /* For TEST, we're interested in a range of insns, not a whole block.
2972 Moreover, we're interested in the insns live from OTHER_BB. */
2973
2974 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2975 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2976 0);
2977
2978 for (insn = jump; ; insn = prev)
2979 {
2980 prev = propagate_one_insn (pbi, insn);
2981 if (insn == earliest)
2982 break;
2983 }
2984
2985 free_propagate_block_info (pbi);
2986
2987 /* We can perform the transformation if
2988 MERGE_SET & (TEST_SET | TEST_LIVE)
2989 and
2990 TEST_SET & merge_bb->global_live_at_start
2991 are empty. */
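      /* That is, the hoisted code must not set any register that the
	 condition computation also sets or that is live at EARLIEST
	 (used by the test insns or live into OTHER_BB), and the condition
	 computation must not set any register the hoisted code uses,
	 since the hoisted insns end up before EARLIEST.  */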
2992
2993 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2994 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2995 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2996
2997 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2998 BITMAP_AND);
2999 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3000
3001 FREE_REG_SET (tmp);
3002 FREE_REG_SET (merge_set);
3003 FREE_REG_SET (test_live);
3004 FREE_REG_SET (test_set);
3005
3006 if (fail)
3007 return FALSE;
3008 }
3009
3010 no_body:
3011 /* We don't want to use normal invert_jump or redirect_jump because
3012 we don't want delete_insn called. Also, we want to do our own
3013 change group management. */
3014
3015 old_dest = JUMP_LABEL (jump);
3016 if (other_bb != new_dest)
3017 {
3018 new_label = block_label (new_dest);
3019 if (reversep
3020 ? ! invert_jump_1 (jump, new_label)
3021 : ! redirect_jump_1 (jump, new_label))
3022 goto cancel;
3023 }
3024
3025 if (! apply_change_group ())
3026 return FALSE;
3027
3028 if (other_bb != new_dest)
3029 {
3030 if (old_dest)
3031 LABEL_NUSES (old_dest) -= 1;
3032 if (new_label)
3033 LABEL_NUSES (new_label) += 1;
3034 JUMP_LABEL (jump) = new_label;
3035 if (reversep)
3036 invert_br_probabilities (jump);
3037
3038 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
3039 if (reversep)
3040 {
3041 gcov_type count, probability;
3042 count = BRANCH_EDGE (test_bb)->count;
3043 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
3044 FALLTHRU_EDGE (test_bb)->count = count;
3045 probability = BRANCH_EDGE (test_bb)->probability;
3046 BRANCH_EDGE (test_bb)->probability
3047 = FALLTHRU_EDGE (test_bb)->probability;
3048 FALLTHRU_EDGE (test_bb)->probability = probability;
3049 update_br_prob_note (test_bb);
3050 }
3051 }
3052
3053 /* Move the insns out of MERGE_BB to before the branch. */
3054 if (head != NULL)
3055 {
3056 if (end == merge_bb->end)
3057 merge_bb->end = PREV_INSN (head);
3058
3059 if (squeeze_notes (&head, &end))
3060 return TRUE;
3061
3062 reorder_insns (head, end, PREV_INSN (earliest));
3063 }
3064
3065 /* Remove the jump and edge if we can. */
3066 if (other_bb == new_dest)
3067 {
3068 delete_insn (jump);
3069 remove_edge (BRANCH_EDGE (test_bb));
3070 /* ??? Can't merge blocks here, as then_bb is still in use.
3071 At minimum, the merge will get done just before bb-reorder. */
3072 }
3073
3074 return TRUE;
3075
3076 cancel:
3077 cancel_changes (0);
3078 return FALSE;
3079 }
3080 \f
3081 /* Main entry point for all if-conversion. */
3082
3083 void
3084 if_convert (x_life_data_ok)
3085 int x_life_data_ok;
3086 {
3087 basic_block bb;
3088 int pass;
3089
3090 num_possible_if_blocks = 0;
3091 num_updated_if_blocks = 0;
3092 num_removed_blocks = 0;
3093 life_data_ok = (x_life_data_ok != 0);
3094
3095 /* Free up basic_block_for_insn so that we don't have to keep it
3096 up to date, either here or in merge_blocks_nomove. */
3097 free_basic_block_vars (1);
3098
3099 /* Compute postdominators if we think we'll use them. */
3100 post_dominators = NULL;
3101 if (HAVE_conditional_execution || life_data_ok)
3102 {
3103 post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
3104 }
3105 if (life_data_ok)
3106 clear_bb_flags ();
3107
3108 /* Go through each of the basic blocks looking for things to convert. If we
3109 have conditional execution, we make multiple passes to allow us to handle
3110 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
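  /* E.g. once the inner IF of "if (a) { if (b) x = 0; }" has been
     predicated and its blocks merged, the outer IF becomes a simple
     IF-THEN that a later pass can convert as well.  */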
3111 pass = 0;
3112 do
3113 {
3114 cond_exec_changed_p = FALSE;
3115 pass++;
3116
3117 #ifdef IFCVT_MULTIPLE_DUMPS
3118 if (rtl_dump_file && pass > 1)
3119 fprintf (rtl_dump_file, "\n\n========== Pass %d ==========\n", pass);
3120 #endif
3121
3122 FOR_EACH_BB (bb)
3123 {
3124 basic_block new_bb = find_if_header (bb, pass);
3125 if (new_bb)
3126 bb = new_bb;
3127 }
3128
3129 #ifdef IFCVT_MULTIPLE_DUMPS
3130 if (rtl_dump_file && cond_exec_changed_p)
3131 print_rtl_with_bb (rtl_dump_file, get_insns ());
3132 #endif
3133 }
3134 while (cond_exec_changed_p);
3135
3136 #ifdef IFCVT_MULTIPLE_DUMPS
3137 if (rtl_dump_file)
3138 fprintf (rtl_dump_file, "\n\n========== no more changes\n");
3139 #endif
3140
3141 if (post_dominators)
3142 free_dominance_info (post_dominators);
3143
3144 if (rtl_dump_file)
3145 fflush (rtl_dump_file);
3146
3147 clear_aux_for_blocks ();
3148
3149 /* Rebuild life info for basic blocks that require it. */
3150 if (num_removed_blocks && life_data_ok)
3151 {
3152 /* If we allocated new pseudos, we must resize the array for sched1. */
3153 if (max_regno < max_reg_num ())
3154 {
3155 max_regno = max_reg_num ();
3156 allocate_reg_info (max_regno, FALSE, FALSE);
3157 }
3158 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3159 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3160 | PROP_KILL_DEAD_CODE);
3161 }
3162
3163 /* Write the final stats. */
3164 if (rtl_dump_file && num_possible_if_blocks > 0)
3165 {
3166 fprintf (rtl_dump_file,
3167 "\n%d possible IF blocks searched.\n",
3168 num_possible_if_blocks);
3169 fprintf (rtl_dump_file,
3170 "%d IF blocks converted.\n",
3171 num_updated_if_blocks);
3172 fprintf (rtl_dump_file,
3173 "%d basic blocks deleted.\n\n\n",
3174 num_removed_blocks);
3175 }
3176
3177 #ifdef ENABLE_CHECKING
3178 verify_flow_info ();
3179 #endif
3180 }