1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "toplev.h"
39 #include "tm_p.h"
40
41
42 #ifndef HAVE_conditional_execution
43 #define HAVE_conditional_execution 0
44 #endif
45 #ifndef HAVE_conditional_move
46 #define HAVE_conditional_move 0
47 #endif
48 #ifndef HAVE_incscc
49 #define HAVE_incscc 0
50 #endif
51 #ifndef HAVE_decscc
52 #define HAVE_decscc 0
53 #endif
54 #ifndef HAVE_trap
55 #define HAVE_trap 0
56 #endif
57 #ifndef HAVE_conditional_trap
58 #define HAVE_conditional_trap 0
59 #endif
60
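/* Maximum number of insns to convert to conditional execution in a
   single if-block; unless the target says otherwise, this defaults to
   BRANCH_COST + 1.  */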
61 #ifndef MAX_CONDITIONAL_EXECUTE
62 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
63 #endif
64
65 #define NULL_EDGE ((struct edge_def *)NULL)
66 #define NULL_BLOCK ((struct basic_block_def *)NULL)
67
68 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
69 static int num_possible_if_blocks;
70
71 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
72 execution.  */
73 static int num_updated_if_blocks;
74
75 /* # of basic blocks that were removed. */
76 static int num_removed_blocks;
77
78 /* Whether conditional execution changes were made. */
79 static int cond_exec_changed_p;
80
81 /* True if life data ok at present. */
82 static bool life_data_ok;
83
84 /* The post-dominator relation on the original block numbers. */
85 static dominance_info post_dominators;
86
87 /* Forward references. */
88 static int count_bb_insns PARAMS ((basic_block));
89 static rtx first_active_insn PARAMS ((basic_block));
90 static rtx last_active_insn PARAMS ((basic_block, int));
91 static int seq_contains_jump PARAMS ((rtx));
92 static basic_block block_fallthru PARAMS ((basic_block));
93 static int cond_exec_process_insns PARAMS ((ce_if_block_t *,
94 rtx, rtx, rtx, rtx, int));
95 static rtx cond_exec_get_condition PARAMS ((rtx));
96 static int cond_exec_process_if_block PARAMS ((ce_if_block_t *, int));
97 static rtx noce_get_condition PARAMS ((rtx, rtx *));
98 static int noce_operand_ok PARAMS ((rtx));
99 static int noce_process_if_block PARAMS ((ce_if_block_t *));
100 static int process_if_block PARAMS ((ce_if_block_t *));
101 static void merge_if_block PARAMS ((ce_if_block_t *));
102 static int find_cond_trap PARAMS ((basic_block, edge, edge));
103 static basic_block find_if_header PARAMS ((basic_block, int));
104 static int block_jumps_and_fallthru_p PARAMS ((basic_block, basic_block));
105 static int find_if_block PARAMS ((ce_if_block_t *));
106 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
107 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
108 static int find_memory PARAMS ((rtx *, void *));
109 static int dead_or_predicable PARAMS ((basic_block, basic_block,
110 basic_block, basic_block, int));
111 static void noce_emit_move_insn PARAMS ((rtx, rtx));
112 static rtx block_has_only_trap PARAMS ((basic_block));
113 \f
114 /* Count the number of non-jump active insns in BB. */
115
116 static int
117 count_bb_insns (bb)
118 basic_block bb;
119 {
120 int count = 0;
121 rtx insn = bb->head;
122
123 while (1)
124 {
125 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
126 count++;
127
128 if (insn == bb->end)
129 break;
130 insn = NEXT_INSN (insn);
131 }
132
133 return count;
134 }
135
136 /* Return the first non-jump active insn in the basic block. */
137
138 static rtx
139 first_active_insn (bb)
140 basic_block bb;
141 {
142 rtx insn = bb->head;
143
144 if (GET_CODE (insn) == CODE_LABEL)
145 {
146 if (insn == bb->end)
147 return NULL_RTX;
148 insn = NEXT_INSN (insn);
149 }
150
151 while (GET_CODE (insn) == NOTE)
152 {
153 if (insn == bb->end)
154 return NULL_RTX;
155 insn = NEXT_INSN (insn);
156 }
157
158 if (GET_CODE (insn) == JUMP_INSN)
159 return NULL_RTX;
160
161 return insn;
162 }
163
164 /* Return the last active (non-jump) insn in the basic block.  */
165
166 static rtx
167 last_active_insn (bb, skip_use_p)
168 basic_block bb;
169 int skip_use_p;
170 {
171 rtx insn = bb->end;
172 rtx head = bb->head;
173
174 while (GET_CODE (insn) == NOTE
175 || GET_CODE (insn) == JUMP_INSN
176 || (skip_use_p
177 && GET_CODE (insn) == INSN
178 && GET_CODE (PATTERN (insn)) == USE))
179 {
180 if (insn == head)
181 return NULL_RTX;
182 insn = PREV_INSN (insn);
183 }
184
185 if (GET_CODE (insn) == CODE_LABEL)
186 return NULL_RTX;
187
188 return insn;
189 }
190
191 /* It is possible, especially when dealing with multi-word
192 arithmetic, for the expanders to have emitted jumps. Search
193 through the sequence and return TRUE if a jump exists so that
194 we can abort the conversion. */
195
196 static int
197 seq_contains_jump (insn)
198 rtx insn;
199 {
200 while (insn)
201 {
202 if (GET_CODE (insn) == JUMP_INSN)
203 return 1;
204 insn = NEXT_INSN (insn);
205 }
206 return 0;
207 }
208
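/* Return the basic block reached by falling through the end of BB, or
   NULL_BLOCK if BB has no fallthru successor.  */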
209 static basic_block
210 block_fallthru (bb)
211 basic_block bb;
212 {
213 edge e;
214
215 for (e = bb->succ;
216 e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
217 e = e->succ_next)
218 ;
219
220 return (e) ? e->dest : NULL_BLOCK;
221 }
222 \f
223 /* Go through a bunch of insns, converting them to conditional
224 execution format if possible. Return TRUE if all of the non-note
225 insns were processed. */
226
227 static int
228 cond_exec_process_insns (ce_info, start, end, test, prob_val, mod_ok)
229 ce_if_block_t *ce_info ATTRIBUTE_UNUSED; /* if block information */
230 rtx start; /* first insn to look at */
231 rtx end; /* last insn to look at */
232 rtx test; /* conditional execution test */
233 rtx prob_val; /* probability of branch taken. */
234 int mod_ok; /* true if modifications to the last insn are ok.  */
235 {
236 int must_be_last = FALSE;
237 rtx insn;
238 rtx xtest;
239 rtx pattern;
240
241 if (!start || !end)
242 return FALSE;
243
244 for (insn = start; ; insn = NEXT_INSN (insn))
245 {
246 if (GET_CODE (insn) == NOTE)
247 goto insn_done;
248
249 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
250 abort ();
251
252 /* Remove USE insns that get in the way. */
253 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
254 {
255 /* ??? Ug. Actually unlinking the thing is problematic,
256 given what we'd have to coordinate with our callers. */
257 PUT_CODE (insn, NOTE);
258 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
259 NOTE_SOURCE_FILE (insn) = 0;
260 goto insn_done;
261 }
262
263 /* Last insn wasn't last? */
264 if (must_be_last)
265 return FALSE;
266
267 if (modified_in_p (test, insn))
268 {
269 if (!mod_ok)
270 return FALSE;
271 must_be_last = TRUE;
272 }
273
274 /* Now build the conditional form of the instruction. */
275 pattern = PATTERN (insn);
276 xtest = copy_rtx (test);
277
278 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
279 two conditions. */
280 if (GET_CODE (pattern) == COND_EXEC)
281 {
282 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
283 return FALSE;
284
285 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
286 COND_EXEC_TEST (pattern));
287 pattern = COND_EXEC_CODE (pattern);
288 }
289
290 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
291
292 /* If the machine needs to modify the insn being conditionally executed,
293 say for example to force a constant integer operand into a temp
294 register, do so here. */
295 #ifdef IFCVT_MODIFY_INSN
296 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
297 if (! pattern)
298 return FALSE;
299 #endif
300
301 validate_change (insn, &PATTERN (insn), pattern, 1);
302
303 if (GET_CODE (insn) == CALL_INSN && prob_val)
304 validate_change (insn, &REG_NOTES (insn),
305 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
306 REG_NOTES (insn)), 1);
307
308 insn_done:
309 if (insn == end)
310 break;
311 }
312
313 return TRUE;
314 }
315
316 /* Return the condition for a jump. Do not do any special processing. */
317
318 static rtx
319 cond_exec_get_condition (jump)
320 rtx jump;
321 {
322 rtx test_if, cond;
323
324 if (any_condjump_p (jump))
325 test_if = SET_SRC (pc_set (jump));
326 else
327 return NULL_RTX;
328 cond = XEXP (test_if, 0);
329
330 /* If this branches to JUMP_LABEL when the condition is false,
331 reverse the condition. */
332 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
333 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
334 {
335 enum rtx_code rev = reversed_comparison_code (cond, jump);
336 if (rev == UNKNOWN)
337 return NULL_RTX;
338
339 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
340 XEXP (cond, 1));
341 }
342
343 return cond;
344 }
345
346 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
347 to conditional execution. Return TRUE if we were successful at
348 converting the block. */
349
350 static int
351 cond_exec_process_if_block (ce_info, do_multiple_p)
352 ce_if_block_t * ce_info; /* if block information */
353 int do_multiple_p; /* != 0 if we should handle && and || blocks */
354 {
355 basic_block test_bb = ce_info->test_bb; /* last test block */
356 basic_block then_bb = ce_info->then_bb; /* THEN */
357 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
358 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
359 rtx then_start; /* first insn in THEN block */
360 rtx then_end; /* last insn + 1 in THEN block */
361 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
362 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
363 int max; /* max # of insns to convert. */
364 int then_mod_ok; /* whether conditional mods are ok in THEN */
365 rtx true_expr; /* test for else block insns */
366 rtx false_expr; /* test for then block insns */
367 rtx true_prob_val; /* probability of else block */
368 rtx false_prob_val; /* probability of then block */
369 int n_insns;
370 enum rtx_code false_code;
371
372 /* If test is comprised of && or || elements, and we've failed at handling
373 all of them together, just use the last test if it is the special case of
374 && elements without an ELSE block. */
375 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
376 {
377 if (else_bb || ! ce_info->and_and_p)
378 return FALSE;
379
380 ce_info->test_bb = test_bb = ce_info->last_test_bb;
381 ce_info->num_multiple_test_blocks = 0;
382 ce_info->num_and_and_blocks = 0;
383 ce_info->num_or_or_blocks = 0;
384 }
385
386 /* Find the conditional jump to the ELSE or JOIN part, and isolate
387 the test. */
388 test_expr = cond_exec_get_condition (test_bb->end);
389 if (! test_expr)
390 return FALSE;
391
392 /* If the conditional jump is more than just a conditional jump,
393 then we can not do conditional execution conversion on this block. */
394 if (! onlyjump_p (test_bb->end))
395 return FALSE;
396
397 /* Collect the bounds of where we're to search, skipping any labels, jumps
398 and notes at the beginning and end of the block. Then count the total
399 number of insns and see if it is small enough to convert. */
400 then_start = first_active_insn (then_bb);
401 then_end = last_active_insn (then_bb, TRUE);
402 n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
403 max = MAX_CONDITIONAL_EXECUTE;
404
405 if (else_bb)
406 {
407 max *= 2;
408 else_start = first_active_insn (else_bb);
409 else_end = last_active_insn (else_bb, TRUE);
410 n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
411 }
412
413 if (n_insns > max)
414 return FALSE;
415
416 /* Map test_expr/test_jump into the appropriate MD tests to use on
417 the conditionally executed code. */
418
419 true_expr = test_expr;
420
421 false_code = reversed_comparison_code (true_expr, test_bb->end);
422 if (false_code != UNKNOWN)
423 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
424 XEXP (true_expr, 0), XEXP (true_expr, 1));
425 else
426 false_expr = NULL_RTX;
427
428 #ifdef IFCVT_MODIFY_TESTS
429 /* If the machine description needs to modify the tests, such as setting a
430 conditional execution register from a comparison, it can do so here. */
431 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
432
433 /* See if the conversion failed */
434 if (!true_expr || !false_expr)
435 goto fail;
436 #endif
437
438 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
439 if (true_prob_val)
440 {
441 true_prob_val = XEXP (true_prob_val, 0);
442 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
443 }
444 else
445 false_prob_val = NULL_RTX;
446
447 /* If we have && or || tests, do them here. These tests are in the adjacent
448 blocks after the first block containing the test. */
449 if (ce_info->num_multiple_test_blocks > 0)
450 {
451 basic_block bb = test_bb;
452 basic_block last_test_bb = ce_info->last_test_bb;
453
454 if (! false_expr)
455 goto fail;
456
457 do
458 {
459 rtx start, end;
460 rtx t, f;
461
462 bb = block_fallthru (bb);
463 start = first_active_insn (bb);
464 end = last_active_insn (bb, TRUE);
465 if (start
466 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
467 false_prob_val, FALSE))
468 goto fail;
469
470 /* If the conditional jump is more than just a conditional jump, then
471 we can not do conditional execution conversion on this block. */
472 if (! onlyjump_p (bb->end))
473 goto fail;
474
475 /* Find the conditional jump and isolate the test. */
476 t = cond_exec_get_condition (bb->end);
477 if (! t)
478 goto fail;
479
480 f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
481 GET_MODE (t),
482 XEXP (t, 0),
483 XEXP (t, 1));
484
485 if (ce_info->and_and_p)
486 {
487 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
488 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
489 }
490 else
491 {
492 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
493 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
494 }
495
496 /* If the machine description needs to modify the tests, such as
497 setting a conditional execution register from a comparison, it can
498 do so here. */
499 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
500 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
501
502 /* See if the conversion failed */
503 if (!t || !f)
504 goto fail;
505 #endif
506
507 true_expr = t;
508 false_expr = f;
509 }
510 while (bb != last_test_bb);
511 }
512
513 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
514 in the THEN block.  */
515 then_mod_ok = (else_bb == NULL_BLOCK);
516
517 /* Go through the THEN and ELSE blocks converting the insns if possible
518 to conditional execution. */
519
520 if (then_end
521 && (! false_expr
522 || ! cond_exec_process_insns (ce_info, then_start, then_end,
523 false_expr, false_prob_val,
524 then_mod_ok)))
525 goto fail;
526
527 if (else_bb && else_end
528 && ! cond_exec_process_insns (ce_info, else_start, else_end,
529 true_expr, true_prob_val, TRUE))
530 goto fail;
531
532 /* If we cannot apply the changes, fail. Do not go through the normal fail
533 processing, since apply_change_group will call cancel_changes. */
534 if (! apply_change_group ())
535 {
536 #ifdef IFCVT_MODIFY_CANCEL
537 /* Cancel any machine dependent changes. */
538 IFCVT_MODIFY_CANCEL (ce_info);
539 #endif
540 return FALSE;
541 }
542
543 #ifdef IFCVT_MODIFY_FINAL
544 /* Do any machine dependent final modifications */
545 IFCVT_MODIFY_FINAL (ce_info);
546 #endif
547
548 /* Conversion succeeded. */
549 if (rtl_dump_file)
550 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
551 n_insns, (n_insns == 1) ? " was" : "s were");
552
553 /* Merge the blocks! */
554 merge_if_block (ce_info);
555 cond_exec_changed_p = TRUE;
556 return TRUE;
557
558 fail:
559 #ifdef IFCVT_MODIFY_CANCEL
560 /* Cancel any machine dependent changes. */
561 IFCVT_MODIFY_CANCEL (ce_info);
562 #endif
563
564 cancel_changes (0);
565 return FALSE;
566 }
567 \f
568 /* Used by noce_process_if_block to communicate with its subroutines.
569
570 The subroutines know that A and B may be evaluated freely. They
571 know that X is a register. They should insert new instructions
572 before cond_earliest. */
573
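/* In the patterns handled here, X is the destination of the store, A is
   the value stored on the THEN arm, and B is the value stored on the
   ELSE arm (or X itself when there is no ELSE store).  */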
574 struct noce_if_info
575 {
576 basic_block test_bb;
577 rtx insn_a, insn_b;
578 rtx x, a, b;
579 rtx jump, cond, cond_earliest;
580 };
581
582 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
583 rtx, int, int));
584 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
585 static int noce_try_addcc PARAMS ((struct noce_if_info *));
586 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
587 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
588 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
589 rtx, enum rtx_code, rtx,
590 rtx, rtx, rtx));
591 static int noce_try_cmove PARAMS ((struct noce_if_info *));
592 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
593 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
594 rtx, rtx *));
595 static int noce_try_minmax PARAMS ((struct noce_if_info *));
596 static int noce_try_abs PARAMS ((struct noce_if_info *));
597
598 /* Helper function for noce_try_store_flag*. */
599
600 static rtx
601 noce_emit_store_flag (if_info, x, reversep, normalize)
602 struct noce_if_info *if_info;
603 rtx x;
604 int reversep, normalize;
605 {
606 rtx cond = if_info->cond;
607 int cond_complex;
608 enum rtx_code code;
609
610 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
611 || ! general_operand (XEXP (cond, 1), VOIDmode));
612
613 /* If earliest == jump, or when the condition is complex, try to
614 build the store_flag insn directly. */
615
616 if (cond_complex)
617 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
618
619 if (reversep)
620 code = reversed_comparison_code (cond, if_info->jump);
621 else
622 code = GET_CODE (cond);
623
624 if ((if_info->cond_earliest == if_info->jump || cond_complex)
625 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
626 {
627 rtx tmp;
628
629 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
630 XEXP (cond, 1));
631 tmp = gen_rtx_SET (VOIDmode, x, tmp);
632
633 start_sequence ();
634 tmp = emit_insn (tmp);
635
636 if (recog_memoized (tmp) >= 0)
637 {
638 tmp = get_insns ();
639 end_sequence ();
640 emit_insn (tmp);
641
642 if_info->cond_earliest = if_info->jump;
643
644 return x;
645 }
646
647 end_sequence ();
648 }
649
650 /* Don't even try if the comparison operands or the mode of X are weird. */
651 if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
652 return NULL_RTX;
653
654 return emit_store_flag (x, code, XEXP (cond, 0),
655 XEXP (cond, 1), VOIDmode,
656 (code == LTU || code == LEU
657 || code == GEU || code == GTU), normalize);
658 }
659
660 /* Emit an insn to copy Y into X, handling a STRICT_LOW_PART destination specially.  */
661 static void
662 noce_emit_move_insn (x, y)
663 rtx x, y;
664 {
665 enum machine_mode outmode, inmode;
666 rtx outer, inner;
667 int bitpos;
668
669 if (GET_CODE (x) != STRICT_LOW_PART)
670 {
671 emit_move_insn (x, y);
672 return;
673 }
674
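/* X is a STRICT_LOW_PART of a SUBREG; store Y into just those bits of
   the inner register with a bit-field store.  */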
675 outer = XEXP (x, 0);
676 inner = XEXP (outer, 0);
677 outmode = GET_MODE (outer);
678 inmode = GET_MODE (inner);
679 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
680 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
681 GET_MODE_BITSIZE (inmode));
682 }
683
684 /* Convert "if (test) x = 1; else x = 0".
685
686 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
687 tried in noce_try_store_flag_constants after noce_try_cmove has had
688 a go at the conversion. */
689
690 static int
691 noce_try_store_flag (if_info)
692 struct noce_if_info *if_info;
693 {
694 int reversep;
695 rtx target, seq;
696
697 if (GET_CODE (if_info->b) == CONST_INT
698 && INTVAL (if_info->b) == STORE_FLAG_VALUE
699 && if_info->a == const0_rtx)
700 reversep = 0;
701 else if (if_info->b == const0_rtx
702 && GET_CODE (if_info->a) == CONST_INT
703 && INTVAL (if_info->a) == STORE_FLAG_VALUE
704 && (reversed_comparison_code (if_info->cond, if_info->jump)
705 != UNKNOWN))
706 reversep = 1;
707 else
708 return FALSE;
709
710 start_sequence ();
711
712 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
713 if (target)
714 {
715 if (target != if_info->x)
716 noce_emit_move_insn (if_info->x, target);
717
718 seq = get_insns ();
719 end_sequence ();
720 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
721
722 return TRUE;
723 }
724 else
725 {
726 end_sequence ();
727 return FALSE;
728 }
729 }
730
731 /* Convert "if (test) x = a; else x = b", for A and B constant. */
732
733 static int
734 noce_try_store_flag_constants (if_info)
735 struct noce_if_info *if_info;
736 {
737 rtx target, seq;
738 int reversep;
739 HOST_WIDE_INT itrue, ifalse, diff, tmp;
740 int normalize, can_reverse;
741 enum machine_mode mode;
742
743 if (! no_new_pseudos
744 && GET_CODE (if_info->a) == CONST_INT
745 && GET_CODE (if_info->b) == CONST_INT)
746 {
747 mode = GET_MODE (if_info->x);
748 ifalse = INTVAL (if_info->a);
749 itrue = INTVAL (if_info->b);
750
751 /* Make sure we can represent the difference between the two values. */
752 if ((itrue - ifalse > 0)
753 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
754 return FALSE;
755
756 diff = trunc_int_for_mode (itrue - ifalse, mode);
757
758 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
759 != UNKNOWN);
760
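/* Decide how the store-flag result should be normalized (0/1 versus
   0/-1) and whether ITRUE and IFALSE should be swapped so that one of
   the cheap forms below applies.  */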
761 reversep = 0;
762 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
763 normalize = 0;
764 else if (ifalse == 0 && exact_log2 (itrue) >= 0
765 && (STORE_FLAG_VALUE == 1
766 || BRANCH_COST >= 2))
767 normalize = 1;
768 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
769 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
770 normalize = 1, reversep = 1;
771 else if (itrue == -1
772 && (STORE_FLAG_VALUE == -1
773 || BRANCH_COST >= 2))
774 normalize = -1;
775 else if (ifalse == -1 && can_reverse
776 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
777 normalize = -1, reversep = 1;
778 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
779 || BRANCH_COST >= 3)
780 normalize = -1;
781 else
782 return FALSE;
783
784 if (reversep)
785 {
786 tmp = itrue; itrue = ifalse; ifalse = tmp;
787 diff = trunc_int_for_mode (-diff, mode);
788 }
789
790 start_sequence ();
791 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
792 if (! target)
793 {
794 end_sequence ();
795 return FALSE;
796 }
797
798 /* if (test) x = 3; else x = 4;
799 => x = 3 + (test == 0); */
800 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
801 {
802 target = expand_simple_binop (mode,
803 (diff == STORE_FLAG_VALUE
804 ? PLUS : MINUS),
805 GEN_INT (ifalse), target, if_info->x, 0,
806 OPTAB_WIDEN);
807 }
808
809 /* if (test) x = 8; else x = 0;
810 => x = (test != 0) << 3; */
811 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
812 {
813 target = expand_simple_binop (mode, ASHIFT,
814 target, GEN_INT (tmp), if_info->x, 0,
815 OPTAB_WIDEN);
816 }
817
818 /* if (test) x = -1; else x = b;
819 => x = -(test != 0) | b; */
820 else if (itrue == -1)
821 {
822 target = expand_simple_binop (mode, IOR,
823 target, GEN_INT (ifalse), if_info->x, 0,
824 OPTAB_WIDEN);
825 }
826
827 /* if (test) x = a; else x = b;
828 => x = (-(test != 0) & (b - a)) + a; */
829 else
830 {
831 target = expand_simple_binop (mode, AND,
832 target, GEN_INT (diff), if_info->x, 0,
833 OPTAB_WIDEN);
834 if (target)
835 target = expand_simple_binop (mode, PLUS,
836 target, GEN_INT (ifalse),
837 if_info->x, 0, OPTAB_WIDEN);
838 }
839
840 if (! target)
841 {
842 end_sequence ();
843 return FALSE;
844 }
845
846 if (target != if_info->x)
847 noce_emit_move_insn (if_info->x, target);
848
849 seq = get_insns ();
850 end_sequence ();
851
852 if (seq_contains_jump (seq))
853 return FALSE;
854
855 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
856
857 return TRUE;
858 }
859
860 return FALSE;
861 }
862
863 /* Convert "if (test) foo++" into "foo += (test != 0)", and
864 similarly for "foo--". */
865
866 static int
867 noce_try_addcc (if_info)
868 struct noce_if_info *if_info;
869 {
870 rtx target, seq;
871 int subtract, normalize;
872
873 if (! no_new_pseudos
874 /* Should be no `else' case to worry about. */
875 && if_info->b == if_info->x
876 && GET_CODE (if_info->a) == PLUS
877 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
878 && (reversed_comparison_code (if_info->cond, if_info->jump)
879 != UNKNOWN))
880 {
881 rtx cond = if_info->cond;
882 enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
883
884 /* First try to use addcc pattern. */
885 if (general_operand (XEXP (cond, 0), VOIDmode)
886 && general_operand (XEXP (cond, 1), VOIDmode))
887 {
888 start_sequence ();
889 target = emit_conditional_add (if_info->x, code,
890 XEXP (cond, 0), XEXP (cond, 1),
891 VOIDmode,
892 if_info->b, XEXP (if_info->a, 1),
893 GET_MODE (if_info->x),
894 (code == LTU || code == GEU
895 || code == LEU || code == GTU));
896 if (target)
897 {
898 if (target != if_info->x)
899 noce_emit_move_insn (if_info->x, target);
900
901 seq = get_insns ();
902 end_sequence ();
903 emit_insn_before_scope (seq, if_info->jump,
904 INSN_SCOPE (if_info->insn_a));
905 return TRUE;
906 }
907 end_sequence ();
908 }
909
910 /* If that fails, construct conditional increment or decrement using
911 setcc. */
912 if (BRANCH_COST >= 2
913 && (XEXP (if_info->a, 1) == const1_rtx
914 || XEXP (if_info->a, 1) == constm1_rtx))
915 {
916 start_sequence ();
917 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
918 subtract = 0, normalize = 0;
919 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
920 subtract = 1, normalize = 0;
921 else
922 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
923
924
925 target = noce_emit_store_flag (if_info,
926 gen_reg_rtx (GET_MODE (if_info->x)),
927 1, normalize);
928
929 if (target)
930 target = expand_simple_binop (GET_MODE (if_info->x),
931 subtract ? MINUS : PLUS,
932 if_info->x, target, if_info->x,
933 0, OPTAB_WIDEN);
934 if (target)
935 {
936 if (target != if_info->x)
937 noce_emit_move_insn (if_info->x, target);
938
939 seq = get_insns ();
940 end_sequence ();
941
942 if (seq_contains_jump (seq))
943 return FALSE;
944
945 emit_insn_before_scope (seq, if_info->jump,
946 INSN_SCOPE (if_info->insn_a));
947
948 return TRUE;
949 }
950 end_sequence ();
951 }
952 }
953
954 return FALSE;
955 }
956
957 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
958
959 static int
960 noce_try_store_flag_mask (if_info)
961 struct noce_if_info *if_info;
962 {
963 rtx target, seq;
964 int reversep;
965
966 reversep = 0;
967 if (! no_new_pseudos
968 && (BRANCH_COST >= 2
969 || STORE_FLAG_VALUE == -1)
970 && ((if_info->a == const0_rtx
971 && rtx_equal_p (if_info->b, if_info->x))
972 || ((reversep = (reversed_comparison_code (if_info->cond,
973 if_info->jump)
974 != UNKNOWN))
975 && if_info->b == const0_rtx
976 && rtx_equal_p (if_info->a, if_info->x))))
977 {
978 start_sequence ();
979 target = noce_emit_store_flag (if_info,
980 gen_reg_rtx (GET_MODE (if_info->x)),
981 reversep, -1);
982 if (target)
983 target = expand_simple_binop (GET_MODE (if_info->x), AND,
984 if_info->x, target, if_info->x, 0,
985 OPTAB_WIDEN);
986
987 if (target)
988 {
989 if (target != if_info->x)
990 noce_emit_move_insn (if_info->x, target);
991
992 seq = get_insns ();
993 end_sequence ();
994
995 if (seq_contains_jump (seq))
996 return FALSE;
997
998 emit_insn_before_scope (seq, if_info->jump,
999 INSN_SCOPE (if_info->insn_a));
1000
1001 return TRUE;
1002 }
1003
1004 end_sequence ();
1005 }
1006
1007 return FALSE;
1008 }
1009
1010 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
1011
1012 static rtx
1013 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
1014 struct noce_if_info *if_info;
1015 rtx x, cmp_a, cmp_b, vfalse, vtrue;
1016 enum rtx_code code;
1017 {
1018 /* If earliest == jump, try to build the cmove insn directly.
1019 This is helpful when combine has created some complex condition
1020 (like for alpha's cmovlbs) that we can't hope to regenerate
1021 through the normal interface. */
1022
1023 if (if_info->cond_earliest == if_info->jump)
1024 {
1025 rtx tmp;
1026
1027 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1028 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1029 tmp = gen_rtx_SET (VOIDmode, x, tmp);
1030
1031 start_sequence ();
1032 tmp = emit_insn (tmp);
1033
1034 if (recog_memoized (tmp) >= 0)
1035 {
1036 tmp = get_insns ();
1037 end_sequence ();
1038 emit_insn (tmp);
1039
1040 return x;
1041 }
1042
1043 end_sequence ();
1044 }
1045
1046 /* Don't even try if the comparison operands are weird. */
1047 if (! general_operand (cmp_a, GET_MODE (cmp_a))
1048 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1049 return NULL_RTX;
1050
1051 #if HAVE_conditional_move
1052 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1053 vtrue, vfalse, GET_MODE (x),
1054 (code == LTU || code == GEU
1055 || code == LEU || code == GTU));
1056 #else
1057 /* We'll never get here, as noce_process_if_block doesn't call the
1058 functions involved. Ifdef code, however, should be discouraged
1059 because it leads to typos in the code not selected. However,
1060 emit_conditional_move won't exist either. */
1061 return NULL_RTX;
1062 #endif
1063 }
1064
1065 /* Try only simple constants and registers here. More complex cases
1066 are handled in noce_try_cmove_arith after the noce_try_store_flag*
1067 transforms have had a go at it.  */
1068
1069 static int
1070 noce_try_cmove (if_info)
1071 struct noce_if_info *if_info;
1072 {
1073 enum rtx_code code;
1074 rtx target, seq;
1075
1076 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1077 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1078 {
1079 start_sequence ();
1080
1081 code = GET_CODE (if_info->cond);
1082 target = noce_emit_cmove (if_info, if_info->x, code,
1083 XEXP (if_info->cond, 0),
1084 XEXP (if_info->cond, 1),
1085 if_info->a, if_info->b);
1086
1087 if (target)
1088 {
1089 if (target != if_info->x)
1090 noce_emit_move_insn (if_info->x, target);
1091
1092 seq = get_insns ();
1093 end_sequence ();
1094 emit_insn_before_scope (seq, if_info->jump,
1095 INSN_SCOPE (if_info->insn_a));
1096 return TRUE;
1097 }
1098 else
1099 {
1100 end_sequence ();
1101 return FALSE;
1102 }
1103 }
1104
1105 return FALSE;
1106 }
1107
1108 /* Try more complex cases involving conditional_move. */
1109
1110 static int
1111 noce_try_cmove_arith (if_info)
1112 struct noce_if_info *if_info;
1113 {
1114 rtx a = if_info->a;
1115 rtx b = if_info->b;
1116 rtx x = if_info->x;
1117 rtx insn_a, insn_b;
1118 rtx tmp, target;
1119 int is_mem = 0;
1120 enum rtx_code code;
1121
1122 /* A conditional move from two memory sources is equivalent to a
1123 conditional on their addresses followed by a load. Don't do this
1124 early because it'll screw alias analysis. Note that we've
1125 already checked for no side effects. */
1126 if (! no_new_pseudos && cse_not_expected
1127 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
1128 && BRANCH_COST >= 5)
1129 {
1130 a = XEXP (a, 0);
1131 b = XEXP (b, 0);
1132 x = gen_reg_rtx (Pmode);
1133 is_mem = 1;
1134 }
1135
1136 /* ??? We could handle this if we knew that a load from A or B could
1137 not fault. This is also true if we've already loaded
1138 from the address along the path from ENTRY. */
1139 else if (may_trap_p (a) || may_trap_p (b))
1140 return FALSE;
1141
1142 /* if (test) x = a + b; else x = c - d;
1143 => y = a + b;
1144 x = c - d;
1145 if (test)
1146 x = y;
1147 */
1148
1149 code = GET_CODE (if_info->cond);
1150 insn_a = if_info->insn_a;
1151 insn_b = if_info->insn_b;
1152
1153 /* Possibly rearrange operands to make things come out more natural. */
1154 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1155 {
1156 int reversep = 0;
1157 if (rtx_equal_p (b, x))
1158 reversep = 1;
1159 else if (general_operand (b, GET_MODE (b)))
1160 reversep = 1;
1161
1162 if (reversep)
1163 {
1164 code = reversed_comparison_code (if_info->cond, if_info->jump);
1165 tmp = a, a = b, b = tmp;
1166 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1167 }
1168 }
1169
1170 start_sequence ();
1171
1172 /* If either operand is complex, load it into a register first.
1173 The best way to do this is to copy the original insn. In this
1174 way we preserve any clobbers etc that the insn may have had.
1175 This is of course not possible in the IS_MEM case. */
1176 if (! general_operand (a, GET_MODE (a)))
1177 {
1178 rtx set;
1179
1180 if (no_new_pseudos)
1181 goto end_seq_and_fail;
1182
1183 if (is_mem)
1184 {
1185 tmp = gen_reg_rtx (GET_MODE (a));
1186 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1187 }
1188 else if (! insn_a)
1189 goto end_seq_and_fail;
1190 else
1191 {
1192 a = gen_reg_rtx (GET_MODE (a));
1193 tmp = copy_rtx (insn_a);
1194 set = single_set (tmp);
1195 SET_DEST (set) = a;
1196 tmp = emit_insn (PATTERN (tmp));
1197 }
1198 if (recog_memoized (tmp) < 0)
1199 goto end_seq_and_fail;
1200 }
1201 if (! general_operand (b, GET_MODE (b)))
1202 {
1203 rtx set;
1204
1205 if (no_new_pseudos)
1206 goto end_seq_and_fail;
1207
1208 if (is_mem)
1209 {
1210 tmp = gen_reg_rtx (GET_MODE (b));
1211 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1212 }
1213 else if (! insn_b)
1214 goto end_seq_and_fail;
1215 else
1216 {
1217 b = gen_reg_rtx (GET_MODE (b));
1218 tmp = copy_rtx (insn_b);
1219 set = single_set (tmp);
1220 SET_DEST (set) = b;
1221 tmp = emit_insn (PATTERN (tmp));
1222 }
1223 if (recog_memoized (tmp) < 0)
1224 goto end_seq_and_fail;
1225 }
1226
1227 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1228 XEXP (if_info->cond, 1), a, b);
1229
1230 if (! target)
1231 goto end_seq_and_fail;
1232
1233 /* If we're handling memories as above, emit the load now.  */
1234 if (is_mem)
1235 {
1236 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1237
1238 /* Copy over flags as appropriate. */
1239 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1240 MEM_VOLATILE_P (tmp) = 1;
1241 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1242 MEM_IN_STRUCT_P (tmp) = 1;
1243 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1244 MEM_SCALAR_P (tmp) = 1;
1245 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1246 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1247 set_mem_align (tmp,
1248 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1249
1250 noce_emit_move_insn (if_info->x, tmp);
1251 }
1252 else if (target != x)
1253 noce_emit_move_insn (x, target);
1254
1255 tmp = get_insns ();
1256 end_sequence ();
1257 emit_insn_before_scope (tmp, if_info->jump, INSN_SCOPE (if_info->insn_a));
1258 return TRUE;
1259
1260 end_seq_and_fail:
1261 end_sequence ();
1262 return FALSE;
1263 }
1264
1265 /* For most cases, the simplified condition we found is the best
1266 choice, but this is not the case for the min/max/abs transforms.
1267 For these we wish to know that it is A or B in the condition. */
1268
1269 static rtx
1270 noce_get_alt_condition (if_info, target, earliest)
1271 struct noce_if_info *if_info;
1272 rtx target;
1273 rtx *earliest;
1274 {
1275 rtx cond, set, insn;
1276 int reverse;
1277
1278 /* If target is already mentioned in the known condition, return it. */
1279 if (reg_mentioned_p (target, if_info->cond))
1280 {
1281 *earliest = if_info->cond_earliest;
1282 return if_info->cond;
1283 }
1284
1285 set = pc_set (if_info->jump);
1286 cond = XEXP (SET_SRC (set), 0);
1287 reverse
1288 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1289 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1290
1291 /* If we're looking for a constant, try to make the conditional
1292 have that constant in it. There are two reasons why it may
1293 not have the constant we want:
1294
1295 1. GCC may have needed to put the constant in a register, because
1296 the target can't compare directly against that constant. For
1297 this case, we look for a SET immediately before the comparison
1298 that puts a constant in that register.
1299
1300 2. GCC may have canonicalized the conditional, for example
1301 replacing "if x < 4" with "if x <= 3". We can undo that (or
1302 make equivalent types of changes) to get the constants we need
1303 if they're off by one in the right direction. */
1304
1305 if (GET_CODE (target) == CONST_INT)
1306 {
1307 enum rtx_code code = GET_CODE (if_info->cond);
1308 rtx op_a = XEXP (if_info->cond, 0);
1309 rtx op_b = XEXP (if_info->cond, 1);
1310 rtx prev_insn;
1311
1312 /* First, look to see if we put a constant in a register. */
1313 prev_insn = PREV_INSN (if_info->cond_earliest);
1314 if (prev_insn
1315 && INSN_P (prev_insn)
1316 && GET_CODE (PATTERN (prev_insn)) == SET)
1317 {
1318 rtx src = find_reg_equal_equiv_note (prev_insn);
1319 if (!src)
1320 src = SET_SRC (PATTERN (prev_insn));
1321 if (GET_CODE (src) == CONST_INT)
1322 {
1323 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1324 op_a = src;
1325 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1326 op_b = src;
1327
1328 if (GET_CODE (op_a) == CONST_INT)
1329 {
1330 rtx tmp = op_a;
1331 op_a = op_b;
1332 op_b = tmp;
1333 code = swap_condition (code);
1334 }
1335 }
1336 }
1337
1338 /* Now, look to see if we can get the right constant by
1339 adjusting the conditional. */
1340 if (GET_CODE (op_b) == CONST_INT)
1341 {
1342 HOST_WIDE_INT desired_val = INTVAL (target);
1343 HOST_WIDE_INT actual_val = INTVAL (op_b);
1344
1345 switch (code)
1346 {
1347 case LT:
1348 if (actual_val == desired_val + 1)
1349 {
1350 code = LE;
1351 op_b = GEN_INT (desired_val);
1352 }
1353 break;
1354 case LE:
1355 if (actual_val == desired_val - 1)
1356 {
1357 code = LT;
1358 op_b = GEN_INT (desired_val);
1359 }
1360 break;
1361 case GT:
1362 if (actual_val == desired_val - 1)
1363 {
1364 code = GE;
1365 op_b = GEN_INT (desired_val);
1366 }
1367 break;
1368 case GE:
1369 if (actual_val == desired_val + 1)
1370 {
1371 code = GT;
1372 op_b = GEN_INT (desired_val);
1373 }
1374 break;
1375 default:
1376 break;
1377 }
1378 }
1379
1380 /* If we made any changes, generate a new conditional that is
1381 equivalent to what we started with, but has the right
1382 constants in it. */
1383 if (code != GET_CODE (if_info->cond)
1384 || op_a != XEXP (if_info->cond, 0)
1385 || op_b != XEXP (if_info->cond, 1))
1386 {
1387 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1388 *earliest = if_info->cond_earliest;
1389 return cond;
1390 }
1391 }
1392
1393 cond = canonicalize_condition (if_info->jump, cond, reverse,
1394 earliest, target);
1395 if (! cond || ! reg_mentioned_p (target, cond))
1396 return NULL;
1397
1398 /* We almost certainly searched back to a different place.
1399 Need to re-verify correct lifetimes. */
1400
1401 /* X may not be mentioned in the range (cond_earliest, jump]. */
1402 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1403 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1404 return NULL;
1405
1406 /* A and B may not be modified in the range [cond_earliest, jump). */
1407 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1408 if (INSN_P (insn)
1409 && (modified_in_p (if_info->a, insn)
1410 || modified_in_p (if_info->b, insn)))
1411 return NULL;
1412
1413 return cond;
1414 }
1415
1416 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1417
1418 static int
1419 noce_try_minmax (if_info)
1420 struct noce_if_info *if_info;
1421 {
1422 rtx cond, earliest, target, seq;
1423 enum rtx_code code, op;
1424 int unsignedp;
1425
1426 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1427 if (no_new_pseudos)
1428 return FALSE;
1429
1430 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1431 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1432 to get the target to tell us... */
1433 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1434 || HONOR_NANS (GET_MODE (if_info->x)))
1435 return FALSE;
1436
1437 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1438 if (!cond)
1439 return FALSE;
1440
1441 /* Verify the condition is of the form we expect, and canonicalize
1442 the comparison code. */
1443 code = GET_CODE (cond);
1444 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1445 {
1446 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1447 return FALSE;
1448 }
1449 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1450 {
1451 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1452 return FALSE;
1453 code = swap_condition (code);
1454 }
1455 else
1456 return FALSE;
1457
1458 /* Determine what sort of operation this is. Note that the code is for
1459 a taken branch, so the code->operation mapping appears backwards. */
1460 switch (code)
1461 {
1462 case LT:
1463 case LE:
1464 case UNLT:
1465 case UNLE:
1466 op = SMAX;
1467 unsignedp = 0;
1468 break;
1469 case GT:
1470 case GE:
1471 case UNGT:
1472 case UNGE:
1473 op = SMIN;
1474 unsignedp = 0;
1475 break;
1476 case LTU:
1477 case LEU:
1478 op = UMAX;
1479 unsignedp = 1;
1480 break;
1481 case GTU:
1482 case GEU:
1483 op = UMIN;
1484 unsignedp = 1;
1485 break;
1486 default:
1487 return FALSE;
1488 }
1489
1490 start_sequence ();
1491
1492 target = expand_simple_binop (GET_MODE (if_info->x), op,
1493 if_info->a, if_info->b,
1494 if_info->x, unsignedp, OPTAB_WIDEN);
1495 if (! target)
1496 {
1497 end_sequence ();
1498 return FALSE;
1499 }
1500 if (target != if_info->x)
1501 noce_emit_move_insn (if_info->x, target);
1502
1503 seq = get_insns ();
1504 end_sequence ();
1505
1506 if (seq_contains_jump (seq))
1507 return FALSE;
1508
1509 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1510 if_info->cond = cond;
1511 if_info->cond_earliest = earliest;
1512
1513 return TRUE;
1514 }
1515
1516 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1517
1518 static int
1519 noce_try_abs (if_info)
1520 struct noce_if_info *if_info;
1521 {
1522 rtx cond, earliest, target, seq, a, b, c;
1523 int negate;
1524
1525 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1526 if (no_new_pseudos)
1527 return FALSE;
1528
1529 /* Recognize A and B as constituting an ABS or NABS. */
1530 a = if_info->a;
1531 b = if_info->b;
1532 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1533 negate = 0;
1534 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1535 {
1536 c = a; a = b; b = c;
1537 negate = 1;
1538 }
1539 else
1540 return FALSE;
1541
1542 cond = noce_get_alt_condition (if_info, b, &earliest);
1543 if (!cond)
1544 return FALSE;
1545
1546 /* Verify the condition is of the form we expect. */
1547 if (rtx_equal_p (XEXP (cond, 0), b))
1548 c = XEXP (cond, 1);
1549 else if (rtx_equal_p (XEXP (cond, 1), b))
1550 c = XEXP (cond, 0);
1551 else
1552 return FALSE;
1553
1554 /* Verify that C is zero. Search backward through the block for
1555 a REG_EQUAL note if necessary. */
1556 if (REG_P (c))
1557 {
1558 rtx insn, note = NULL;
1559 for (insn = earliest;
1560 insn != if_info->test_bb->head;
1561 insn = PREV_INSN (insn))
1562 if (INSN_P (insn)
1563 && ((note = find_reg_note (insn, REG_EQUAL, c))
1564 || (note = find_reg_note (insn, REG_EQUIV, c))))
1565 break;
1566 if (! note)
1567 return FALSE;
1568 c = XEXP (note, 0);
1569 }
1570 if (GET_CODE (c) == MEM
1571 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1572 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1573 c = get_pool_constant (XEXP (c, 0));
1574
1575 /* Work around funny ideas get_condition has wrt canonicalization.
1576 Note that these rtx constants are known to be CONST_INT, and
1577 therefore imply integer comparisons. */
1578 if (c == constm1_rtx && GET_CODE (cond) == GT)
1579 ;
1580 else if (c == const1_rtx && GET_CODE (cond) == LT)
1581 ;
1582 else if (c != CONST0_RTX (GET_MODE (b)))
1583 return FALSE;
1584
1585 /* Determine what sort of operation this is. */
1586 switch (GET_CODE (cond))
1587 {
1588 case LT:
1589 case LE:
1590 case UNLT:
1591 case UNLE:
1592 negate = !negate;
1593 break;
1594 case GT:
1595 case GE:
1596 case UNGT:
1597 case UNGE:
1598 break;
1599 default:
1600 return FALSE;
1601 }
1602
1603 start_sequence ();
1604
1605 target = expand_simple_unop (GET_MODE (if_info->x), ABS, b, if_info->x, 0);
1606
1607 /* ??? It's a quandary whether cmove would be better here, especially
1608 for integers. Perhaps combine will clean things up. */
1609 if (target && negate)
1610 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1611
1612 if (! target)
1613 {
1614 end_sequence ();
1615 return FALSE;
1616 }
1617
1618 if (target != if_info->x)
1619 noce_emit_move_insn (if_info->x, target);
1620
1621 seq = get_insns ();
1622 end_sequence ();
1623
1624 if (seq_contains_jump (seq))
1625 return FALSE;
1626
1627 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1628 if_info->cond = cond;
1629 if_info->cond_earliest = earliest;
1630
1631 return TRUE;
1632 }
1633
1634 /* Similar to get_condition, only the resulting condition must be
1635 valid at JUMP, instead of at EARLIEST. */
1636
1637 static rtx
1638 noce_get_condition (jump, earliest)
1639 rtx jump;
1640 rtx *earliest;
1641 {
1642 rtx cond, set, tmp, insn;
1643 bool reverse;
1644
1645 if (! any_condjump_p (jump))
1646 return NULL_RTX;
1647
1648 set = pc_set (jump);
1649
1650 /* If this branches to JUMP_LABEL when the condition is false,
1651 reverse the condition. */
1652 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1653 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
1654
1655 /* If the condition variable is a register and is MODE_INT, accept it. */
1656
1657 cond = XEXP (SET_SRC (set), 0);
1658 tmp = XEXP (cond, 0);
1659 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
1660 {
1661 *earliest = jump;
1662
1663 if (reverse)
1664 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1665 GET_MODE (cond), tmp, XEXP (cond, 1));
1666 return cond;
1667 }
1668
1669 /* Otherwise, fall back on canonicalize_condition to do the dirty
1670 work of manipulating MODE_CC values and COMPARE rtx codes. */
1671
1672 tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX);
1673 if (!tmp)
1674 return NULL_RTX;
1675
1676 /* We are going to insert code before JUMP, not before EARLIEST.
1677 We must therefore be certain that the given condition is valid
1678 at JUMP by virtue of not having been modified since. */
1679 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1680 if (INSN_P (insn) && modified_in_p (tmp, insn))
1681 break;
1682 if (insn == jump)
1683 return tmp;
1684
1685 /* The condition was modified. See if we can get a partial result
1686 that doesn't follow all the reversals. Perhaps combine can fold
1687 them together later. */
1688 tmp = XEXP (tmp, 0);
1689 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
1690 return NULL_RTX;
1691 tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp);
1692 if (!tmp)
1693 return NULL_RTX;
1694
1695 /* For sanity's sake, re-validate the new result. */
1696 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1697 if (INSN_P (insn) && modified_in_p (tmp, insn))
1698 return NULL_RTX;
1699
1700 return tmp;
1701 }
1702
1703 /* Return true if OP is ok for if-then-else processing. */
1704
1705 static int
1706 noce_operand_ok (op)
1707 rtx op;
1708 {
1709 /* We special-case memories, so handle any of them with
1710 no address side effects. */
1711 if (GET_CODE (op) == MEM)
1712 return ! side_effects_p (XEXP (op, 0));
1713
1714 if (side_effects_p (op))
1715 return FALSE;
1716
1717 return ! may_trap_p (op);
1718 }
1719
1720 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1721 without using conditional execution. Return TRUE if we were
1722 successful at converting the block. */
1723
1724 static int
1725 noce_process_if_block (ce_info)
1726 struct ce_if_block * ce_info;
1727 {
1728 basic_block test_bb = ce_info->test_bb; /* test block */
1729 basic_block then_bb = ce_info->then_bb; /* THEN */
1730 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1731 struct noce_if_info if_info;
1732 rtx insn_a, insn_b;
1733 rtx set_a, set_b;
1734 rtx orig_x, x, a, b;
1735 rtx jump, cond;
1736
1737 /* We're looking for patterns of the form
1738
1739 (1) if (...) x = a; else x = b;
1740 (2) x = b; if (...) x = a;
1741 (3) if (...) x = a; // as if with an initial x = x.
1742
1743 The latter patterns require jumps to be more expensive.
1744
1745 ??? For future expansion, look for multiple X in such patterns. */
1746
1747 /* If test is comprised of && or || elements, don't handle it unless it is
1748 the special case of && elements without an ELSE block. */
1749 if (ce_info->num_multiple_test_blocks)
1750 {
1751 if (else_bb || ! ce_info->and_and_p)
1752 return FALSE;
1753
1754 ce_info->test_bb = test_bb = ce_info->last_test_bb;
1755 ce_info->num_multiple_test_blocks = 0;
1756 ce_info->num_and_and_blocks = 0;
1757 ce_info->num_or_or_blocks = 0;
1758 }
1759
1760 /* If this is not a standard conditional jump, we can't parse it. */
1761 jump = test_bb->end;
1762 cond = noce_get_condition (jump, &if_info.cond_earliest);
1763 if (! cond)
1764 return FALSE;
1765
1766 /* If the conditional jump is more than just a conditional
1767 jump, then we can not do if-conversion on this block. */
1768 if (! onlyjump_p (jump))
1769 return FALSE;
1770
1771 /* We must be comparing objects whose modes imply the size. */
1772 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1773 return FALSE;
1774
1775 /* Look for one of the potential sets. */
1776 insn_a = first_active_insn (then_bb);
1777 if (! insn_a
1778 || insn_a != last_active_insn (then_bb, FALSE)
1779 || (set_a = single_set (insn_a)) == NULL_RTX)
1780 return FALSE;
1781
1782 x = SET_DEST (set_a);
1783 a = SET_SRC (set_a);
1784
1785 /* Look for the other potential set. Make sure we've got equivalent
1786 destinations. */
1787 /* ??? This is overconservative. Storing to two different mems is
1788 as easy as conditionally computing the address. Storing to a
1789 single mem merely requires a scratch memory to use as one of the
1790 destination addresses; often the memory immediately below the
1791 stack pointer is available for this. */
1792 set_b = NULL_RTX;
1793 if (else_bb)
1794 {
1795 insn_b = first_active_insn (else_bb);
1796 if (! insn_b
1797 || insn_b != last_active_insn (else_bb, FALSE)
1798 || (set_b = single_set (insn_b)) == NULL_RTX
1799 || ! rtx_equal_p (x, SET_DEST (set_b)))
1800 return FALSE;
1801 }
1802 else
1803 {
1804 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1805 if (! insn_b
1806 || GET_CODE (insn_b) != INSN
1807 || (set_b = single_set (insn_b)) == NULL_RTX
1808 || ! rtx_equal_p (x, SET_DEST (set_b))
1809 || reg_overlap_mentioned_p (x, cond)
1810 || reg_overlap_mentioned_p (x, a)
1811 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
1812 || modified_between_p (x, if_info.cond_earliest, NEXT_INSN (jump)))
1813 insn_b = set_b = NULL_RTX;
1814 }
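/* If no usable prior set of X was found, B is just X itself, i.e.
   pattern (3) above.  */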
1815 b = (set_b ? SET_SRC (set_b) : x);
1816
1817 /* Only operate on register destinations, and even then avoid extending
1818 the lifetime of hard registers on small register class machines. */
1819 orig_x = x;
1820 if (GET_CODE (x) != REG
1821 || (SMALL_REGISTER_CLASSES
1822 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1823 {
1824 if (no_new_pseudos)
1825 return FALSE;
1826 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1827 ? XEXP (x, 0) : x));
1828 }
1829
1830 /* Don't operate on sources that may trap or are volatile. */
1831 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1832 return FALSE;
1833
1834 /* Set up the info block for our subroutines. */
1835 if_info.test_bb = test_bb;
1836 if_info.cond = cond;
1837 if_info.jump = jump;
1838 if_info.insn_a = insn_a;
1839 if_info.insn_b = insn_b;
1840 if_info.x = x;
1841 if_info.a = a;
1842 if_info.b = b;
1843
1844 /* Try optimizations in some approximation of a useful order. */
1845 /* ??? Should first look to see if X is live incoming at all. If it
1846 isn't, we don't need anything but an unconditional set. */
1847
1848 /* Look and see if A and B are really the same. Avoid creating silly
1849 cmove constructs that no one will fix up later. */
1850 if (rtx_equal_p (a, b))
1851 {
1852 /* If we have an INSN_B, we don't have to create any new rtl. Just
1853 move the instruction that we already have. If we don't have an
1854 INSN_B, that means that A == X, and we've got a noop move. In
1855 that case don't do anything and let the code below delete INSN_A. */
1856 if (insn_b && else_bb)
1857 {
1858 rtx note;
1859
1860 if (else_bb && insn_b == else_bb->end)
1861 else_bb->end = PREV_INSN (insn_b);
1862 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
1863
1864 /* If there was a REG_EQUAL note, delete it since it may have been
1865 true due to this insn being after a jump. */
1866 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1867 remove_note (insn_b, note);
1868
1869 insn_b = NULL_RTX;
1870 }
1871 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1872 x must be executed twice. */
1873 else if (insn_b && side_effects_p (orig_x))
1874 return FALSE;
1875
1876 x = orig_x;
1877 goto success;
1878 }
1879
1880 if (noce_try_store_flag (&if_info))
1881 goto success;
1882 if (noce_try_minmax (&if_info))
1883 goto success;
1884 if (noce_try_abs (&if_info))
1885 goto success;
1886 if (HAVE_conditional_move
1887 && noce_try_cmove (&if_info))
1888 goto success;
1889 if (! HAVE_conditional_execution)
1890 {
1891 if (noce_try_store_flag_constants (&if_info))
1892 goto success;
1893 if (noce_try_addcc (&if_info))
1894 goto success;
1895 if (noce_try_store_flag_mask (&if_info))
1896 goto success;
1897 if (HAVE_conditional_move
1898 && noce_try_cmove_arith (&if_info))
1899 goto success;
1900 }
1901
1902 return FALSE;
1903
1904 success:
1905 /* The original sets may now be killed. */
1906 delete_insn (insn_a);
1907
1908 /* Several special cases here: First, we may have reused insn_b above,
1909 in which case insn_b is now NULL. Second, we want to delete insn_b
1910 if it came from the ELSE block, because it follows the now-correct
1911 write that appears in the TEST block. However, if we got insn_b from
1912 the TEST block, it may in fact be loading data needed for the comparison.
1913 We'll let life_analysis remove the insn if it's really dead. */
1914 if (insn_b && else_bb)
1915 delete_insn (insn_b);
1916
1917 /* The new insns will have been inserted immediately before the jump. We
1918 should be able to remove the jump with impunity, but the condition itself
1919 may have been modified by gcse to be shared across basic blocks. */
1920 delete_insn (jump);
1921
1922 /* If we used a temporary, fix it up now. */
1923 if (orig_x != x)
1924 {
1925 start_sequence ();
1926 noce_emit_move_insn (copy_rtx (orig_x), x);
1927 insn_b = get_insns ();
1928 end_sequence ();
1929
1930 emit_insn_after_scope (insn_b, test_bb->end, INSN_SCOPE (insn_a));
1931 }
1932
1933 /* Merge the blocks! */
1934 merge_if_block (ce_info);
1935
1936 return TRUE;
1937 }
1938 \f
1939 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1940 straight line code. Return true if successful. */
1941
1942 static int
1943 process_if_block (ce_info)
1944 struct ce_if_block * ce_info;
1945 {
1946 if (! reload_completed
1947 && noce_process_if_block (ce_info))
1948 return TRUE;
1949
1950 if (HAVE_conditional_execution && reload_completed)
1951 {
1952 /* If we have && and || tests, try to first handle combining the && and
1953 || tests into the conditional code, and if that fails, go back and
1954 handle it without the && and ||, which at present handles the && case
1955 if there was no ELSE block. */
1956 if (cond_exec_process_if_block (ce_info, TRUE))
1957 return TRUE;
1958
1959 if (ce_info->num_multiple_test_blocks)
1960 {
1961 cancel_changes (0);
1962
1963 if (cond_exec_process_if_block (ce_info, FALSE))
1964 return TRUE;
1965 }
1966 }
1967
1968 return FALSE;
1969 }
1970
1971 /* Merge the blocks and mark for local life update. */
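/* As an illustrative sketch, once the conditional code has been rewritten
   the diamond

         TEST
        /    \
      THEN  ELSE
        \    /
         JOIN

   collapses into a single COMBO block; all merging is done into the
   lowest-numbered (TEST) block, with any && and || sub-test blocks folded
   in first.  */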
1972
1973 static void
1974 merge_if_block (ce_info)
1975 struct ce_if_block * ce_info;
1976 {
1977 basic_block test_bb = ce_info->test_bb; /* last test block */
1978 basic_block then_bb = ce_info->then_bb; /* THEN */
1979 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1980 basic_block join_bb = ce_info->join_bb; /* join block */
1981 basic_block combo_bb;
1982
1983 /* All block merging is done into the lower block numbers. */
1984
1985 combo_bb = test_bb;
1986
1987 /* Merge any basic blocks to handle && and || subtests. Each of
1988 the blocks is on the fallthru path from the predecessor block. */
1989 if (ce_info->num_multiple_test_blocks > 0)
1990 {
1991 basic_block bb = test_bb;
1992 basic_block last_test_bb = ce_info->last_test_bb;
1993 basic_block fallthru = block_fallthru (bb);
1994
1995 do
1996 {
1997 bb = fallthru;
1998 fallthru = block_fallthru (bb);
1999 if (post_dominators)
2000 delete_from_dominance_info (post_dominators, bb);
2001 merge_blocks_nomove (combo_bb, bb);
2002 num_removed_blocks++;
2003 }
2004 while (bb != last_test_bb);
2005 }
2006
2007 /* Merge TEST block into THEN block. Normally the THEN block won't have a
2008 label, but it might if there were || tests. That label's count should be
2009 zero, and it normally should be removed. */
2010
2011 if (then_bb)
2012 {
2013 if (combo_bb->global_live_at_end)
2014 COPY_REG_SET (combo_bb->global_live_at_end,
2015 then_bb->global_live_at_end);
2016 if (post_dominators)
2017 delete_from_dominance_info (post_dominators, then_bb);
2018 merge_blocks_nomove (combo_bb, then_bb);
2019 num_removed_blocks++;
2020 }
2021
2022 /* The ELSE block, if it existed, had a label. That label's count
2023 will almost always be zero, but odd things can happen when labels
2024 get their addresses taken. */
2025 if (else_bb)
2026 {
2027 if (post_dominators)
2028 delete_from_dominance_info (post_dominators, else_bb);
2029 merge_blocks_nomove (combo_bb, else_bb);
2030 num_removed_blocks++;
2031 }
2032
2033 /* If there was no join block reported, that means it was not adjacent
2034 to the others, and so we cannot merge them. */
2035
2036 if (! join_bb)
2037 {
2038 rtx last = combo_bb->end;
2039
2040 /* The outgoing edge for the current COMBO block should already
2041 be correct. Verify this. */
2042 if (combo_bb->succ == NULL_EDGE)
2043 {
2044 if (find_reg_note (last, REG_NORETURN, NULL))
2045 ;
2046 else if (GET_CODE (last) == INSN
2047 && GET_CODE (PATTERN (last)) == TRAP_IF
2048 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
2049 ;
2050 else
2051 abort ();
2052 }
2053
2054 /* There should still be something at the end of the THEN or ELSE
2055 blocks taking us to our final destination. */
2056 else if (GET_CODE (last) == JUMP_INSN)
2057 ;
2058 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
2059 && GET_CODE (last) == CALL_INSN
2060 && SIBLING_CALL_P (last))
2061 ;
2062 else if ((combo_bb->succ->flags & EDGE_EH)
2063 && can_throw_internal (last))
2064 ;
2065 else
2066 abort ();
2067 }
2068
2069 /* The JOIN block may have had quite a number of other predecessors too.
2070 Since we've already merged the TEST, THEN and ELSE blocks, we should
2071 have only one remaining edge from our if-then-else diamond. If there
2072 is more than one remaining edge, it must come from elsewhere. There
2073 may be zero incoming edges if the THEN block didn't actually join
2074 back up (as with a call to abort). */
2075 else if ((join_bb->pred == NULL
2076 || join_bb->pred->pred_next == NULL)
2077 && join_bb != EXIT_BLOCK_PTR)
2078 {
2079 /* We can merge the JOIN. */
2080 if (combo_bb->global_live_at_end)
2081 COPY_REG_SET (combo_bb->global_live_at_end,
2082 join_bb->global_live_at_end);
2083
2084 if (post_dominators)
2085 delete_from_dominance_info (post_dominators, join_bb);
2086 merge_blocks_nomove (combo_bb, join_bb);
2087 num_removed_blocks++;
2088 }
2089 else
2090 {
2091 /* We cannot merge the JOIN. */
2092
2093 /* The outgoing edge for the current COMBO block should already
2094 be correct. Verify this. */
2095 if (combo_bb->succ->succ_next != NULL_EDGE
2096 || combo_bb->succ->dest != join_bb)
2097 abort ();
2098
2099 /* Remove the jump and cruft from the end of the COMBO block. */
2100 if (join_bb != EXIT_BLOCK_PTR)
2101 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
2102 }
2103
2104 num_updated_if_blocks++;
2105 }
2106 \f
2107 /* Find a block ending in a simple IF condition and try to transform it
2108 in some way. When converting a multi-block condition, put the new code
2109 in the first such block and delete the rest. Return a pointer to this
2110 first block if some transformation was done. Return NULL otherwise. */
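/* As an illustrative sketch, the header we are looking for ends in a
   conditional jump with exactly two non-complex successor edges:

       test_bb:  ...
                 if (cond) goto ELSE_LABEL;
       then_bb:  ...                          <- fallthru edge
       ELSE_LABEL:
       else_bb:  ...                          <- jump edge

   (ELSE_LABEL is just a stand-in name.)  The edge that falls through is
   canonically treated as the THEN edge; if neither edge falls through,
   this is some kind of multiway branch and we give up.  */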
2111
2112 static basic_block
2113 find_if_header (test_bb, pass)
2114 basic_block test_bb;
2115 int pass;
2116 {
2117 ce_if_block_t ce_info;
2118 edge then_edge;
2119 edge else_edge;
2120
2121 /* The kind of block we're looking for has exactly two successors. */
2122 if ((then_edge = test_bb->succ) == NULL_EDGE
2123 || (else_edge = then_edge->succ_next) == NULL_EDGE
2124 || else_edge->succ_next != NULL_EDGE)
2125 return NULL;
2126
2127 /* Neither edge should be abnormal. */
2128 if ((then_edge->flags & EDGE_COMPLEX)
2129 || (else_edge->flags & EDGE_COMPLEX))
2130 return NULL;
2131
2132 /* The THEN edge is canonically the one that falls through. */
2133 if (then_edge->flags & EDGE_FALLTHRU)
2134 ;
2135 else if (else_edge->flags & EDGE_FALLTHRU)
2136 {
2137 edge e = else_edge;
2138 else_edge = then_edge;
2139 then_edge = e;
2140 }
2141 else
2142 /* Otherwise this must be a multiway branch of some sort. */
2143 return NULL;
2144
2145 memset ((PTR) &ce_info, '\0', sizeof (ce_info));
2146 ce_info.test_bb = test_bb;
2147 ce_info.then_bb = then_edge->dest;
2148 ce_info.else_bb = else_edge->dest;
2149 ce_info.pass = pass;
2150
2151 #ifdef IFCVT_INIT_EXTRA_FIELDS
2152 IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2153 #endif
2154
2155 if (find_if_block (&ce_info))
2156 goto success;
2157
2158 if (HAVE_trap && HAVE_conditional_trap
2159 && find_cond_trap (test_bb, then_edge, else_edge))
2160 goto success;
2161
2162 if (post_dominators
2163 && (! HAVE_conditional_execution || reload_completed))
2164 {
2165 if (find_if_case_1 (test_bb, then_edge, else_edge))
2166 goto success;
2167 if (find_if_case_2 (test_bb, then_edge, else_edge))
2168 goto success;
2169 }
2170
2171 return NULL;
2172
2173 success:
2174 if (rtl_dump_file)
2175 fprintf (rtl_dump_file, "Conversion succeeded on pass %d.\n", pass);
2176 return ce_info.test_bb;
2177 }
2178
2179 /* Determine whether a block has two edges, one of which falls through to the
2180 next block and the other of which jumps to a specific block, so that we can
2181 tell if the block is part of an && test or an || test. Return either -1 or
2182 the number of non-note, non-jump, non-USE/CLOBBER insns in the block. */
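/* As an illustrative sketch, for "if (a && b) x = 1;" the block testing A
   falls through to the block testing B and jumps to the join point when A
   is false; a block with exactly that shape -- one fallthru edge plus one
   jump to TARGET_BB -- is a candidate && or || sub-test block.  */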
2183
2184 static int
2185 block_jumps_and_fallthru_p (cur_bb, target_bb)
2186 basic_block cur_bb;
2187 basic_block target_bb;
2188 {
2189 edge cur_edge;
2190 int fallthru_p = FALSE;
2191 int jump_p = FALSE;
2192 rtx insn;
2193 rtx end;
2194 int n_insns = 0;
2195
2196 if (!cur_bb || !target_bb)
2197 return -1;
2198
2199 /* If no edges, obviously it doesn't jump or fallthru. */
2200 if (cur_bb->succ == NULL_EDGE)
2201 return FALSE;
2202
2203 for (cur_edge = cur_bb->succ;
2204 cur_edge != NULL_EDGE;
2205 cur_edge = cur_edge->succ_next)
2206 {
2207 if (cur_edge->flags & EDGE_COMPLEX)
2208 /* Anything complex isn't what we want. */
2209 return -1;
2210
2211 else if (cur_edge->flags & EDGE_FALLTHRU)
2212 fallthru_p = TRUE;
2213
2214 else if (cur_edge->dest == target_bb)
2215 jump_p = TRUE;
2216
2217 else
2218 return -1;
2219 }
2220
2221 if ((jump_p & fallthru_p) == 0)
2222 return -1;
2223
2224 /* Don't allow calls in the block, since this is used to group && and ||
2225 together for conditional execution support. ??? We should support
2226 conditional execution across calls for IA-64 some day, but
2227 for now it makes the code simpler. */
2228 end = cur_bb->end;
2229 insn = cur_bb->head;
2230
2231 while (insn != NULL_RTX)
2232 {
2233 if (GET_CODE (insn) == CALL_INSN)
2234 return -1;
2235
2236 if (INSN_P (insn)
2237 && GET_CODE (insn) != JUMP_INSN
2238 && GET_CODE (PATTERN (insn)) != USE
2239 && GET_CODE (PATTERN (insn)) != CLOBBER)
2240 n_insns++;
2241
2242 if (insn == end)
2243 break;
2244
2245 insn = NEXT_INSN (insn);
2246 }
2247
2248 return n_insns;
2249 }
2250
2251 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2252 block. If so, we'll try to convert the insns to not require the branch.
2253 Return TRUE if we were successful at converting the block. */
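/* As an illustrative sketch, the two shapes accepted here are:

     IF-THEN:                        IF-THEN-ELSE:

       TEST ------.                    TEST -----.
        |         |                     |        |
       THEN       |                    THEN     ELSE
        |         |                     |        |
       JOIN <-----'                    JOIN <----'

   THEN must have exactly one predecessor (not counting || sub-test
   blocks) and ELSE, if present, exactly one; no edge may be complex;
   and the blocks must be laid out adjacently so merge_if_block can
   combine them.  */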
2254
2255 static int
2256 find_if_block (ce_info)
2257 struct ce_if_block * ce_info;
2258 {
2259 basic_block test_bb = ce_info->test_bb;
2260 basic_block then_bb = ce_info->then_bb;
2261 basic_block else_bb = ce_info->else_bb;
2262 basic_block join_bb = NULL_BLOCK;
2263 edge then_succ = then_bb->succ;
2264 edge else_succ = else_bb->succ;
2265 int then_predecessors;
2266 int else_predecessors;
2267 edge cur_edge;
2268 basic_block next;
2269
2270 ce_info->last_test_bb = test_bb;
2271
2272 /* Discover if any fall through predecessors of the current test basic block
2273 were && tests (which jump to the else block) or || tests (which jump to
2274 the then block). */
2275 if (HAVE_conditional_execution && reload_completed
2276 && test_bb->pred != NULL_EDGE
2277 && test_bb->pred->pred_next == NULL_EDGE
2278 && test_bb->pred->flags == EDGE_FALLTHRU)
2279 {
2280 basic_block bb = test_bb->pred->src;
2281 basic_block target_bb;
2282 int max_insns = MAX_CONDITIONAL_EXECUTE;
2283 int n_insns;
2284
2285 /* Determine if the preceding block is an && or || block. */
2286 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
2287 {
2288 ce_info->and_and_p = TRUE;
2289 target_bb = else_bb;
2290 }
2291 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
2292 {
2293 ce_info->and_and_p = FALSE;
2294 target_bb = then_bb;
2295 }
2296 else
2297 target_bb = NULL_BLOCK;
2298
2299 if (target_bb && n_insns <= max_insns)
2300 {
2301 int total_insns = 0;
2302 int blocks = 0;
2303
2304 ce_info->last_test_bb = test_bb;
2305
2306 /* Found at least one && or || block, look for more. */
2307 do
2308 {
2309 ce_info->test_bb = test_bb = bb;
2310 total_insns += n_insns;
2311 blocks++;
2312
2313 if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
2314 break;
2315
2316 bb = bb->pred->src;
2317 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
2318 }
2319 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
2320
2321 ce_info->num_multiple_test_blocks = blocks;
2322 ce_info->num_multiple_test_insns = total_insns;
2323
2324 if (ce_info->and_and_p)
2325 ce_info->num_and_and_blocks = blocks;
2326 else
2327 ce_info->num_or_or_blocks = blocks;
2328 }
2329 }
2330
2331 /* Count the number of edges the THEN and ELSE blocks have. */
2332 then_predecessors = 0;
2333 for (cur_edge = then_bb->pred;
2334 cur_edge != NULL_EDGE;
2335 cur_edge = cur_edge->pred_next)
2336 {
2337 then_predecessors++;
2338 if (cur_edge->flags & EDGE_COMPLEX)
2339 return FALSE;
2340 }
2341
2342 else_predecessors = 0;
2343 for (cur_edge = else_bb->pred;
2344 cur_edge != NULL_EDGE;
2345 cur_edge = cur_edge->pred_next)
2346 {
2347 else_predecessors++;
2348 if (cur_edge->flags & EDGE_COMPLEX)
2349 return FALSE;
2350 }
2351
2352 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
2353 other than any || blocks which jump to the THEN block. */
2354 if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
2355 return FALSE;
2356
2357 /* The THEN block of an IF-THEN combo must have zero or one successors. */
2358 if (then_succ != NULL_EDGE
2359 && (then_succ->succ_next != NULL_EDGE
2360 || (then_succ->flags & EDGE_COMPLEX)))
2361 return FALSE;
2362
2363 /* If the THEN block has no successors, conditional execution can still
2364 make a conditional call. Don't do this unless the ELSE block has
2365 only one incoming edge -- the CFG manipulation is too ugly otherwise.
2366 Check for the last insn of the THEN block being an indirect jump, which
2367 is listed as not having any successors, but confuses the rest of the CE
2368 code processing. ??? we should fix this in the future. */
2369 if (then_succ == NULL)
2370 {
2371 if (else_bb->pred->pred_next == NULL_EDGE)
2372 {
2373 rtx last_insn = then_bb->end;
2374
2375 while (last_insn
2376 && GET_CODE (last_insn) == NOTE
2377 && last_insn != then_bb->head)
2378 last_insn = PREV_INSN (last_insn);
2379
2380 if (last_insn
2381 && GET_CODE (last_insn) == JUMP_INSN
2382 && ! simplejump_p (last_insn))
2383 return FALSE;
2384
2385 join_bb = else_bb;
2386 else_bb = NULL_BLOCK;
2387 }
2388 else
2389 return FALSE;
2390 }
2391
2392 /* If the THEN block's successor is the other edge out of the TEST block,
2393 then we have an IF-THEN combo without an ELSE. */
2394 else if (then_succ->dest == else_bb)
2395 {
2396 join_bb = else_bb;
2397 else_bb = NULL_BLOCK;
2398 }
2399
2400 /* If the THEN and ELSE blocks meet in a subsequent block, and the ELSE
2401 has exactly one predecessor and one successor, and the outgoing edge
2402 is not complex, then we have an IF-THEN-ELSE combo. */
2403 else if (else_succ != NULL_EDGE
2404 && then_succ->dest == else_succ->dest
2405 && else_bb->pred->pred_next == NULL_EDGE
2406 && else_succ->succ_next == NULL_EDGE
2407 && ! (else_succ->flags & EDGE_COMPLEX))
2408 join_bb = else_succ->dest;
2409
2410 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2411 else
2412 return FALSE;
2413
2414 num_possible_if_blocks++;
2415
2416 if (rtl_dump_file)
2417 {
2418 fprintf (rtl_dump_file, "\nIF-THEN%s block found, pass %d, start block %d [insn %d], then %d [%d]",
2419 (else_bb) ? "-ELSE" : "",
2420 ce_info->pass,
2421 test_bb->index, (test_bb->head) ? (int)INSN_UID (test_bb->head) : -1,
2422 then_bb->index, (then_bb->head) ? (int)INSN_UID (then_bb->head) : -1);
2423
2424 if (else_bb)
2425 fprintf (rtl_dump_file, ", else %d [%d]",
2426 else_bb->index, (else_bb->head) ? (int)INSN_UID (else_bb->head) : -1);
2427
2428 fprintf (rtl_dump_file, ", join %d [%d]",
2429 join_bb->index, (join_bb->head) ? (int)INSN_UID (join_bb->head) : -1);
2430
2431 if (ce_info->num_multiple_test_blocks > 0)
2432 fprintf (rtl_dump_file, ", %d %s block%s last test %d [%d]",
2433 ce_info->num_multiple_test_blocks,
2434 (ce_info->and_and_p) ? "&&" : "||",
2435 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
2436 ce_info->last_test_bb->index,
2437 ((ce_info->last_test_bb->head)
2438 ? (int)INSN_UID (ce_info->last_test_bb->head)
2439 : -1));
2440
2441 fputc ('\n', rtl_dump_file);
2442 }
2443
2444 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
2445 first condition for free, since we've already asserted that there's a
2446 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
2447 we checked the FALLTHRU flag, those are already adjacent to the last IF
2448 block. */
2449 /* ??? As an enhancement, move the ELSE block. Have to deal with
2450 BLOCK notes, if by no other means than aborting the merge if they
2451 exist. Sticky enough I don't want to think about it now. */
2452 next = then_bb;
2453 if (else_bb && (next = next->next_bb) != else_bb)
2454 return FALSE;
2455 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2456 {
2457 if (else_bb)
2458 join_bb = NULL;
2459 else
2460 return FALSE;
2461 }
2462
2463 /* Do the real work. */
2464 ce_info->else_bb = else_bb;
2465 ce_info->join_bb = join_bb;
2466
2467 return process_if_block (ce_info);
2468 }
2469
2470 /* Convert a branch over a trap, or a branch
2471 to a trap, into a conditional trap. */
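/* As an illustrative sketch (source level; the actual insn comes from the
   target's conditional trap pattern), a branch around a trap such as

       if (x != 0)
         __builtin_trap ();
       y = z;

   becomes a single conditional trap followed by the fall-through code:

       trap_if (x != 0);
       y = z;                                                              */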
2472
2473 static int
2474 find_cond_trap (test_bb, then_edge, else_edge)
2475 basic_block test_bb;
2476 edge then_edge, else_edge;
2477 {
2478 basic_block then_bb = then_edge->dest;
2479 basic_block else_bb = else_edge->dest;
2480 basic_block other_bb, trap_bb;
2481 rtx trap, jump, cond, cond_earliest, seq;
2482 enum rtx_code code;
2483
2484 /* Locate the block with the trap instruction. */
2485 /* ??? While we look for no successors, we really ought to allow
2486 EH successors. Need to fix merge_if_block for that to work. */
2487 if ((trap = block_has_only_trap (then_bb)) != NULL)
2488 trap_bb = then_bb, other_bb = else_bb;
2489 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2490 trap_bb = else_bb, other_bb = then_bb;
2491 else
2492 return FALSE;
2493
2494 if (rtl_dump_file)
2495 {
2496 fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2497 test_bb->index, trap_bb->index);
2498 }
2499
2500 /* If this is not a standard conditional jump, we can't parse it. */
2501 jump = test_bb->end;
2502 cond = noce_get_condition (jump, &cond_earliest);
2503 if (! cond)
2504 return FALSE;
2505
2506 /* If the conditional jump is more than just a conditional jump, then
2507 we cannot do if-conversion on this block. */
2508 if (! onlyjump_p (jump))
2509 return FALSE;
2510
2511 /* We must be comparing objects whose modes imply the size. */
2512 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2513 return FALSE;
2514
2515 /* Reverse the comparison code, if necessary. */
2516 code = GET_CODE (cond);
2517 if (then_bb == trap_bb)
2518 {
2519 code = reversed_comparison_code (cond, jump);
2520 if (code == UNKNOWN)
2521 return FALSE;
2522 }
2523
2524 /* Attempt to generate the conditional trap. */
2525 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2526 TRAP_CODE (PATTERN (trap)));
2527 if (seq == NULL)
2528 return FALSE;
2529
2530 /* Emit the new insns before cond_earliest. */
2531 emit_insn_before_scope (seq, cond_earliest, INSN_SCOPE (trap));
2532
2533 /* Delete the trap block if possible. */
2534 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2535 if (trap_bb->pred == NULL)
2536 {
2537 if (post_dominators)
2538 delete_from_dominance_info (post_dominators, trap_bb);
2539 flow_delete_block (trap_bb);
2540 num_removed_blocks++;
2541 }
2542
2543 /* If the non-trap block and the test are now adjacent, merge them.
2544 Otherwise we must insert a direct branch. */
2545 if (test_bb->next_bb == other_bb)
2546 {
2547 struct ce_if_block new_ce_info;
2548 delete_insn (jump);
2549 memset ((PTR) &new_ce_info, '\0', sizeof (new_ce_info));
2550 new_ce_info.test_bb = test_bb;
2551 new_ce_info.then_bb = NULL;
2552 new_ce_info.else_bb = NULL;
2553 new_ce_info.join_bb = other_bb;
2554 merge_if_block (&new_ce_info);
2555 }
2556 else
2557 {
2558 rtx lab, newjump;
2559
2560 lab = JUMP_LABEL (jump);
2561 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2562 LABEL_NUSES (lab) += 1;
2563 JUMP_LABEL (newjump) = lab;
2564 emit_barrier_after (newjump);
2565
2566 delete_insn (jump);
2567 }
2568
2569 return TRUE;
2570 }
2571
2572 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2573 return it. */
2574
2575 static rtx
2576 block_has_only_trap (bb)
2577 basic_block bb;
2578 {
2579 rtx trap;
2580
2581 /* We're not the exit block. */
2582 if (bb == EXIT_BLOCK_PTR)
2583 return NULL_RTX;
2584
2585 /* The block must have no successors. */
2586 if (bb->succ)
2587 return NULL_RTX;
2588
2589 /* The only instruction in the block must be the trap. */
2590 trap = first_active_insn (bb);
2591 if (! (trap == bb->end
2592 && GET_CODE (PATTERN (trap)) == TRAP_IF
2593 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2594 return NULL_RTX;
2595
2596 return trap;
2597 }
2598
2599 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2600 transformable, but not necessarily the other. There need be no
2601 JOIN block.
2602
2603 Return TRUE if we were successful at converting the block.
2604
2605 Cases we'd like to look at:
2606
2607 (1)
2608 if (test) goto over; // x not live
2609 x = a;
2610 goto label;
2611 over:
2612
2613 becomes
2614
2615 x = a;
2616 if (! test) goto label;
2617
2618 (2)
2619 if (test) goto E; // x not live
2620 x = big();
2621 goto L;
2622 E:
2623 x = b;
2624 goto M;
2625
2626 becomes
2627
2628 x = b;
2629 if (test) goto M;
2630 x = big();
2631 goto L;
2632
2633 (3) // This one's really only interesting for targets that can do
2634 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2635 // it results in multiple branches on a cache line, which often
2636 // does not sit well with predictors.
2637
2638 if (test1) goto E; // predicted not taken
2639 x = a;
2640 if (test2) goto F;
2641 ...
2642 E:
2643 x = b;
2644 J:
2645
2646 becomes
2647
2648 x = a;
2649 if (test1) goto E;
2650 if (test2) goto F;
2651
2652 Notes:
2653
2654 (A) Don't do (2) if the branch is predicted against the block we're
2655 eliminating. Do it anyway if we can eliminate a branch; this requires
2656 that the sole successor of the eliminated block postdominate the other
2657 side of the if.
2658
2659 (B) With CE, on (3) we can steal from both sides of the if, creating
2660
2661 if (test1) x = a;
2662 if (!test1) x = b;
2663 if (test1) goto J;
2664 if (test2) goto F;
2665 ...
2666 J:
2667
2668 Again, this is most useful if J postdominates.
2669
2670 (C) CE substitutes for helpful life information.
2671
2672 (D) These heuristics need a lot of work. */
2673
2674 /* Tests for case 1 above. */
2675
2676 static int
2677 find_if_case_1 (test_bb, then_edge, else_edge)
2678 basic_block test_bb;
2679 edge then_edge, else_edge;
2680 {
2681 basic_block then_bb = then_edge->dest;
2682 basic_block else_bb = else_edge->dest, new_bb;
2683 edge then_succ = then_bb->succ;
2684 int then_bb_index;
2685
2686 /* THEN has one successor. */
2687 if (!then_succ || then_succ->succ_next != NULL)
2688 return FALSE;
2689
2690 /* THEN does not fall through, but is not strange either. */
2691 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2692 return FALSE;
2693
2694 /* THEN has one predecessor. */
2695 if (then_bb->pred->pred_next != NULL)
2696 return FALSE;
2697
2698 /* THEN must do something. */
2699 if (forwarder_block_p (then_bb))
2700 return FALSE;
2701
2702 num_possible_if_blocks++;
2703 if (rtl_dump_file)
2704 fprintf (rtl_dump_file,
2705 "\nIF-CASE-1 found, start %d, then %d\n",
2706 test_bb->index, then_bb->index);
2707
2708 /* THEN is small. */
2709 if (count_bb_insns (then_bb) > BRANCH_COST)
2710 return FALSE;
2711
2712 /* Registers set are dead, or are predicable. */
2713 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2714 then_bb->succ->dest, 1))
2715 return FALSE;
2716
2717 /* Conversion went ok, including moving the insns and fixing up the
2718 jump. Adjust the CFG to match. */
2719
2720 bitmap_operation (test_bb->global_live_at_end,
2721 else_bb->global_live_at_start,
2722 then_bb->global_live_at_end, BITMAP_IOR);
2723
2724 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2725 then_bb_index = then_bb->index;
2726 if (post_dominators)
2727 delete_from_dominance_info (post_dominators, then_bb);
2728 flow_delete_block (then_bb);
2729
2730 /* Make the rest of the code believe that the newly created block is the THEN_BB
2731 block we removed. */
2732 if (new_bb)
2733 {
2734 new_bb->index = then_bb_index;
2735 BASIC_BLOCK (then_bb_index) = new_bb;
2736 }
2737 /* We've possibly created a jump to the next insn; cleanup_cfg will solve that
2738 later. */
2739
2740 num_removed_blocks++;
2741 num_updated_if_blocks++;
2742
2743 return TRUE;
2744 }
2745
2746 /* Test for case 2 above. */
2747
2748 static int
2749 find_if_case_2 (test_bb, then_edge, else_edge)
2750 basic_block test_bb;
2751 edge then_edge, else_edge;
2752 {
2753 basic_block then_bb = then_edge->dest;
2754 basic_block else_bb = else_edge->dest;
2755 edge else_succ = else_bb->succ;
2756 rtx note;
2757
2758 /* ELSE has one successor. */
2759 if (!else_succ || else_succ->succ_next != NULL)
2760 return FALSE;
2761
2762 /* ELSE outgoing edge is not complex. */
2763 if (else_succ->flags & EDGE_COMPLEX)
2764 return FALSE;
2765
2766 /* ELSE has one predecessor. */
2767 if (else_bb->pred->pred_next != NULL)
2768 return FALSE;
2769
2770 /* THEN is not EXIT. */
2771 if (then_bb->index < 0)
2772 return FALSE;
2773
2774 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2775 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2776 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2777 ;
2778 else if (else_succ->dest->index < 0
2779 || dominated_by_p (post_dominators, then_bb,
2780 else_succ->dest))
2781 ;
2782 else
2783 return FALSE;
2784
2785 num_possible_if_blocks++;
2786 if (rtl_dump_file)
2787 fprintf (rtl_dump_file,
2788 "\nIF-CASE-2 found, start %d, else %d\n",
2789 test_bb->index, else_bb->index);
2790
2791 /* ELSE is small. */
2792 if (count_bb_insns (else_bb) > BRANCH_COST)
2793 return FALSE;
2794
2795 /* Registers set are dead, or are predicable. */
2796 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2797 return FALSE;
2798
2799 /* Conversion went ok, including moving the insns and fixing up the
2800 jump. Adjust the CFG to match. */
2801
2802 bitmap_operation (test_bb->global_live_at_end,
2803 then_bb->global_live_at_start,
2804 else_bb->global_live_at_end, BITMAP_IOR);
2805
2806 if (post_dominators)
2807 delete_from_dominance_info (post_dominators, else_bb);
2808 flow_delete_block (else_bb);
2809
2810 num_removed_blocks++;
2811 num_updated_if_blocks++;
2812
2813 /* ??? We may now fallthru from one of THEN's successors into a join
2814 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2815
2816 return TRUE;
2817 }
2818
2819 /* A subroutine of dead_or_predicable called through for_each_rtx.
2820 Return 1 if a memory reference is found. */
2821
2822 static int
2823 find_memory (px, data)
2824 rtx *px;
2825 void *data ATTRIBUTE_UNUSED;
2826 {
2827 return GET_CODE (*px) == MEM;
2828 }
2829
2830 /* Used by the code above to perform the actual rtl transformations.
2831 Return TRUE if successful.
2832
2833 TEST_BB is the block containing the conditional branch. MERGE_BB
2834 is the block containing the code to manipulate. NEW_DEST is the
2835 block TEST_BB should be branching to after the conversion.
2836 REVERSEP is true if the sense of the branch should be reversed. */
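/* As an illustrative sketch of the non-predicated path, corresponding to
   case (1) in the large comment above: with TEST_BB ending in
   "if (test) goto over", MERGE_BB containing "x = a; goto label", and x
   dead along the other path, the body of MERGE_BB is hoisted in front of
   the branch and the branch is retargeted at NEW_DEST:

       x = a;
       if (! test) goto label;

   When conditional execution is available, the insns are instead
   predicated on the (possibly reversed) branch condition and no liveness
   check is needed.  */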
2837
2838 static int
2839 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2840 basic_block test_bb, merge_bb, other_bb;
2841 basic_block new_dest;
2842 int reversep;
2843 {
2844 rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2845
2846 jump = test_bb->end;
2847
2848 /* Find the extent of the real code in the merge block. */
2849 head = merge_bb->head;
2850 end = merge_bb->end;
2851
2852 if (GET_CODE (head) == CODE_LABEL)
2853 head = NEXT_INSN (head);
2854 if (GET_CODE (head) == NOTE)
2855 {
2856 if (head == end)
2857 {
2858 head = end = NULL_RTX;
2859 goto no_body;
2860 }
2861 head = NEXT_INSN (head);
2862 }
2863
2864 if (GET_CODE (end) == JUMP_INSN)
2865 {
2866 if (head == end)
2867 {
2868 head = end = NULL_RTX;
2869 goto no_body;
2870 }
2871 end = PREV_INSN (end);
2872 }
2873
2874 /* Disable handling dead code by conditional execution if the machine needs
2875 to do anything funny with the tests, etc. */
2876 #ifndef IFCVT_MODIFY_TESTS
2877 if (HAVE_conditional_execution)
2878 {
2879 /* In the conditional execution case, we have things easy. We know
2880 the condition is reversible. We don't have to check life info,
2881 because we're going to conditionally execute the code anyway.
2882 All that's left is making sure the insns involved can actually
2883 be predicated. */
2884
2885 rtx cond, prob_val;
2886
2887 cond = cond_exec_get_condition (jump);
2888 if (! cond)
2889 return FALSE;
2890
2891 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2892 if (prob_val)
2893 prob_val = XEXP (prob_val, 0);
2894
2895 if (reversep)
2896 {
2897 enum rtx_code rev = reversed_comparison_code (cond, jump);
2898 if (rev == UNKNOWN)
2899 return FALSE;
2900 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2901 XEXP (cond, 1));
2902 if (prob_val)
2903 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2904 }
2905
2906 if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
2907 prob_val, 0))
2908 goto cancel;
2909
2910 earliest = jump;
2911 }
2912 else
2913 #endif
2914 {
2915 /* In the non-conditional execution case, we have to verify that there
2916 are no trapping operations, no calls, no references to memory, and
2917 that any registers modified are dead at the branch site. */
2918
2919 rtx insn, cond, prev;
2920 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2921 regset merge_set, tmp, test_live, test_set;
2922 struct propagate_block_info *pbi;
2923 int i, fail = 0;
2924
2925 /* Check for no calls or trapping operations. */
2926 for (insn = head; ; insn = NEXT_INSN (insn))
2927 {
2928 if (GET_CODE (insn) == CALL_INSN)
2929 return FALSE;
2930 if (INSN_P (insn))
2931 {
2932 if (may_trap_p (PATTERN (insn)))
2933 return FALSE;
2934
2935 /* ??? Even non-trapping memories such as stack frame
2936 references must be avoided. For stores, we collect
2937 no lifetime info; for reads, we'd have to assert
2938 true_dependence false against every store in the
2939 TEST range. */
2940 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2941 return FALSE;
2942 }
2943 if (insn == end)
2944 break;
2945 }
2946
2947 if (! any_condjump_p (jump))
2948 return FALSE;
2949
2950 /* Find the extent of the conditional. */
2951 cond = noce_get_condition (jump, &earliest);
2952 if (! cond)
2953 return FALSE;
2954
2955 /* Collect:
2956 MERGE_SET = set of registers set in MERGE_BB
2957 TEST_LIVE = set of registers live at EARLIEST
2958 TEST_SET = set of registers set between EARLIEST and the
2959 end of the block. */
2960
2961 tmp = INITIALIZE_REG_SET (tmp_head);
2962 merge_set = INITIALIZE_REG_SET (merge_set_head);
2963 test_live = INITIALIZE_REG_SET (test_live_head);
2964 test_set = INITIALIZE_REG_SET (test_set_head);
2965
2966 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2967 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2968 since we've already asserted that MERGE_BB is small. */
2969 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2970
2971 /* For small register class machines, don't lengthen lifetimes of
2972 hard registers before reload. */
2973 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2974 {
2975 EXECUTE_IF_SET_IN_BITMAP
2976 (merge_set, 0, i,
2977 {
2978 if (i < FIRST_PSEUDO_REGISTER
2979 && ! fixed_regs[i]
2980 && ! global_regs[i])
2981 fail = 1;
2982 });
2983 }
2984
2985 /* For TEST, we're interested in a range of insns, not a whole block.
2986 Moreover, we're interested in the insns live from OTHER_BB. */
2987
2988 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2989 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2990 0);
2991
2992 for (insn = jump; ; insn = prev)
2993 {
2994 prev = propagate_one_insn (pbi, insn);
2995 if (insn == earliest)
2996 break;
2997 }
2998
2999 free_propagate_block_info (pbi);
3000
3001 /* We can perform the transformation if
3002 MERGE_SET & (TEST_SET | TEST_LIVE)
3003 and
3004 TEST_SET & merge_bb->global_live_at_start
3005 are empty. */
3006
3007 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
3008 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
3009 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3010
3011 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
3012 BITMAP_AND);
3013 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3014
3015 FREE_REG_SET (tmp);
3016 FREE_REG_SET (merge_set);
3017 FREE_REG_SET (test_live);
3018 FREE_REG_SET (test_set);
3019
3020 if (fail)
3021 return FALSE;
3022 }
3023
3024 no_body:
3025 /* We don't want to use normal invert_jump or redirect_jump because
3026 we don't want delete_insn called. Also, we want to do our own
3027 change group management. */
3028
3029 old_dest = JUMP_LABEL (jump);
3030 if (other_bb != new_dest)
3031 {
3032 new_label = block_label (new_dest);
3033 if (reversep
3034 ? ! invert_jump_1 (jump, new_label)
3035 : ! redirect_jump_1 (jump, new_label))
3036 goto cancel;
3037 }
3038
3039 if (! apply_change_group ())
3040 return FALSE;
3041
3042 if (other_bb != new_dest)
3043 {
3044 if (old_dest)
3045 LABEL_NUSES (old_dest) -= 1;
3046 if (new_label)
3047 LABEL_NUSES (new_label) += 1;
3048 JUMP_LABEL (jump) = new_label;
3049 if (reversep)
3050 invert_br_probabilities (jump);
3051
3052 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
3053 if (reversep)
3054 {
3055 gcov_type count, probability;
3056 count = BRANCH_EDGE (test_bb)->count;
3057 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
3058 FALLTHRU_EDGE (test_bb)->count = count;
3059 probability = BRANCH_EDGE (test_bb)->probability;
3060 BRANCH_EDGE (test_bb)->probability
3061 = FALLTHRU_EDGE (test_bb)->probability;
3062 FALLTHRU_EDGE (test_bb)->probability = probability;
3063 update_br_prob_note (test_bb);
3064 }
3065 }
3066
3067 /* Move the insns out of MERGE_BB to before the branch. */
3068 if (head != NULL)
3069 {
3070 if (end == merge_bb->end)
3071 merge_bb->end = PREV_INSN (head);
3072
3073 if (squeeze_notes (&head, &end))
3074 return TRUE;
3075
3076 reorder_insns (head, end, PREV_INSN (earliest));
3077 }
3078
3079 /* Remove the jump and edge if we can. */
3080 if (other_bb == new_dest)
3081 {
3082 delete_insn (jump);
3083 remove_edge (BRANCH_EDGE (test_bb));
3084 /* ??? Can't merge blocks here, as then_bb is still in use.
3085 At minimum, the merge will get done just before bb-reorder. */
3086 }
3087
3088 return TRUE;
3089
3090 cancel:
3091 cancel_changes (0);
3092 return FALSE;
3093 }
3094 \f
3095 /* Main entry point for all if-conversion. */
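/* A minimal sketch of how a caller might drive the pass (the real call
   sites live elsewhere in the compiler): pass zero for X_LIFE_DATA_OK
   before life information has been computed and nonzero once it is
   valid, so that the pass can rebuild life info for any blocks it
   removes:

       if_convert (0);    before life analysis
       if_convert (1);    once life info is valid                        */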
3096
3097 void
3098 if_convert (x_life_data_ok)
3099 int x_life_data_ok;
3100 {
3101 basic_block bb;
3102 int pass;
3103
3104 num_possible_if_blocks = 0;
3105 num_updated_if_blocks = 0;
3106 num_removed_blocks = 0;
3107 life_data_ok = (x_life_data_ok != 0);
3108
3109 /* Free up basic_block_for_insn so that we don't have to keep it
3110 up to date, either here or in merge_blocks_nomove. */
3111 free_basic_block_vars (1);
3112
3113 /* Compute postdominators if we think we'll use them. */
3114 post_dominators = NULL;
3115 if (HAVE_conditional_execution || life_data_ok)
3116 {
3117 post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
3118 }
3119 if (life_data_ok)
3120 clear_bb_flags ();
3121
3122 /* Go through each of the basic blocks looking for things to convert. If we
3123 have conditional execution, we make multiple passes to allow us to handle
3124 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
3125 pass = 0;
3126 do
3127 {
3128 cond_exec_changed_p = FALSE;
3129 pass++;
3130
3131 #ifdef IFCVT_MULTIPLE_DUMPS
3132 if (rtl_dump_file && pass > 1)
3133 fprintf (rtl_dump_file, "\n\n========== Pass %d ==========\n", pass);
3134 #endif
3135
3136 FOR_EACH_BB (bb)
3137 {
3138 basic_block new_bb;
3139 while ((new_bb = find_if_header (bb, pass)))
3140 bb = new_bb;
3141 }
3142
3143 #ifdef IFCVT_MULTIPLE_DUMPS
3144 if (rtl_dump_file && cond_exec_changed_p)
3145 print_rtl_with_bb (rtl_dump_file, get_insns ());
3146 #endif
3147 }
3148 while (cond_exec_changed_p);
3149
3150 #ifdef IFCVT_MULTIPLE_DUMPS
3151 if (rtl_dump_file)
3152 fprintf (rtl_dump_file, "\n\n========== no more changes\n");
3153 #endif
3154
3155 if (post_dominators)
3156 free_dominance_info (post_dominators);
3157
3158 if (rtl_dump_file)
3159 fflush (rtl_dump_file);
3160
3161 clear_aux_for_blocks ();
3162
3163 /* Rebuild life info for basic blocks that require it. */
3164 if (num_removed_blocks && life_data_ok)
3165 {
3166 /* If we allocated new pseudos, we must resize the array for sched1. */
3167 if (max_regno < max_reg_num ())
3168 {
3169 max_regno = max_reg_num ();
3170 allocate_reg_info (max_regno, FALSE, FALSE);
3171 }
3172 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3173 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3174 | PROP_KILL_DEAD_CODE);
3175 }
3176
3177 /* Write the final stats. */
3178 if (rtl_dump_file && num_possible_if_blocks > 0)
3179 {
3180 fprintf (rtl_dump_file,
3181 "\n%d possible IF blocks searched.\n",
3182 num_possible_if_blocks);
3183 fprintf (rtl_dump_file,
3184 "%d IF blocks converted.\n",
3185 num_updated_if_blocks);
3186 fprintf (rtl_dump_file,
3187 "%d basic blocks deleted.\n\n\n",
3188 num_removed_blocks);
3189 }
3190
3191 #ifdef ENABLE_CHECKING
3192 verify_flow_info ();
3193 #endif
3194 }