cppcharset.c (one_iso88591_to_utf8): New function.
[gcc.git] / gcc / ifcvt.c
1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "optabs.h"
39 #include "toplev.h"
40 #include "tm_p.h"
41 #include "cfgloop.h"
42 #include "target.h"
43
44
/* Provide zero defaults for optional machine-description patterns so
   the code below can test the HAVE_* macros unconditionally.  */
#ifndef HAVE_conditional_execution
#define HAVE_conditional_execution 0
#endif
#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif
#ifndef HAVE_trap
#define HAVE_trap 0
#endif
#ifndef HAVE_conditional_trap
#define HAVE_conditional_trap 0
#endif

/* Default budget of insns we may convert per eliminated branch,
   unless the target overrides it.  */
#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE   (BRANCH_COST + 1)
#endif

/* Typed null constants for edges and basic blocks.  */
#define NULL_EDGE	((struct edge_def *)NULL)
#define NULL_BLOCK	((struct basic_block_def *)NULL)

/* # of IF-THEN or IF-THEN-ELSE blocks we looked at  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made which require life information to be updated.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* True if life data ok at present.  */
static bool life_data_ok;

/* Forward references.  */
static int count_bb_insns (basic_block);
static rtx first_active_insn (basic_block);
static rtx last_active_insn (basic_block, int);
static int seq_contains_jump (rtx);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
static rtx cond_exec_get_condition (rtx);
static int cond_exec_process_if_block (ce_if_block_t *, int);
static rtx noce_get_condition (rtx, rtx *);
static int noce_operand_ok (rtx);
static int noce_process_if_block (ce_if_block_t *);
static int process_if_block (ce_if_block_t *);
static void merge_if_block (ce_if_block_t *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
static int find_if_block (ce_if_block_t *);
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int find_memory (rtx *, void *);
static int dead_or_predicable (basic_block, basic_block, basic_block,
			       basic_block, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx block_has_only_trap (basic_block);
static void mark_loop_exit_edges (void);
113 \f
114 /* Sets EDGE_LOOP_EXIT flag for all loop exits. */
115 static void
116 mark_loop_exit_edges (void)
117 {
118 struct loops loops;
119 basic_block bb;
120 edge e;
121
122 flow_loops_find (&loops, LOOP_TREE);
123 free_dominance_info (CDI_DOMINATORS);
124
125 if (loops.num > 1)
126 {
127 FOR_EACH_BB (bb)
128 {
129 for (e = bb->succ; e; e = e->succ_next)
130 {
131 if (find_common_loop (bb->loop_father, e->dest->loop_father)
132 != bb->loop_father)
133 e->flags |= EDGE_LOOP_EXIT;
134 else
135 e->flags &= ~EDGE_LOOP_EXIT;
136 }
137 }
138 }
139
140 flow_loops_free (&loops);
141 }
142
143 /* Count the number of non-jump active insns in BB. */
144
145 static int
146 count_bb_insns (basic_block bb)
147 {
148 int count = 0;
149 rtx insn = BB_HEAD (bb);
150
151 while (1)
152 {
153 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
154 count++;
155
156 if (insn == BB_END (bb))
157 break;
158 insn = NEXT_INSN (insn);
159 }
160
161 return count;
162 }
163
164 /* Return the first non-jump active insn in the basic block. */
165
166 static rtx
167 first_active_insn (basic_block bb)
168 {
169 rtx insn = BB_HEAD (bb);
170
171 if (GET_CODE (insn) == CODE_LABEL)
172 {
173 if (insn == BB_END (bb))
174 return NULL_RTX;
175 insn = NEXT_INSN (insn);
176 }
177
178 while (GET_CODE (insn) == NOTE)
179 {
180 if (insn == BB_END (bb))
181 return NULL_RTX;
182 insn = NEXT_INSN (insn);
183 }
184
185 if (GET_CODE (insn) == JUMP_INSN)
186 return NULL_RTX;
187
188 return insn;
189 }
190
191 /* Return the last non-jump active (non-jump) insn in the basic block. */
192
193 static rtx
194 last_active_insn (basic_block bb, int skip_use_p)
195 {
196 rtx insn = BB_END (bb);
197 rtx head = BB_HEAD (bb);
198
199 while (GET_CODE (insn) == NOTE
200 || GET_CODE (insn) == JUMP_INSN
201 || (skip_use_p
202 && GET_CODE (insn) == INSN
203 && GET_CODE (PATTERN (insn)) == USE))
204 {
205 if (insn == head)
206 return NULL_RTX;
207 insn = PREV_INSN (insn);
208 }
209
210 if (GET_CODE (insn) == CODE_LABEL)
211 return NULL_RTX;
212
213 return insn;
214 }
215
216 /* It is possible, especially when having dealt with multi-word
217 arithmetic, for the expanders to have emitted jumps. Search
218 through the sequence and return TRUE if a jump exists so that
219 we can abort the conversion. */
220
221 static int
222 seq_contains_jump (rtx insn)
223 {
224 while (insn)
225 {
226 if (GET_CODE (insn) == JUMP_INSN)
227 return 1;
228 insn = NEXT_INSN (insn);
229 }
230 return 0;
231 }
232
233 static basic_block
234 block_fallthru (basic_block bb)
235 {
236 edge e;
237
238 for (e = bb->succ;
239 e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
240 e = e->succ_next)
241 ;
242
243 return (e) ? e->dest : NULL_BLOCK;
244 }
245 \f
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.  */

static int
cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
			 /* if block information */rtx start,
			 /* first insn to look at */rtx end,
			 /* last insn to look at */rtx test,
			 /* conditional execution test */rtx prob_val,
			 /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;
  rtx insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* Notes need no predication; skip straight to loop control.  */
      if (GET_CODE (insn) == NOTE)
	goto insn_done;

      if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
	abort ();

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
	{
	  /* ??? Ug.  Actually unlinking the thing is problematic,
	     given what we'd have to coordinate with our callers.
	     Instead, turn the USE in place into a deleted note.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	  NOTE_SOURCE_FILE (insn) = 0;
	  goto insn_done;
	}

      /* Last insn wasn't last?  A prior insn clobbered the test, so
	 anything after it would be predicated on a stale condition.  */
      if (must_be_last)
	return FALSE;

      /* If this insn modifies the test expression, it may only be
	 converted when the caller allows it (MOD_OK), and it must be
	 the final converted insn.  */
      if (modified_in_p (test, insn))
	{
	  if (!mod_ok)
	    return FALSE;
	  must_be_last = TRUE;
	}

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
	 two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
	{
	  if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
	    return FALSE;

	  xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
			       COND_EXEC_TEST (pattern));
	  pattern = COND_EXEC_CODE (pattern);
	}

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
	 say for example to force a constant integer operand into a temp
	 register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
	return FALSE;
#endif

      /* Queue the change; it is committed later by apply_change_group.  */
      validate_change (insn, &PATTERN (insn), pattern, 1);

      if (GET_CODE (insn) == CALL_INSN && prob_val)
	validate_change (insn, &REG_NOTES (insn),
			 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
					  REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
	break;
    }

  return TRUE;
}
337
338 /* Return the condition for a jump. Do not do any special processing. */
339
340 static rtx
341 cond_exec_get_condition (rtx jump)
342 {
343 rtx test_if, cond;
344
345 if (any_condjump_p (jump))
346 test_if = SET_SRC (pc_set (jump));
347 else
348 return NULL_RTX;
349 cond = XEXP (test_if, 0);
350
351 /* If this branches to JUMP_LABEL when the condition is false,
352 reverse the condition. */
353 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
354 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
355 {
356 enum rtx_code rev = reversed_comparison_code (cond, jump);
357 if (rev == UNKNOWN)
358 return NULL_RTX;
359
360 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
361 XEXP (cond, 1));
362 }
363
364 return cond;
365 }
366
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_exec_process_if_block (ce_if_block_t * ce_info,
			    /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
  rtx then_start;		/* first insn in THEN block */
  rtx then_end;			/* last insn + 1 in THEN block */
  rtx else_start = NULL_RTX;	/* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;	/* last insn + 1 in ELSE block */
  int max;			/* max # of insns to convert. */
  int then_mod_ok;		/* whether conditional mods are ok in THEN */
  rtx true_expr;		/* test for else block insns */
  rtx false_expr;		/* test for then block insns */
  rtx true_prob_val;		/* probability of else block */
  rtx false_prob_val;		/* probability of then block */
  int n_insns;
  enum rtx_code false_code;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      /* An ELSE arm doubles the insn budget.  */
      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  Note that FALSE_EXPR predicates
     the THEN arm and TRUE_EXPR the ELSE arm, because the jump is taken
     when the condition holds (skipping THEN).  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
				 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (true_prob_val)
    {
      true_prob_val = XEXP (true_prob_val, 0);
      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
    }
  else
    false_prob_val = NULL_RTX;

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
	goto fail;

      do
	{
	  rtx start, end;
	  rtx t, f;

	  bb = block_fallthru (bb);
	  start = first_active_insn (bb);
	  end = last_active_insn (bb, TRUE);
	  if (start
	      && ! cond_exec_process_insns (ce_info, start, end, false_expr,
					    false_prob_val, FALSE))
	    goto fail;

	  /* If the conditional jump is more than just a conditional jump, then
	     we can not do conditional execution conversion on this block.  */
	  if (! onlyjump_p (BB_END (bb)))
	    goto fail;

	  /* Find the conditional jump and isolate the test.  */
	  t = cond_exec_get_condition (BB_END (bb));
	  if (! t)
	    goto fail;

	  f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
			      GET_MODE (t),
			      XEXP (t, 0),
			      XEXP (t, 1));

	  /* Accumulate the combined test: for &&, all tests must hold;
	     for ||, the combined false test requires all to be false.  */
	  if (ce_info->and_and_p)
	    {
	      t = gen_rtx_AND (GET_MODE (t), true_expr, t);
	      f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
	    }
	  else
	    {
	      t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
	      f = gen_rtx_AND (GET_MODE (t), false_expr, f);
	    }

	  /* If the machine description needs to modify the tests, such as
	     setting a conditional execution register from a comparison, it can
	     do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
	  IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

	  /* See if the conversion failed.  */
	  if (!t || !f)
	    goto fail;
#endif

	  true_expr = t;
	  false_expr = f;
	}
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
	  || ! cond_exec_process_insns (ce_info, then_start, then_end,
					false_expr, false_prob_val,
					then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
				    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
	     n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  */
  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
587 \f
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  basic_block test_bb;		/* Block ending in the conditional jump.  */
  rtx insn_a, insn_b;		/* Insns of the THEN and ELSE arms;
				   INSN_A supplies the insn locator for
				   emitted replacements.  */
  rtx x, a, b;			/* Destination register X and the two
				   candidate source values A and B.  */
  rtx jump, cond, cond_earliest; /* The conditional jump, its comparison,
				   and the earliest insn computing it —
				   new insns go before cond_earliest.  */
};
601
/* Forward references for the no-conditional-execution ("noce")
   transformations.  */
static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
static int noce_try_move (struct noce_if_info *);
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
			    rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
615
/* Helper function for noce_try_store_flag*.  Emit code to set X to the
   value of the condition (possibly REVERSEP-ed, possibly NORMALIZE-d)
   and return the resulting rtx, or NULL_RTX on failure.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
		      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  /* The condition is "complex" when either operand is not a
     general_operand (e.g. a combine-created comparison).  */
  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
		  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx tmp;

      /* Build (set x (code op0 op1)) and see if the target
	 recognizes it as-is.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
			    XEXP (cond, 1));
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
	{
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insn (tmp);

	  if_info->cond_earliest = if_info->jump;

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  /* Fall back to the generic store-flag expander; the fourth-from-last
     argument marks unsigned comparisons.  */
  return emit_store_flag (x, code, XEXP (cond, 0),
			  XEXP (cond, 1), VOIDmode,
			  (code == LTU || code == LEU
			   || code == GEU || code == GTU), normalize);
}
675
676 /* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
677 X is the destination/target and Y is the value to copy. */
678
679 static void
680 noce_emit_move_insn (rtx x, rtx y)
681 {
682 enum machine_mode outmode, inmode;
683 rtx outer, inner;
684 int bitpos;
685
686 if (GET_CODE (x) != STRICT_LOW_PART)
687 {
688 emit_move_insn (x, y);
689 return;
690 }
691
692 outer = XEXP (x, 0);
693 inner = XEXP (outer, 0);
694 outmode = GET_MODE (outer);
695 inmode = GET_MODE (inner);
696 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
697 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
698 GET_MODE_BITSIZE (inmode));
699 }
700
701 /* Unshare sequence SEQ produced by if conversion. We care to mark
702 all arguments that may be shared with outer instruction stream. */
703 static void
704 unshare_ifcvt_sequence (struct noce_if_info *if_info, rtx seq)
705 {
706 set_used_flags (if_info->x);
707 set_used_flags (if_info->cond);
708 unshare_all_rtl_in_chain (seq);
709 }
710
/* Convert "if (a != b) x = a; else x = b" into "x = a" and
   "if (a == b) x = a; else x = b" into "x = b".  */

static int
noce_try_move (struct noce_if_info *if_info)
{
  rtx cond = if_info->cond;
  enum rtx_code code = GET_CODE (cond);
  rtx y, seq;

  if (code != NE && code != EQ)
    return FALSE;

  /* This optimization isn't valid if either A or B could be a NaN
     or a signed zero.  */
  if (HONOR_NANS (GET_MODE (if_info->x))
      || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
    return FALSE;

  /* Check whether the operands of the comparison are A and B,
     in either order.  */
  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
	  && rtx_equal_p (if_info->b, XEXP (cond, 0))))
    {
      /* Under the comparison both arms yield the same value, so either
	 source is correct on both paths.  */
      y = (code == EQ) ? if_info->a : if_info->b;

      /* Avoid generating the move if the source is the destination.  */
      if (! rtx_equal_p (if_info->x, y))
	{
	  start_sequence ();
	  noce_emit_move_insn (if_info->x, y);
	  seq = get_insns ();
	  unshare_ifcvt_sequence (if_info, seq);
	  end_sequence ();
	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATOR (if_info->insn_a));
	}
      return TRUE;
    }
  return FALSE;
}
754
755 /* Convert "if (test) x = 1; else x = 0".
756
757 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
758 tried in noce_try_store_flag_constants after noce_try_cmove has had
759 a go at the conversion. */
760
761 static int
762 noce_try_store_flag (struct noce_if_info *if_info)
763 {
764 int reversep;
765 rtx target, seq;
766
767 if (GET_CODE (if_info->b) == CONST_INT
768 && INTVAL (if_info->b) == STORE_FLAG_VALUE
769 && if_info->a == const0_rtx)
770 reversep = 0;
771 else if (if_info->b == const0_rtx
772 && GET_CODE (if_info->a) == CONST_INT
773 && INTVAL (if_info->a) == STORE_FLAG_VALUE
774 && (reversed_comparison_code (if_info->cond, if_info->jump)
775 != UNKNOWN))
776 reversep = 1;
777 else
778 return FALSE;
779
780 start_sequence ();
781
782 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
783 if (target)
784 {
785 if (target != if_info->x)
786 noce_emit_move_insn (if_info->x, target);
787
788 seq = get_insns ();
789 unshare_ifcvt_sequence (if_info, seq);
790 end_sequence ();
791 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
792
793 return TRUE;
794 }
795 else
796 {
797 end_sequence ();
798 return FALSE;
799 }
800 }
801
/* Convert "if (test) x = a; else x = b", for A and B constant.  */

static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  enum machine_mode mode;

  if (! no_new_pseudos
      && GET_CODE (if_info->a) == CONST_INT
      && GET_CODE (if_info->b) == CONST_INT)
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      /* Make sure we can represent the difference between the two values.
	 The test checks that the sign of (itrue - ifalse) matches what a
	 mathematically exact subtraction would give, i.e. no overflow.  */
      if ((itrue - ifalse > 0)
	  != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
	return FALSE;

      diff = trunc_int_for_mode (itrue - ifalse, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
		     != UNKNOWN);

      /* Select how the store-flag result must be normalized (0, 1 or -1)
	 and whether the condition should be reversed, preferring forms
	 that are free for this target's STORE_FLAG_VALUE/BRANCH_COST.  */
      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
	       && (STORE_FLAG_VALUE == 1
		   || BRANCH_COST >= 2))
	normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
	       && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
	normalize = 1, reversep = 1;
      else if (itrue == -1
	       && (STORE_FLAG_VALUE == -1
		   || BRANCH_COST >= 2))
	normalize = -1;
      else if (ifalse == -1 && can_reverse
	       && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
	normalize = -1, reversep = 1;
      else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
	       || BRANCH_COST >= 3)
	normalize = -1;
      else
	return FALSE;

      /* Reversing swaps the roles of the two constants.  */
      if (reversep)
	{
	  tmp = itrue; itrue = ifalse; ifalse = tmp;
	  diff = trunc_int_for_mode (-diff, mode);
	}

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      /* if (test) x = 3; else x = 4;
	 =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	{
	  target = expand_simple_binop (mode,
					(diff == STORE_FLAG_VALUE
					 ? PLUS : MINUS),
					GEN_INT (ifalse), target, if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = 8; else x = 0;
	 =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
	{
	  target = expand_simple_binop (mode, ASHIFT,
					target, GEN_INT (tmp), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = -1; else x = b;
	 =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
	{
	  target = expand_simple_binop (mode, IOR,
					target, GEN_INT (ifalse), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;  */
      else
	{
	  target = expand_simple_binop (mode, AND,
					target, GEN_INT (diff), if_info->x, 0,
					OPTAB_WIDEN);
	  if (target)
	    target = expand_simple_binop (mode, PLUS,
					  target, GEN_INT (ifalse),
					  if_info->x, 0, OPTAB_WIDEN);
	}

      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = get_insns ();
      unshare_ifcvt_sequence (if_info, seq);
      end_sequence ();

      /* The expanders may have emitted jumps, which would defeat the
	 purpose of the conversion.  */
      if (seq_contains_jump (seq))
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));

      return TRUE;
    }

  return FALSE;
}
933
/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target, seq;
  int subtract, normalize;

  /* Applies when A has the form (plus B increment) and the condition
     is reversible; the add happens on the branch-not-taken path.  */
  if (! no_new_pseudos
      && GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
	  != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
	  && general_operand (XEXP (cond, 1), VOIDmode))
	{
	  start_sequence ();
	  target = emit_conditional_add (if_info->x, code,
					 XEXP (cond, 0),
					 XEXP (cond, 1),
					 VOIDmode,
					 if_info->b,
					 XEXP (if_info->a, 1),
					 GET_MODE (if_info->x),
					 (code == LTU || code == GEU
					  || code == LEU || code == GTU));
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = get_insns ();
	      unshare_ifcvt_sequence (if_info, seq);
	      end_sequence ();
	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATOR (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}

      /* If that fails, construct conditional increment or decrement using
	 setcc.  */
      if (BRANCH_COST >= 2
	  && (XEXP (if_info->a, 1) == const1_rtx
	      || XEXP (if_info->a, 1) == constm1_rtx))
	{
	  start_sequence ();
	  /* Choose add vs. subtract and normalization so that the
	     store-flag value times +/-1 equals the increment.  */
	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 0, normalize = 0;
	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 1, normalize = 0;
	  else
	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));


	  target = noce_emit_store_flag (if_info,
					 gen_reg_rtx (GET_MODE (if_info->x)),
					 1, normalize);

	  if (target)
	    target = expand_simple_binop (GET_MODE (if_info->x),
					  subtract ? MINUS : PLUS,
					  if_info->b, target, if_info->x,
					  0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = get_insns ();
	      unshare_ifcvt_sequence (if_info, seq);
	      end_sequence ();

	      /* Bail out if the expanders emitted jumps.  */
	      if (seq_contains_jump (seq))
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				       INSN_LOCATOR (if_info->insn_a));

	      return TRUE;
	    }
	  end_sequence ();
	}
    }

  return FALSE;
}
1028
/* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */

static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;

  /* Either arm may be the zero; for the B == 0 form the condition must
     be reversible.  Note the embedded assignment: REVERSEP is set as a
     side effect of evaluating the second alternative.  */
  reversep = 0;
  if (! no_new_pseudos
      && (BRANCH_COST >= 2
	  || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
	   && rtx_equal_p (if_info->b, if_info->x))
	  || ((reversep = (reversed_comparison_code (if_info->cond,
						     if_info->jump)
			   != UNKNOWN))
	      && if_info->b == const0_rtx
	      && rtx_equal_p (if_info->a, if_info->x))))
    {
      start_sequence ();
      /* Store the condition normalized to -1/0 into a fresh register,
	 then AND it into X.  */
      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),
				     reversep, -1);
      if (target)
	target = expand_simple_binop (GET_MODE (if_info->x), AND,
				      if_info->x,
				      target, if_info->x, 0,
				      OPTAB_WIDEN);

      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = get_insns ();
	  unshare_ifcvt_sequence (if_info, seq);
	  end_sequence ();

	  /* Bail out if the expanders emitted jumps.  */
	  if (seq_contains_jump (seq))
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATOR (if_info->insn_a));

	  return TRUE;
	}

      end_sequence ();
    }

  return FALSE;
}
1082
/* Helper function for noce_try_cmove and noce_try_cmove_arith.  Emit a
   conditional move of VTRUE/VFALSE into X, selected by (CODE CMP_A CMP_B).
   Return the result rtx, or NULL_RTX on failure.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx tmp;

      /* Build (set x (if_then_else (code cmp_a cmp_b) vtrue vfalse))
	 and see if the target recognizes it as-is.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
	{
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insn (tmp);

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    return NULL_RTX;

#if HAVE_conditional_move
  return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
				vtrue, vfalse, GET_MODE (x),
				(code == LTU || code == GEU
				 || code == LEU || code == GTU));
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}
1135
1136 /* Try only simple constants and registers here. More complex cases
1137 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1138 has had a go at it. */
1139
1140 static int
1141 noce_try_cmove (struct noce_if_info *if_info)
1142 {
1143 enum rtx_code code;
1144 rtx target, seq;
1145
1146 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1147 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1148 {
1149 start_sequence ();
1150
1151 code = GET_CODE (if_info->cond);
1152 target = noce_emit_cmove (if_info, if_info->x, code,
1153 XEXP (if_info->cond, 0),
1154 XEXP (if_info->cond, 1),
1155 if_info->a, if_info->b);
1156
1157 if (target)
1158 {
1159 if (target != if_info->x)
1160 noce_emit_move_insn (if_info->x, target);
1161
1162 seq = get_insns ();
1163 unshare_ifcvt_sequence (if_info, seq);
1164 end_sequence ();
1165 emit_insn_before_setloc (seq, if_info->jump,
1166 INSN_LOCATOR (if_info->insn_a));
1167 return TRUE;
1168 }
1169 else
1170 {
1171 end_sequence ();
1172 return FALSE;
1173 }
1174 }
1175
1176 return FALSE;
1177 }
1178
/* Try more complex cases involving conditional_move: arms that are
   arbitrary arithmetic, computed unconditionally into temporaries and
   then selected with a cmove.  Return TRUE on success.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx insn_a, insn_b;
  rtx tmp, target;
  int is_mem = 0;
  enum rtx_code code;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  if (! no_new_pseudos && cse_not_expected
      && GET_CODE (a) == MEM && GET_CODE (b) == MEM
      && BRANCH_COST >= 5)
    {
      /* Select between the two addresses, then load from the winner.  */
      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (Pmode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_p (a) || may_trap_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
	if (test)
	  x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      /* Prefer to have B (computed last) be X itself or already a
	 simple operand.  */
      if (rtx_equal_p (b, x))
	reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
	reversep = 1;

      if (reversep)
	{
	  /* Swap the arms and invert the comparison to match.  */
	  code = reversed_comparison_code (if_info->cond, if_info->jump);
	  tmp = a, a = b, b = tmp;
	  tmp = insn_a, insn_a = insn_b, insn_b = tmp;
	}
    }

  start_sequence ();

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx set;

      if (no_new_pseudos)
	goto end_seq_and_fail;

      if (is_mem)
	{
	  /* A is an address here; copy it into a fresh pseudo.  */
	  tmp = gen_reg_rtx (GET_MODE (a));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
	}
      else if (! insn_a)
	goto end_seq_and_fail;
      else
	{
	  /* Re-emit a copy of the original insn, redirecting its
	     destination to a new pseudo.  */
	  a = gen_reg_rtx (GET_MODE (a));
	  tmp = copy_rtx (insn_a);
	  set = single_set (tmp);
	  SET_DEST (set) = a;
	  tmp = emit_insn (PATTERN (tmp));
	}
      /* Make sure the copied insn is still recognizable.  */
      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx set;

      if (no_new_pseudos)
	goto end_seq_and_fail;

      if (is_mem)
	{
	  /* Likewise for the B address.  */
	  tmp = gen_reg_rtx (GET_MODE (b));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode,
					tmp,
					b));
	}
      else if (! insn_b)
	goto end_seq_and_fail;
      else
	{
	  b = gen_reg_rtx (GET_MODE (b));
	  tmp = copy_rtx (insn_b);
	  set = single_set (tmp);
	  SET_DEST (set) = b;
	  tmp = emit_insn (PATTERN (tmp));
	}
      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
			    XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory for above, emit the load now.  */
  if (is_mem)
    {
      /* TARGET holds the selected address; build the load from it.  */
      tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
	MEM_VOLATILE_P (tmp) = 1;
      if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
	MEM_IN_STRUCT_P (tmp) = 1;
      if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
	MEM_SCALAR_P (tmp) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
	set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
      /* Only the weaker of the two alignments can be assumed.  */
      set_mem_align (tmp,
		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      noce_emit_move_insn (if_info->x, tmp);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  /* Splice the whole sequence in just before the jump.  */
  tmp = get_insns ();
  unshare_ifcvt_sequence (if_info, tmp);
  end_sequence ();
  emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
1337
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.
   Return a condition rtx mentioning TARGET, or NULL on failure; on
   success set *EARLIEST to the earliest insn at which the condition
   is valid.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
			rtx *earliest)
{
  rtx cond, set, insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  /* The sense is reversed when the jump targets its label on the
     false arm of the IF_THEN_ELSE.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
	replacing "if x < 4" with "if x <= 3".  We can undo that (or
	make equivalent types of changes) to get the constants we need
	if they're off by one in the right direction.  */

  if (GET_CODE (target) == CONST_INT)
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = PREV_INSN (if_info->cond_earliest);
      if (prev_insn
	  && INSN_P (prev_insn)
	  && GET_CODE (PATTERN (prev_insn)) == SET)
	{
	  /* Prefer a REG_EQUAL/REG_EQUIV note over the raw SET_SRC.  */
	  rtx src = find_reg_equal_equiv_note (prev_insn);
	  if (!src)
	    src = SET_SRC (PATTERN (prev_insn));
	  if (GET_CODE (src) == CONST_INT)
	    {
	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
		op_a = src;
	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
		op_b = src;

	      /* Canonicalize so any constant ends up as the second
		 operand.  */
	      if (GET_CODE (op_a) == CONST_INT)
		{
		  rtx tmp = op_a;
		  op_a = op_b;
		  op_b = tmp;
		  code = swap_condition (code);
		}
	    }
	}

      /* Now, look to see if we can get the right constant by
	 adjusting the conditional.  */
      if (GET_CODE (op_b) == CONST_INT)
	{
	  HOST_WIDE_INT desired_val = INTVAL (target);
	  HOST_WIDE_INT actual_val = INTVAL (op_b);

	  /* Each case relaxes or tightens the comparison by one so
	     the desired constant can appear literally.  */
	  switch (code)
	    {
	    case LT:
	      if (actual_val == desired_val + 1)
		{
		  code = LE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case LE:
	      if (actual_val == desired_val - 1)
		{
		  code = LT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GT:
	      if (actual_val == desired_val - 1)
		{
		  code = GE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GE:
	      if (actual_val == desired_val + 1)
		{
		  code = GT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    default:
	      break;
	    }
	}

      /* If we made any changes, generate a new conditional that is
	 equivalent to what we started with, but has the right
	 constants in it.  */
      if (code != GET_CODE (if_info->cond)
	  || op_a != XEXP (if_info->cond, 0)
	  || op_b != XEXP (if_info->cond, 1))
	{
	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
	  *earliest = if_info->cond_earliest;
	  return cond;
	}
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
				 earliest, target, false);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (modified_in_p (if_info->a, insn)
	    || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
1486
1487 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1488
1489 static int
1490 noce_try_minmax (struct noce_if_info *if_info)
1491 {
1492 rtx cond, earliest, target, seq;
1493 enum rtx_code code, op;
1494 int unsignedp;
1495
1496 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1497 if (no_new_pseudos)
1498 return FALSE;
1499
1500 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1501 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1502 to get the target to tell us... */
1503 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1504 || HONOR_NANS (GET_MODE (if_info->x)))
1505 return FALSE;
1506
1507 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1508 if (!cond)
1509 return FALSE;
1510
1511 /* Verify the condition is of the form we expect, and canonicalize
1512 the comparison code. */
1513 code = GET_CODE (cond);
1514 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1515 {
1516 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1517 return FALSE;
1518 }
1519 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1520 {
1521 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1522 return FALSE;
1523 code = swap_condition (code);
1524 }
1525 else
1526 return FALSE;
1527
1528 /* Determine what sort of operation this is. Note that the code is for
1529 a taken branch, so the code->operation mapping appears backwards. */
1530 switch (code)
1531 {
1532 case LT:
1533 case LE:
1534 case UNLT:
1535 case UNLE:
1536 op = SMAX;
1537 unsignedp = 0;
1538 break;
1539 case GT:
1540 case GE:
1541 case UNGT:
1542 case UNGE:
1543 op = SMIN;
1544 unsignedp = 0;
1545 break;
1546 case LTU:
1547 case LEU:
1548 op = UMAX;
1549 unsignedp = 1;
1550 break;
1551 case GTU:
1552 case GEU:
1553 op = UMIN;
1554 unsignedp = 1;
1555 break;
1556 default:
1557 return FALSE;
1558 }
1559
1560 start_sequence ();
1561
1562 target = expand_simple_binop (GET_MODE (if_info->x), op,
1563 if_info->a, if_info->b,
1564 if_info->x, unsignedp, OPTAB_WIDEN);
1565 if (! target)
1566 {
1567 end_sequence ();
1568 return FALSE;
1569 }
1570 if (target != if_info->x)
1571 noce_emit_move_insn (if_info->x, target);
1572
1573 seq = get_insns ();
1574 unshare_ifcvt_sequence (if_info, seq);
1575 end_sequence ();
1576
1577 if (seq_contains_jump (seq))
1578 return FALSE;
1579
1580 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1581 if_info->cond = cond;
1582 if_info->cond_earliest = earliest;
1583
1584 return TRUE;
1585 }
1586
/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc.
   Return TRUE on success.  */

static int
noce_try_abs (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq, a, b, c;
  int negate;

  /* ??? Can't guarantee that expand_binop won't create pseudos.  */
  if (no_new_pseudos)
    return FALSE;

  /* Recognize A and B as constituting an ABS or NABS.  After this,
     B always holds the non-negated value.  */
  a = if_info->a;
  b = if_info->b;
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
    negate = 0;
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
    }
  else
    return FALSE;

  cond = noce_get_alt_condition (if_info, b, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect: B compared
     against some C, extracted here and checked for zero below.  */
  if (rtx_equal_p (XEXP (cond, 0), b))
    c = XEXP (cond, 1);
  else if (rtx_equal_p (XEXP (cond, 1), b))
    c = XEXP (cond, 0);
  else
    return FALSE;

  /* Verify that C is zero.  Search backward through the block for
     a REG_EQUAL note if necessary.  */
  if (REG_P (c))
    {
      rtx insn, note = NULL;
      for (insn = earliest;
	   insn != BB_HEAD (if_info->test_bb);
	   insn = PREV_INSN (insn))
	if (INSN_P (insn)
	    && ((note = find_reg_note (insn, REG_EQUAL, c))
		|| (note = find_reg_note (insn, REG_EQUIV, c))))
	  break;
      if (! note)
	return FALSE;
      c = XEXP (note, 0);
    }
  /* The constant may be hiding behind a constant-pool reference.  */
  if (GET_CODE (c) == MEM
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
     Note that these rtx constants are known to be CONST_INT, and
     therefore imply integer comparisons.  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)
    ;
  else if (c == const1_rtx && GET_CODE (cond) == LT)
    ;
  else if (c != CONST0_RTX (GET_MODE (b)))
    return FALSE;

  /* Determine what sort of operation this is.  For the less-than
     cases the sense of the negation is inverted.  */
  switch (GET_CODE (cond))
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      negate = !negate;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);

  /* ??? It's a quandary whether cmove would be better here, especially
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
    target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);

  if (! target)
    {
      end_sequence ();
      return FALSE;
    }

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = get_insns ();
  unshare_ifcvt_sequence (if_info, seq);
  end_sequence ();

  /* We can't insert a sequence that itself contains a jump.  */
  if (seq_contains_jump (seq))
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
1704
/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.  Return the condition rtx
   or NULL_RTX; *EARLIEST receives the earliest insn at which the
   condition holds.  */

static rtx
noce_get_condition (rtx jump, rtx *earliest)
{
  rtx cond, set, tmp, insn;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	     && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* If the condition variable is a register and is MODE_INT, accept it.  */

  cond = XEXP (SET_SRC (set), 0);
  tmp = XEXP (cond, 0);
  if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
    {
      /* Already valid at the jump itself.  */
      *earliest = jump;

      if (reverse)
	cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
			       GET_MODE (cond), tmp, XEXP (cond, 1));
      return cond;
    }

  /* Otherwise, fall back on canonicalize_condition to do the dirty
     work of manipulating MODE_CC values and COMPARE rtx codes.  */

  tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
				false);
  if (!tmp)
    return NULL_RTX;

  /* We are going to insert code before JUMP, not before EARLIEST.
     We must therefore be certain that the given condition is valid
     at JUMP by virtue of not having been modified since.  */
  for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      break;
  if (insn == jump)
    return tmp;

  /* The condition was modified.  See if we can get a partial result
     that doesn't follow all the reversals.  Perhaps combine can fold
     them together later.  */
  tmp = XEXP (tmp, 0);
  if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
    return NULL_RTX;
  tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp,
				false);
  if (!tmp)
    return NULL_RTX;

  /* For sanity's sake, re-validate the new result.  */
  for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      return NULL_RTX;

  return tmp;
}
1773
1774 /* Return true if OP is ok for if-then-else processing. */
1775
1776 static int
1777 noce_operand_ok (rtx op)
1778 {
1779 /* We special-case memories, so handle any of them with
1780 no address side effects. */
1781 if (GET_CODE (op) == MEM)
1782 return ! side_effects_p (XEXP (op, 0));
1783
1784 if (side_effects_p (op))
1785 return FALSE;
1786
1787 return ! may_trap_p (op);
1788 }
1789
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   without using conditional execution.  Return TRUE if we were
   successful at converting the block.  */

static int
noce_process_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  struct noce_if_info if_info;
  rtx insn_a, insn_b;
  rtx set_a, set_b;
  rtx orig_x, x, a, b;
  rtx jump, cond;

  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

     The later patterns require jumps to be more expensive.

     ??? For future expansion, look for multiple X in such patterns.  */

  /* If test is comprised of && or || elements, don't handle it unless it is
     the special case of && elements without an ELSE block.  */
  if (ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      /* Treat the last test block as the controlling one and forget
	 the subtests.  */
      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &if_info.cond_earliest);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional
     jump, then we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Look for one of the potential sets.  The THEN block must consist
     of exactly one active insn that is a single set.  */
  insn_a = first_active_insn (then_bb);
  if (! insn_a
      || insn_a != last_active_insn (then_bb, FALSE)
      || (set_a = single_set (insn_a)) == NULL_RTX)
    return FALSE;

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */
  set_b = NULL_RTX;
  if (else_bb)
    {
      /* Pattern (1): the ELSE block must likewise be a single set
	 to the same destination.  */
      insn_b = first_active_insn (else_bb);
      if (! insn_b
	  || insn_b != last_active_insn (else_bb, FALSE)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b)))
	return FALSE;
    }
  else
    {
      /* Pattern (2): look above the condition for an insn "x = b".  */
      insn_b = prev_nonnote_insn (if_info.cond_earliest);
      /* We're going to be moving the evaluation of B down from above
	 COND_EARLIEST to JUMP.  Make sure the relevant data is still
	 intact.  */
      if (! insn_b
	  || GET_CODE (insn_b) != INSN
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b))
	  || reg_overlap_mentioned_p (x, SET_SRC (set_b))
	  || modified_between_p (SET_SRC (set_b),
				 PREV_INSN (if_info.cond_earliest), jump)
	  /* Likewise with X.  In particular this can happen when
	     noce_get_condition looks farther back in the instruction
	     stream than one might expect.  */
	  || reg_overlap_mentioned_p (x, cond)
	  || reg_overlap_mentioned_p (x, a)
	  || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
	insn_b = set_b = NULL_RTX;
    }

  /* If x has side effects then only the if-then-else form is safe to
     convert.  But even in that case we would need to restore any notes
     (such as REG_INC) at the end.  That can be tricky if
     noce_emit_move_insn expands to more than one insn, so disable the
     optimization entirely for now if there are side effects.  */
  if (side_effects_p (x))
    return FALSE;

  /* With no ELSE arm, B defaults to X itself (pattern (3)).  */
  b = (set_b ? SET_SRC (set_b) : x);

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */
  orig_x = x;
  if (GET_CODE (x) != REG
      || (SMALL_REGISTER_CLASSES
	  && REGNO (x) < FIRST_PSEUDO_REGISTER))
    {
      /* Compute into a temporary pseudo and copy to ORIG_X at the end.  */
      if (no_new_pseudos || GET_MODE (x) == BLKmode)
	return FALSE;
      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
				 ? XEXP (x, 0) : x));
    }

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
    return FALSE;

  /* Set up the info block for our subroutines.  */
  if_info.test_bb = test_bb;
  if_info.cond = cond;
  if_info.jump = jump;
  if_info.insn_a = insn_a;
  if_info.insn_b = insn_b;
  if_info.x = x;
  if_info.a = a;
  if_info.b = b;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (rtx_equal_p (a, b))
    {
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
	 move the instruction that we already have.  If we don't have an
	 INSN_B, that means that A == X, and we've got a noop move.  In
	 that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)
	{
	  rtx note;

	  if (else_bb && insn_b == BB_END (else_bb))
	    BB_END (else_bb) = PREV_INSN (insn_b);
	  reorder_insns (insn_b, insn_b, PREV_INSN (jump));

	  /* If there was a REG_EQUAL note, delete it since it may have been
	     true due to this insn being after a jump.  */
	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
	    remove_note (insn_b, note);

	  insn_b = NULL_RTX;
	}
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
	 x must be executed twice.  */
      else if (insn_b && side_effects_p (orig_x))
	return FALSE;

      x = orig_x;
      goto success;
    }

  /* Disallow the "if (...) x = a;" form (with an implicit "else x = x;")
     for most optimizations if writing to x may trap, i.e. it's a memory
     other than a static var or a stack slot.  */
  if (! set_b
      && GET_CODE (orig_x) == MEM
      && ! MEM_NOTRAP_P (orig_x)
      && rtx_addr_can_trap_p (XEXP (orig_x, 0)))
    {
      /* Only the cmove-based transforms are tried in this case.  */
      if (HAVE_conditional_move)
	{
	  if (noce_try_cmove (&if_info))
	    goto success;
	  if (! HAVE_conditional_execution
	      && noce_try_cmove_arith (&if_info))
	    goto success;
	}
      return FALSE;
    }

  if (noce_try_move (&if_info))
    goto success;
  if (noce_try_store_flag (&if_info))
    goto success;
  if (noce_try_minmax (&if_info))
    goto success;
  if (noce_try_abs (&if_info))
    goto success;
  if (HAVE_conditional_move
      && noce_try_cmove (&if_info))
    goto success;
  if (! HAVE_conditional_execution)
    {
      if (noce_try_store_flag_constants (&if_info))
	goto success;
      if (noce_try_addcc (&if_info))
	goto success;
      if (noce_try_store_flag_mask (&if_info))
	goto success;
      if (HAVE_conditional_move
	  && noce_try_cmove_arith (&if_info))
	goto success;
    }

  return FALSE;

 success:
  /* The original sets may now be killed.  */
  delete_insn (insn_a);

  /* Several special cases here: First, we may have reused insn_b above,
     in which case insn_b is now NULL.  Second, we want to delete insn_b
     if it came from the ELSE block, because it follows the now correct
     write that appears in the TEST block.  However, if we got insn_b from
     the TEST block, it may in fact be loading data needed for the comparison.
     We'll let life_analysis remove the insn if it's really dead.  */
  if (insn_b && else_bb)
    delete_insn (insn_b);

  /* The new insns will have been inserted immediately before the jump.  We
     should be able to remove the jump with impunity, but the condition itself
     may have been modified by gcse to be shared across basic blocks.  */
  delete_insn (jump);

  /* If we used a temporary, fix it up now.  */
  if (orig_x != x)
    {
      start_sequence ();
      noce_emit_move_insn (orig_x, x);
      insn_b = get_insns ();
      set_used_flags (orig_x);
      unshare_all_rtl_in_chain (insn_b);
      end_sequence ();

      emit_insn_after_setloc (insn_b, BB_END (test_bb), INSN_LOCATOR (insn_a));
    }

  /* Merge the blocks!  */
  merge_if_block (ce_info);

  return TRUE;
}
2047 \f
2048 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
2049 straight line code. Return true if successful. */
2050
2051 static int
2052 process_if_block (struct ce_if_block * ce_info)
2053 {
2054 if (! reload_completed
2055 && noce_process_if_block (ce_info))
2056 return TRUE;
2057
2058 if (HAVE_conditional_execution && reload_completed)
2059 {
2060 /* If we have && and || tests, try to first handle combining the && and
2061 || tests into the conditional code, and if that fails, go back and
2062 handle it without the && and ||, which at present handles the && case
2063 if there was no ELSE block. */
2064 if (cond_exec_process_if_block (ce_info, TRUE))
2065 return TRUE;
2066
2067 if (ce_info->num_multiple_test_blocks)
2068 {
2069 cancel_changes (0);
2070
2071 if (cond_exec_process_if_block (ce_info, FALSE))
2072 return TRUE;
2073 }
2074 }
2075
2076 return FALSE;
2077 }
2078
/* Merge the blocks and mark for local life update.  After a successful
   conversion, collapse the TEST, THEN, ELSE and (when adjacent) JOIN
   blocks into a single combined block.  */

static void
merge_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = ce_info->join_bb;	/* join block */
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;

  /* Merge any basic blocks to handle && and || subtests.  Each of
     the blocks are on the fallthru path from the predecessor block.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;
      basic_block fallthru = block_fallthru (bb);

      /* Walk the fallthru chain, merging each subtest block into
	 COMBO_BB until the last test block is absorbed.  */
      do
	{
	  bb = fallthru;
	  fallthru = block_fallthru (bb);
	  if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
	    delete_from_dominance_info (CDI_POST_DOMINATORS, bb);
	  merge_blocks (combo_bb, bb);
	  num_true_changes++;
	}
      while (bb != last_test_bb);
    }

  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
     label, but it might if there were || tests.  That label's count should be
     zero, and it normally should be removed.  */

  if (then_bb)
    {
      if (combo_bb->global_live_at_end)
	COPY_REG_SET (combo_bb->global_live_at_end,
		      then_bb->global_live_at_end);
      if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
	delete_from_dominance_info (CDI_POST_DOMINATORS, then_bb);
      merge_blocks (combo_bb, then_bb);
      num_true_changes++;
    }

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
	delete_from_dominance_info (CDI_POST_DOMINATORS, else_bb);
      merge_blocks (combo_bb, else_bb);
      num_true_changes++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      rtx last = BB_END (combo_bb);

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (combo_bb->succ == NULL_EDGE)
	{
	  /* No successors is only legitimate for noreturn calls and
	     unconditional traps.  */
	  if (find_reg_note (last, REG_NORETURN, NULL))
	    ;
	  else if (GET_CODE (last) == INSN
		   && GET_CODE (PATTERN (last)) == TRAP_IF
		   && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
	    ;
	  else
	    abort ();
	}

      /* There should still be something at the end of the THEN or ELSE
         blocks taking us to our final destination.  */
      else if (GET_CODE (last) == JUMP_INSN)
	;
      else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
	       && GET_CODE (last) == CALL_INSN
	       && SIBLING_CALL_P (last))
	;
      else if ((combo_bb->succ->flags & EDGE_EH)
	       && can_throw_internal (last))
	;
      else
	abort ();
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to abort).  */
  else if ((join_bb->pred == NULL
	    || join_bb->pred->pred_next == NULL)
	   && join_bb != EXIT_BLOCK_PTR)
    {
      /* We can merge the JOIN.  */
      if (combo_bb->global_live_at_end)
	COPY_REG_SET (combo_bb->global_live_at_end,
		      join_bb->global_live_at_end);

      if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
	delete_from_dominance_info (CDI_POST_DOMINATORS, join_bb);
      merge_blocks (combo_bb, join_bb);
      num_true_changes++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (combo_bb->succ->succ_next != NULL_EDGE
	  || combo_bb->succ->dest != join_bb)
	abort ();

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR)
	tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
    }

  num_updated_if_blocks++;
}
2213 \f
/* Find a block ending in a simple IF condition and try to transform it
   in some way.  When converting a multi-block condition, put the new code
   in the first such block and delete the rest.  Return a pointer to this
   first block if some transformation was done.  Return NULL otherwise.  */

static basic_block
find_if_header (basic_block test_bb, int pass)
{
  ce_if_block_t ce_info;
  edge then_edge;
  edge else_edge;

  /* The kind of block we're looking for has exactly two successors.  */
  if ((then_edge = test_bb->succ) == NULL_EDGE
      || (else_edge = then_edge->succ_next) == NULL_EDGE
      || else_edge->succ_next != NULL_EDGE)
    return NULL;

  /* Neither edge should be abnormal.  */
  if ((then_edge->flags & EDGE_COMPLEX)
      || (else_edge->flags & EDGE_COMPLEX))
    return NULL;

  /* Nor exit the loop.  */
  if ((then_edge->flags & EDGE_LOOP_EXIT)
      || (else_edge->flags & EDGE_LOOP_EXIT))
    return NULL;

  /* The THEN edge is canonically the one that falls through.  If the
     branch edge falls through instead, swap the two so the rest of the
     code can rely on that invariant.  */
  if (then_edge->flags & EDGE_FALLTHRU)
    ;
  else if (else_edge->flags & EDGE_FALLTHRU)
    {
      edge e = else_edge;
      else_edge = then_edge;
      then_edge = e;
    }
  else
    /* Otherwise this must be a multiway branch of some sort.  */
    return NULL;

  memset (&ce_info, '\0', sizeof (ce_info));
  ce_info.test_bb = test_bb;
  ce_info.then_bb = then_edge->dest;
  ce_info.else_bb = else_edge->dest;
  ce_info.pass = pass;

#ifdef IFCVT_INIT_EXTRA_FIELDS
  IFCVT_INIT_EXTRA_FIELDS (&ce_info);
#endif

  /* Try each recognizer in turn; the first that succeeds wins.  */
  if (find_if_block (&ce_info))
    goto success;

  if (HAVE_trap && HAVE_conditional_trap
      && find_cond_trap (test_bb, then_edge, else_edge))
    goto success;

  /* The case 1/2 transforms need fast post-dominator queries, and when
     the target has conditional execution they only run after reload.  */
  if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY
      && (! HAVE_conditional_execution || reload_completed))
    {
      if (find_if_case_1 (test_bb, then_edge, else_edge))
	goto success;
      if (find_if_case_2 (test_bb, then_edge, else_edge))
	goto success;
    }

  return NULL;

 success:
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Conversion succeeded on pass %d.\n", pass);
  /* find_if_block may have moved ce_info.test_bb back through a chain
     of && / || test blocks; return the (possibly updated) head.  */
  return ce_info.test_bb;
}
2288
2289 /* Return true if a block has two edges, one of which falls through to the next
2290 block, and the other jumps to a specific block, so that we can tell if the
2291 block is part of an && test or an || test. Returns either -1 or the number
2292 of non-note, non-jump, non-USE/CLOBBER insns in the block. */
2293
2294 static int
2295 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
2296 {
2297 edge cur_edge;
2298 int fallthru_p = FALSE;
2299 int jump_p = FALSE;
2300 rtx insn;
2301 rtx end;
2302 int n_insns = 0;
2303
2304 if (!cur_bb || !target_bb)
2305 return -1;
2306
2307 /* If no edges, obviously it doesn't jump or fallthru. */
2308 if (cur_bb->succ == NULL_EDGE)
2309 return FALSE;
2310
2311 for (cur_edge = cur_bb->succ;
2312 cur_edge != NULL_EDGE;
2313 cur_edge = cur_edge->succ_next)
2314 {
2315 if (cur_edge->flags & EDGE_COMPLEX)
2316 /* Anything complex isn't what we want. */
2317 return -1;
2318
2319 else if (cur_edge->flags & EDGE_FALLTHRU)
2320 fallthru_p = TRUE;
2321
2322 else if (cur_edge->dest == target_bb)
2323 jump_p = TRUE;
2324
2325 else
2326 return -1;
2327 }
2328
2329 if ((jump_p & fallthru_p) == 0)
2330 return -1;
2331
2332 /* Don't allow calls in the block, since this is used to group && and ||
2333 together for conditional execution support. ??? we should support
2334 conditional execution support across calls for IA-64 some day, but
2335 for now it makes the code simpler. */
2336 end = BB_END (cur_bb);
2337 insn = BB_HEAD (cur_bb);
2338
2339 while (insn != NULL_RTX)
2340 {
2341 if (GET_CODE (insn) == CALL_INSN)
2342 return -1;
2343
2344 if (INSN_P (insn)
2345 && GET_CODE (insn) != JUMP_INSN
2346 && GET_CODE (PATTERN (insn)) != USE
2347 && GET_CODE (PATTERN (insn)) != CLOBBER)
2348 n_insns++;
2349
2350 if (insn == end)
2351 break;
2352
2353 insn = NEXT_INSN (insn);
2354 }
2355
2356 return n_insns;
2357 }
2358
/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
   block.  If so, we'll try to convert the insns to not require the branch.
   Return TRUE if we were successful at converting the block.  */

static int
find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;
  basic_block then_bb = ce_info->then_bb;
  basic_block else_bb = ce_info->else_bb;
  basic_block join_bb = NULL_BLOCK;
  edge then_succ = then_bb->succ;
  edge else_succ = else_bb->succ;
  int then_predecessors;
  int else_predecessors;
  edge cur_edge;
  basic_block next;

  ce_info->last_test_bb = test_bb;

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  */
  if (HAVE_conditional_execution && reload_completed
      && test_bb->pred != NULL_EDGE
      && test_bb->pred->pred_next == NULL_EDGE
      && test_bb->pred->flags == EDGE_FALLTHRU)
    {
      basic_block bb = test_bb->pred->src;
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
	{
	  ce_info->and_and_p = TRUE;
	  target_bb = else_bb;
	}
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
	{
	  ce_info->and_and_p = FALSE;
	  target_bb = then_bb;
	}
      else
	target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
	{
	  int total_insns = 0;
	  int blocks = 0;

	  ce_info->last_test_bb = test_bb;

	  /* Found at least one && or || block, look for more.  Walk the
	     predecessor chain backwards, accumulating test blocks as long
	     as each qualifies and the total insn budget isn't exceeded.  */
	  do
	    {
	      /* ce_info->test_bb is moved back so the earliest test block
		 becomes the head of the whole if-conversion region.  */
	      ce_info->test_bb = test_bb = bb;
	      total_insns += n_insns;
	      blocks++;

	      if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
		break;

	      bb = bb->pred->src;
	      n_insns = block_jumps_and_fallthru_p (bb, target_bb);
	    }
	  while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

	  ce_info->num_multiple_test_blocks = blocks;
	  ce_info->num_multiple_test_insns = total_insns;

	  if (ce_info->and_and_p)
	    ce_info->num_and_and_blocks = blocks;
	  else
	    ce_info->num_or_or_blocks = blocks;
	}
    }

  /* Count the number of edges the THEN and ELSE blocks have.  Any complex
     (abnormal/EH) incoming edge disqualifies the shape entirely.  */
  then_predecessors = 0;
  for (cur_edge = then_bb->pred;
       cur_edge != NULL_EDGE;
       cur_edge = cur_edge->pred_next)
    {
      then_predecessors++;
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  else_predecessors = 0;
  for (cur_edge = else_bb->pred;
       cur_edge != NULL_EDGE;
       cur_edge = cur_edge->pred_next)
    {
      else_predecessors++;
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
    return FALSE;

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (then_succ != NULL_EDGE
      && (then_succ->succ_next != NULL_EDGE
	  || (then_succ->flags & EDGE_COMPLEX)
	  || (flow2_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  ??? we should fix this in the future.  */
  if (then_succ == NULL)
    {
      if (else_bb->pred->pred_next == NULL_EDGE)
	{
	  rtx last_insn = BB_END (then_bb);

	  /* Skip trailing notes to find the last real insn.  */
	  while (last_insn
		 && GET_CODE (last_insn) == NOTE
		 && last_insn != BB_HEAD (then_bb))
	    last_insn = PREV_INSN (last_insn);

	  /* A non-simple jump (e.g. indirect) at the end disqualifies.  */
	  if (last_insn
	      && GET_CODE (last_insn) == JUMP_INSN
	      && ! simplejump_p (last_insn))
	    return FALSE;

	  join_bb = else_bb;
	  else_bb = NULL_BLOCK;
	}
      else
	return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (then_succ->dest == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (else_succ != NULL_EDGE
	   && then_succ->dest == else_succ->dest
	   && else_bb->pred->pred_next == NULL_EDGE
	   && else_succ->succ_next == NULL_EDGE
	   && ! (else_succ->flags & EDGE_COMPLEX)
	   && ! (flow2_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
    join_bb = else_succ->dest;

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "\nIF-THEN%s block found, pass %d, start block %d [insn %d], then %d [%d]",
	       (else_bb) ? "-ELSE" : "",
	       ce_info->pass,
	       test_bb->index, (BB_HEAD (test_bb)) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
	       then_bb->index, (BB_HEAD (then_bb)) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);

      if (else_bb)
	fprintf (rtl_dump_file, ", else %d [%d]",
		 else_bb->index, (BB_HEAD (else_bb)) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);

      fprintf (rtl_dump_file, ", join %d [%d]",
	       join_bb->index, (BB_HEAD (join_bb)) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
	fprintf (rtl_dump_file, ", %d %s block%s last test %d [%d]",
		 ce_info->num_multiple_test_blocks,
		 (ce_info->and_and_p) ? "&&" : "||",
		 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
		 ce_info->last_test_bb->index,
		 ((BB_HEAD (ce_info->last_test_bb))
		  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
		  : -1));

      fputc ('\n', rtl_dump_file);
    }

  /* Make sure IF, THEN, and ELSE, blocks are adjacent.  Actually, we get the
     first condition for free, since we've already asserted that there's a
     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
     we checked the FALLTHRU flag, those are already adjacent to the last IF
     block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than aborting the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
    {
      if (else_bb)
	join_bb = NULL;
      else
	return FALSE;
    }

  /* Do the real work.  */
  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  return process_if_block (ce_info);
}
2578
/* Convert a branch over a trap, or a branch
   to a trap, into a conditional trap.  */

static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx trap, jump, cond, cond_earliest, seq;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
	       test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  If the branch jumps
     over the trap (to the THEN block), the trap must fire on the
     opposite condition.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
	return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, XEXP (cond, 0),
		       XEXP (cond, 1),
		       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  num_true_changes++;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));

  /* Delete the trap block if possible.  It becomes dead once the edge
     from TEST_BB is gone and no other predecessors remain.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  if (trap_bb->pred == NULL)
    {
      if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
	delete_from_dominance_info (CDI_POST_DOMINATORS, trap_bb);
      delete_block (trap_bb);
    }

  /* If the non-trap block and the test are now adjacent, merge them.
     Otherwise we must insert a direct branch.  */
  if (test_bb->next_bb == other_bb)
    {
      struct ce_if_block new_ce_info;
      delete_insn (jump);
      memset (&new_ce_info, '\0', sizeof (new_ce_info));
      new_ce_info.test_bb = test_bb;
      new_ce_info.then_bb = NULL;
      new_ce_info.else_bb = NULL;
      new_ce_info.join_bb = other_bb;
      merge_if_block (&new_ce_info);
    }
  else
    {
      rtx lab, newjump;

      /* Replace the conditional jump with an unconditional jump to the
	 same label, keeping the label's use count consistent.  */
      lab = JUMP_LABEL (jump);
      newjump = emit_jump_insn_after (gen_jump (lab), jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);

      delete_insn (jump);
    }

  return TRUE;
}
2680
2681 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2682 return it. */
2683
2684 static rtx
2685 block_has_only_trap (basic_block bb)
2686 {
2687 rtx trap;
2688
2689 /* We're not the exit block. */
2690 if (bb == EXIT_BLOCK_PTR)
2691 return NULL_RTX;
2692
2693 /* The block must have no successors. */
2694 if (bb->succ)
2695 return NULL_RTX;
2696
2697 /* The only instruction in the THEN block must be the trap. */
2698 trap = first_active_insn (bb);
2699 if (! (trap == BB_END (bb)
2700 && GET_CODE (PATTERN (trap)) == TRAP_IF
2701 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2702 return NULL_RTX;
2703
2704 return trap;
2705 }
2706
2707 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2708 transformable, but not necessarily the other. There need be no
2709 JOIN block.
2710
2711 Return TRUE if we were successful at converting the block.
2712
2713 Cases we'd like to look at:
2714
2715 (1)
2716 if (test) goto over; // x not live
2717 x = a;
2718 goto label;
2719 over:
2720
2721 becomes
2722
2723 x = a;
2724 if (! test) goto label;
2725
2726 (2)
2727 if (test) goto E; // x not live
2728 x = big();
2729 goto L;
2730 E:
2731 x = b;
2732 goto M;
2733
2734 becomes
2735
2736 x = b;
2737 if (test) goto M;
2738 x = big();
2739 goto L;
2740
2741 (3) // This one's really only interesting for targets that can do
2742 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2743 // it results in multiple branches on a cache line, which often
2744 // does not sit well with predictors.
2745
2746 if (test1) goto E; // predicted not taken
2747 x = a;
2748 if (test2) goto F;
2749 ...
2750 E:
2751 x = b;
2752 J:
2753
2754 becomes
2755
2756 x = a;
2757 if (test1) goto E;
2758 if (test2) goto F;
2759
2760 Notes:
2761
2762 (A) Don't do (2) if the branch is predicted against the block we're
2763 eliminating. Do it anyway if we can eliminate a branch; this requires
2764 that the sole successor of the eliminated block postdominate the other
2765 side of the if.
2766
2767 (B) With CE, on (3) we can steal from both sides of the if, creating
2768
2769 if (test1) x = a;
2770 if (!test1) x = b;
2771 if (test1) goto J;
2772 if (test2) goto F;
2773 ...
2774 J:
2775
2776 Again, this is most useful if J postdominates.
2777
2778 (C) CE substitutes for helpful life information.
2779
2780 (D) These heuristics need a lot of work. */
2781
/* Tests for case 1 above.  */

static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest, new_bb;
  edge then_succ = then_bb->succ;
  int then_bb_index;

  /* THEN has one successor.  */
  if (!then_succ || then_succ->succ_next != NULL)
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (then_bb->pred->pred_next != NULL)
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  /* Note: the candidate is dumped before the size and predicability
     checks below, so a dump entry does not imply a conversion.  */
  num_possible_if_blocks++;
  if (rtl_dump_file)
    fprintf (rtl_dump_file,
	     "\nIF-CASE-1 found, start %d, then %d\n",
	     test_bb->index, then_bb->index);

  /* THEN is small.  */
  if (count_bb_insns (then_bb) > BRANCH_COST)
    return FALSE;

  /* Registers set are dead, or are predicable.  This also moves the
     THEN insns before the branch and retargets the jump.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
			    then_bb->succ->dest, 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  bitmap_operation (test_bb->global_live_at_end,
		    else_bb->global_live_at_start,
		    then_bb->global_live_at_end, BITMAP_IOR);

  new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
  then_bb_index = then_bb->index;
  if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
    delete_from_dominance_info (CDI_POST_DOMINATORS, then_bb);
  delete_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      new_bb->index = then_bb_index;
      BASIC_BLOCK (then_bb_index) = new_bb;
      if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
	add_to_dominance_info (CDI_POST_DOMINATORS, new_bb);
    }
  /* We've possibly created jump to next insn, cleanup_cfg will solve that
     later.  */

  num_true_changes++;
  num_updated_if_blocks++;

  return TRUE;
}
2853
/* Test for case 2 above.  */

static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ = else_bb->succ;
  rtx note;

  /* ELSE has one successor.  */
  if (!else_succ || else_succ->succ_next != NULL)
    return FALSE;

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (else_bb->pred->pred_next != NULL)
    return FALSE;

  /* THEN is not EXIT.  (A negative index denotes the entry/exit
     pseudo-blocks.)  */
  if (then_bb->index < 0)
    return FALSE;

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  See note (A)
     above: only eliminate ELSE against the prediction when doing so
     removes a branch.  */
  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
    ;
  else if (else_succ->dest->index < 0
	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
			      else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (rtl_dump_file)
    fprintf (rtl_dump_file,
	     "\nIF-CASE-2 found, start %d, else %d\n",
	     test_bb->index, else_bb->index);

  /* ELSE is small.  */
  if (count_bb_insns (else_bb) > BRANCH_COST)
    return FALSE;

  /* Registers set are dead, or are predicable.  This also moves the
     ELSE insns before the branch (without reversing the condition).  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  bitmap_operation (test_bb->global_live_at_end,
		    then_bb->global_live_at_start,
		    else_bb->global_live_at_end, BITMAP_IOR);

  if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY)
    delete_from_dominance_info (CDI_POST_DOMINATORS, else_bb);
  delete_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}
2924
2925 /* A subroutine of dead_or_predicable called through for_each_rtx.
2926 Return 1 if a memory is found. */
2927
2928 static int
2929 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
2930 {
2931 return GET_CODE (*px) == MEM;
2932 }
2933
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  NEW_DEST is the
   label TEST_BB should be branching to after the conversion.
   REVERSEP is true if the sense of the branch should be reversed.  */

static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
		    basic_block other_bb, basic_block new_dest, int reversep)
{
  rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block, stripping any
     leading label or note and any trailing jump.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  if (GET_CODE (head) == CODE_LABEL)
    head = NEXT_INSN (head);
  if (GET_CODE (head) == NOTE)
    {
      if (head == end)
	{
	  /* The block is empty: skip straight to retargeting the jump.  */
	  head = end = NULL_RTX;
	  goto no_body;
	}
      head = NEXT_INSN (head);
    }

  if (GET_CODE (end) == JUMP_INSN)
    {
      if (head == end)
	{
	  head = end = NULL_RTX;
	  goto no_body;
	}
      end = PREV_INSN (end);
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (HAVE_conditional_execution)
    {
      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      rtx cond, prob_val;

      cond = cond_exec_get_condition (jump);
      if (! cond)
	return FALSE;

      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      if (prob_val)
	prob_val = XEXP (prob_val, 0);

      if (reversep)
	{
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
	    return FALSE;
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
				 XEXP (cond, 1));
	  /* Invert the branch probability along with the condition.  */
	  if (prob_val)
	    prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
	}

      if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
				     prob_val, 0))
	goto cancel;

      earliest = jump;
    }
  else
#endif
    {
      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

      rtx insn, cond, prev;
      regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
      regset merge_set, tmp, test_live, test_set;
      struct propagate_block_info *pbi;
      int i, fail = 0;

      /* Check for no calls or trapping operations.  */
      for (insn = head; ; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CALL_INSN)
	    return FALSE;
	  if (INSN_P (insn))
	    {
	      if (may_trap_p (PATTERN (insn)))
		return FALSE;

	      /* ??? Even non-trapping memories such as stack frame
		 references must be avoided.  For stores, we collect
		 no lifetime info; for reads, we'd have to assert
		 true_dependence false against every store in the
		 TEST range.  */
	      if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
		return FALSE;
	    }
	  if (insn == end)
	    break;
	}

      if (! any_condjump_p (jump))
	return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest);
      if (! cond)
	return FALSE;

      /* Collect:
	   MERGE_SET = set of registers set in MERGE_BB
	   TEST_LIVE = set of registers live at EARLIEST
	   TEST_SET  = set of registers set between EARLIEST and the
		       end of the block.  */

      tmp = INITIALIZE_REG_SET (tmp_head);
      merge_set = INITIALIZE_REG_SET (merge_set_head);
      test_live = INITIALIZE_REG_SET (test_live_head);
      test_set = INITIALIZE_REG_SET (test_set_head);

      /* ??? bb->local_set is only valid during calculate_global_regs_live,
	 so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
	 since we've already asserted that MERGE_BB is small.  */
      propagate_block (merge_bb, tmp, merge_set, merge_set, 0);

      /* For small register class machines, don't lengthen lifetimes of
	 hard registers before reload.  */
      if (SMALL_REGISTER_CLASSES && ! reload_completed)
	{
	  EXECUTE_IF_SET_IN_BITMAP
	    (merge_set, 0, i,
	     {
	       if (i < FIRST_PSEUDO_REGISTER
		   && ! fixed_regs[i]
		   && ! global_regs[i])
		 fail = 1;
	     });
	}

      /* For TEST, we're interested in a range of insns, not a whole block.
	 Moreover, we're interested in the insns live from OTHER_BB.  */

      COPY_REG_SET (test_live, other_bb->global_live_at_start);
      pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
				       0);

      /* Walk backwards from the jump to EARLIEST, accumulating life
	 information into TEST_LIVE/TEST_SET.  */
      for (insn = jump; ; insn = prev)
	{
	  prev = propagate_one_insn (pbi, insn);
	  if (insn == earliest)
	    break;
	}

      free_propagate_block_info (pbi);

      /* We can perform the transformation if
	   MERGE_SET & (TEST_SET | TEST_LIVE)
	 and
	   TEST_SET & merge_bb->global_live_at_start
	 are empty.  */

      bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
      bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
      EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);

      bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
			BITMAP_AND);
      EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);

      FREE_REG_SET (tmp);
      FREE_REG_SET (merge_set);
      FREE_REG_SET (test_live);
      FREE_REG_SET (test_set);

      if (fail)
	return FALSE;
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      new_label = block_label (new_dest);
      if (reversep
	  ? ! invert_jump_1 (jump, new_label)
	  : ! redirect_jump_1 (jump, new_label))
	goto cancel;
    }

  if (! apply_change_group ())
    return FALSE;

  if (other_bb != new_dest)
    {
      /* Maintain label use counts and the jump's target by hand, since
	 we bypassed invert_jump/redirect_jump above.  */
      if (old_dest)
	LABEL_NUSES (old_dest) -= 1;
      if (new_label)
	LABEL_NUSES (new_label) += 1;
      JUMP_LABEL (jump) = new_label;
      if (reversep)
	invert_br_probabilities (jump);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
	{
	  /* Swap count and probability between the branch and fallthru
	     edges to match the reversed condition.  */
	  gcov_type count, probability;
	  count = BRANCH_EDGE (test_bb)->count;
	  BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
	  FALLTHRU_EDGE (test_bb)->count = count;
	  probability = BRANCH_EDGE (test_bb)->probability;
	  BRANCH_EDGE (test_bb)->probability
	    = FALLTHRU_EDGE (test_bb)->probability;
	  FALLTHRU_EDGE (test_bb)->probability = probability;
	  update_br_prob_note (test_bb);
	}
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      if (end == BB_END (merge_bb))
	BB_END (merge_bb) = PREV_INSN (head);

      if (squeeze_notes (&head, &end))
	return TRUE;

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
	 At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  /* Undo any pending changes queued via the change group.  */
  cancel_changes (0);
  return FALSE;
}
3196 \f
/* Main entry point for all if-conversion.  X_LIFE_DATA_OK is nonzero
   when global life information is available and may be updated.  */

void
if_convert (int x_life_data_ok)
{
  basic_block bb;
  int pass;

  /* Reset the pass-wide statistics counters.  */
  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;
  life_data_ok = (x_life_data_ok != 0);

  if (! (* targetm.cannot_modify_jumps_p) ())
    mark_loop_exit_edges ();

  /* Free up basic_block_for_insn so that we don't have to keep it
     up to date, either here or in merge_blocks.  */
  free_basic_block_vars (1);

  /* Compute postdominators if we think we'll use them.  */
  if (HAVE_conditional_execution || life_data_ok)
    calculate_dominance_info (CDI_POST_DOMINATORS);

  if (life_data_ok)
    clear_bb_flags ();

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (rtl_dump_file && pass > 1)
	fprintf (rtl_dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB (bb)
	{
	  basic_block new_bb;
	  /* A successful conversion returns the (possibly new) head
	     block; retry from there until no more conversions apply.  */
	  while ((new_bb = find_if_header (bb, pass)))
	    bb = new_bb;
	}

#ifdef IFCVT_MULTIPLE_DUMPS
      if (rtl_dump_file && cond_exec_changed_p)
	print_rtl_with_bb (rtl_dump_file, get_insns ());
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (rtl_dump_file)
    fflush (rtl_dump_file);

  clear_aux_for_blocks ();

  /* Rebuild life info for basic blocks that require it.  */
  if (num_true_changes && life_data_ok)
    {
      /* If we allocated new pseudos, we must resize the array for sched1.  */
      if (max_regno < max_reg_num ())
	{
	  max_regno = max_reg_num ();
	  allocate_reg_info (max_regno, FALSE, FALSE);
	}
      update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
					PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
					| PROP_KILL_DEAD_CODE);
    }

  /* Write the final stats.  */
  if (rtl_dump_file && num_possible_if_blocks > 0)
    {
      fprintf (rtl_dump_file,
	       "\n%d possible IF blocks searched.\n",
	       num_possible_if_blocks);
      fprintf (rtl_dump_file,
	       "%d IF blocks converted.\n",
	       num_updated_if_blocks);
      fprintf (rtl_dump_file,
	       "%d true changes made.\n\n\n",
	       num_true_changes);
    }

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}