/* gcc/ifcvt.c  (extraction artifact: stray ChangeLog line and git-web
   path breadcrumb converted to a comment so the header is valid C.)  */
1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "optabs.h"
39 #include "toplev.h"
40 #include "tm_p.h"
41 #include "cfgloop.h"
42 #include "target.h"
43
44
45 #ifndef HAVE_conditional_execution
46 #define HAVE_conditional_execution 0
47 #endif
48 #ifndef HAVE_conditional_move
49 #define HAVE_conditional_move 0
50 #endif
51 #ifndef HAVE_incscc
52 #define HAVE_incscc 0
53 #endif
54 #ifndef HAVE_decscc
55 #define HAVE_decscc 0
56 #endif
57 #ifndef HAVE_trap
58 #define HAVE_trap 0
59 #endif
60 #ifndef HAVE_conditional_trap
61 #define HAVE_conditional_trap 0
62 #endif
63
64 #ifndef MAX_CONDITIONAL_EXECUTE
65 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
66 #endif
67
68 #define NULL_EDGE ((struct edge_def *)NULL)
69 #define NULL_BLOCK ((struct basic_block_def *)NULL)
70
/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made which require life information to be updated.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* True if life data ok at present.  */
static bool life_data_ok;

/* Forward references.  */
static int count_bb_insns (basic_block);
static rtx first_active_insn (basic_block);
static rtx last_active_insn (basic_block, int);
static int seq_contains_jump (rtx);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
static rtx cond_exec_get_condition (rtx);
static int cond_exec_process_if_block (ce_if_block_t *, int);
static rtx noce_get_condition (rtx, rtx *);
static int noce_operand_ok (rtx);
static int noce_process_if_block (ce_if_block_t *);
static int process_if_block (ce_if_block_t *);
static void merge_if_block (ce_if_block_t *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
static int find_if_block (ce_if_block_t *);
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int find_memory (rtx *, void *);
static int dead_or_predicable (basic_block, basic_block, basic_block,
			       basic_block, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx block_has_only_trap (basic_block);
static void mark_loop_exit_edges (void);
113 \f
114 /* Sets EDGE_LOOP_EXIT flag for all loop exits. */
115 static void
116 mark_loop_exit_edges (void)
117 {
118 struct loops loops;
119 basic_block bb;
120 edge e;
121
122 flow_loops_find (&loops, LOOP_TREE);
123 free_dominance_info (CDI_DOMINATORS);
124
125 if (loops.num > 1)
126 {
127 FOR_EACH_BB (bb)
128 {
129 for (e = bb->succ; e; e = e->succ_next)
130 {
131 if (find_common_loop (bb->loop_father, e->dest->loop_father)
132 != bb->loop_father)
133 e->flags |= EDGE_LOOP_EXIT;
134 else
135 e->flags &= ~EDGE_LOOP_EXIT;
136 }
137 }
138 }
139
140 flow_loops_free (&loops);
141 }
142
143 /* Count the number of non-jump active insns in BB. */
144
145 static int
146 count_bb_insns (basic_block bb)
147 {
148 int count = 0;
149 rtx insn = BB_HEAD (bb);
150
151 while (1)
152 {
153 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
154 count++;
155
156 if (insn == BB_END (bb))
157 break;
158 insn = NEXT_INSN (insn);
159 }
160
161 return count;
162 }
163
164 /* Return the first non-jump active insn in the basic block. */
165
166 static rtx
167 first_active_insn (basic_block bb)
168 {
169 rtx insn = BB_HEAD (bb);
170
171 if (GET_CODE (insn) == CODE_LABEL)
172 {
173 if (insn == BB_END (bb))
174 return NULL_RTX;
175 insn = NEXT_INSN (insn);
176 }
177
178 while (GET_CODE (insn) == NOTE)
179 {
180 if (insn == BB_END (bb))
181 return NULL_RTX;
182 insn = NEXT_INSN (insn);
183 }
184
185 if (GET_CODE (insn) == JUMP_INSN)
186 return NULL_RTX;
187
188 return insn;
189 }
190
191 /* Return the last non-jump active (non-jump) insn in the basic block. */
192
193 static rtx
194 last_active_insn (basic_block bb, int skip_use_p)
195 {
196 rtx insn = BB_END (bb);
197 rtx head = BB_HEAD (bb);
198
199 while (GET_CODE (insn) == NOTE
200 || GET_CODE (insn) == JUMP_INSN
201 || (skip_use_p
202 && GET_CODE (insn) == INSN
203 && GET_CODE (PATTERN (insn)) == USE))
204 {
205 if (insn == head)
206 return NULL_RTX;
207 insn = PREV_INSN (insn);
208 }
209
210 if (GET_CODE (insn) == CODE_LABEL)
211 return NULL_RTX;
212
213 return insn;
214 }
215
216 /* It is possible, especially when having dealt with multi-word
217 arithmetic, for the expanders to have emitted jumps. Search
218 through the sequence and return TRUE if a jump exists so that
219 we can abort the conversion. */
220
221 static int
222 seq_contains_jump (rtx insn)
223 {
224 while (insn)
225 {
226 if (GET_CODE (insn) == JUMP_INSN)
227 return 1;
228 insn = NEXT_INSN (insn);
229 }
230 return 0;
231 }
232
233 static basic_block
234 block_fallthru (basic_block bb)
235 {
236 edge e;
237
238 for (e = bb->succ;
239 e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
240 e = e->succ_next)
241 ;
242
243 return (e) ? e->dest : NULL_BLOCK;
244 }
245 \f
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.

   CE_INFO describes the if-block (used only by the IFCVT_MODIFY_INSN
   target macro, when defined).  START and END bound the insns to
   convert, inclusive.  TEST is the condition under which the insns
   should execute; PROB_VAL, when non-null, is the REG_BR_PROB value
   attached to predicated CALL_INSNs.  MOD_OK is nonzero when an insn
   is allowed to modify a register that TEST reads — in that case the
   modifying insn must be the last active insn of the range.

   All modifications are queued via validate_change (in-group mode)
   and are NOT applied here; the caller commits them with
   apply_change_group or undoes them with cancel_changes.  */

static int
cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
			 /* if block information */rtx start,
			 /* first insn to look at */rtx end,
			 /* last insn to look at */rtx test,
			 /* conditional execution test */rtx prob_val,
			 /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;
  rtx insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
	goto insn_done;

      if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
	abort ();

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
	{
	  /* ??? Ug.  Actually unlinking the thing is problematic,
	     given what we'd have to coordinate with our callers.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	  NOTE_SOURCE_FILE (insn) = 0;
	  goto insn_done;
	}

      /* Last insn wasn't last?  A prior insn modified TEST and was
	 therefore required to be the final active insn; seeing another
	 active insn after it means we must give up.  */
      if (must_be_last)
	return FALSE;

      if (modified_in_p (test, insn))
	{
	  if (!mod_ok)
	    return FALSE;
	  must_be_last = TRUE;
	}

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
	{
	  if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
	    return FALSE;

	  xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
			       COND_EXEC_TEST (pattern));
	  pattern = COND_EXEC_CODE (pattern);
	}

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
	return FALSE;
#endif

      validate_change (insn, &PATTERN (insn), pattern, 1);

      /* Record the branch probability of the enclosing test on
	 predicated calls.  */
      if (GET_CODE (insn) == CALL_INSN && prob_val)
	validate_change (insn, &REG_NOTES (insn),
			 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
					  REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
	break;
    }

  return TRUE;
}
337
338 /* Return the condition for a jump. Do not do any special processing. */
339
340 static rtx
341 cond_exec_get_condition (rtx jump)
342 {
343 rtx test_if, cond;
344
345 if (any_condjump_p (jump))
346 test_if = SET_SRC (pc_set (jump));
347 else
348 return NULL_RTX;
349 cond = XEXP (test_if, 0);
350
351 /* If this branches to JUMP_LABEL when the condition is false,
352 reverse the condition. */
353 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
354 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
355 {
356 enum rtx_code rev = reversed_comparison_code (cond, jump);
357 if (rev == UNKNOWN)
358 return NULL_RTX;
359
360 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
361 XEXP (cond, 1));
362 }
363
364 return cond;
365 }
366
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.

   CE_INFO describes the participating blocks.  DO_MULTIPLE_P is
   nonzero when a chain of && / || test blocks should be predicated as
   a whole; when the caller retries with it cleared, we fall back to
   using only the last test block (legal only for &&-chains without an
   ELSE).  All insn changes are queued with validate_change and either
   committed by apply_change_group or rolled back by cancel_changes,
   so a failed attempt leaves the insn stream untouched.  */

static int
cond_exec_process_if_block (ce_if_block_t * ce_info,
			    /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
  rtx then_start;		/* first insn in THEN block */
  rtx then_end;			/* last insn + 1 in THEN block */
  rtx else_start = NULL_RTX;	/* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;	/* last insn + 1 in ELSE block */
  int max;			/* max # of insns to convert. */
  int then_mod_ok;		/* whether conditional mods are ok in THEN */
  rtx true_expr;		/* test for else block insns */
  rtx false_expr;		/* test for then block insns */
  rtx true_prob_val;		/* probability of else block */
  rtx false_prob_val;		/* probability of then block */
  int n_insns;
  enum rtx_code false_code;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      /* With an ELSE arm each side gets its own budget of MAX insns.  */
      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
				 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  /* Derive the probability for the "branch not taken" side from the
     REG_BR_PROB note on the test jump, when one is present.  */
  true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (true_prob_val)
    {
      true_prob_val = XEXP (true_prob_val, 0);
      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
    }
  else
    false_prob_val = NULL_RTX;

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
	goto fail;

      do
	{
	  rtx start, end;
	  rtx t, f;

	  bb = block_fallthru (bb);
	  start = first_active_insn (bb);
	  end = last_active_insn (bb, TRUE);

	  /* The insns of an intermediate test block are predicated on
	     the accumulated condition so far; they may not modify it
	     (mod_ok == FALSE).  */
	  if (start
	      && ! cond_exec_process_insns (ce_info, start, end, false_expr,
					    false_prob_val, FALSE))
	    goto fail;

	  /* If the conditional jump is more than just a conditional jump, then
	     we can not do conditional execution conversion on this block.  */
	  if (! onlyjump_p (BB_END (bb)))
	    goto fail;

	  /* Find the conditional jump and isolate the test.  */
	  t = cond_exec_get_condition (BB_END (bb));
	  if (! t)
	    goto fail;

	  f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
			      GET_MODE (t),
			      XEXP (t, 0),
			      XEXP (t, 1));

	  /* Accumulate the chained condition: for && chains the TRUE
	     test is the AND of the individual tests and the FALSE test
	     the IOR of their inverses; for || chains vice versa.  */
	  if (ce_info->and_and_p)
	    {
	      t = gen_rtx_AND (GET_MODE (t), true_expr, t);
	      f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
	    }
	  else
	    {
	      t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
	      f = gen_rtx_AND (GET_MODE (t), false_expr, f);
	    }

	  /* If the machine description needs to modify the tests, such as
	     setting a conditional execution register from a comparison, it can
	     do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
	  IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

	  /* See if the conversion failed.  */
	  if (!t || !f)
	    goto fail;
#endif

	  true_expr = t;
	  false_expr = f;
	}
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
	  || ! cond_exec_process_insns (ce_info, then_start, then_end,
					false_expr, false_prob_val,
					then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
				    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
	     n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  */
  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
587 \f
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  basic_block test_bb;		/* Block containing the conditional jump.  */
  rtx insn_a, insn_b;		/* Insns whose setloc is reused for emitted
				   code (insn_a) — presumably the original
				   THEN/ELSE sets of X; confirm in
				   noce_process_if_block (not in view).  */
  rtx x, a, b;			/* The "if (cond) x = a; else x = b" operands:
				   X is a register, A and B freely evaluable.  */
  rtx jump, cond, cond_earliest; /* The jump, its tested condition, and the
				   insertion point for new insns.  */
};

static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
static int noce_try_move (struct noce_if_info *);
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
			    rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
static int noce_try_sign_mask (struct noce_if_info *);
616
/* Helper function for noce_try_store_flag*.

   Emit insns computing the store-flag value of IF_INFO->cond
   (reversed when REVERSEP is nonzero) into X.  NORMALIZE is forwarded
   to emit_store_flag: 0 accepts any nonzero encoding, otherwise the
   result must be exactly NORMALIZE.  Return the rtx holding the
   result, or NULL_RTX on failure.  Must be called inside an open
   sequence (start_sequence) — the emitted insns go into it.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
		      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  /* The condition is "complex" when either comparison operand is not
     a general operand.  */
  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
		  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx tmp;

      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
			    XEXP (cond, 1));
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      /* Keep the hand-built insn only if the target recognizes it.  */
      if (recog_memoized (tmp) >= 0)
	{
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insn (tmp);

	  if_info->cond_earliest = if_info->jump;

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  return emit_store_flag (x, code, XEXP (cond, 0),
			  XEXP (cond, 1), VOIDmode,
			  (code == LTU || code == LEU
			   || code == GEU || code == GTU), normalize);
}
676
677 /* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
678 X is the destination/target and Y is the value to copy. */
679
680 static void
681 noce_emit_move_insn (rtx x, rtx y)
682 {
683 enum machine_mode outmode, inmode;
684 rtx outer, inner;
685 int bitpos;
686
687 if (GET_CODE (x) != STRICT_LOW_PART)
688 {
689 emit_move_insn (x, y);
690 return;
691 }
692
693 outer = XEXP (x, 0);
694 inner = XEXP (outer, 0);
695 outmode = GET_MODE (outer);
696 inmode = GET_MODE (inner);
697 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
698 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
699 GET_MODE_BITSIZE (inmode));
700 }
701
702 /* Unshare sequence SEQ produced by if conversion. We care to mark
703 all arguments that may be shared with outer instruction stream. */
704 static void
705 unshare_ifcvt_sequence (struct noce_if_info *if_info, rtx seq)
706 {
707 set_used_flags (if_info->x);
708 set_used_flags (if_info->cond);
709 unshare_all_rtl_in_chain (seq);
710 }
711
712 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
713 "if (a == b) x = a; else x = b" into "x = b". */
714
715 static int
716 noce_try_move (struct noce_if_info *if_info)
717 {
718 rtx cond = if_info->cond;
719 enum rtx_code code = GET_CODE (cond);
720 rtx y, seq;
721
722 if (code != NE && code != EQ)
723 return FALSE;
724
725 /* This optimization isn't valid if either A or B could be a NaN
726 or a signed zero. */
727 if (HONOR_NANS (GET_MODE (if_info->x))
728 || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
729 return FALSE;
730
731 /* Check whether the operands of the comparison are A and in
732 either order. */
733 if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
734 && rtx_equal_p (if_info->b, XEXP (cond, 1)))
735 || (rtx_equal_p (if_info->a, XEXP (cond, 1))
736 && rtx_equal_p (if_info->b, XEXP (cond, 0))))
737 {
738 y = (code == EQ) ? if_info->a : if_info->b;
739
740 /* Avoid generating the move if the source is the destination. */
741 if (! rtx_equal_p (if_info->x, y))
742 {
743 start_sequence ();
744 noce_emit_move_insn (if_info->x, y);
745 seq = get_insns ();
746 unshare_ifcvt_sequence (if_info, seq);
747 end_sequence ();
748 emit_insn_before_setloc (seq, if_info->jump,
749 INSN_LOCATOR (if_info->insn_a));
750 }
751 return TRUE;
752 }
753 return FALSE;
754 }
755
756 /* Convert "if (test) x = 1; else x = 0".
757
758 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
759 tried in noce_try_store_flag_constants after noce_try_cmove has had
760 a go at the conversion. */
761
762 static int
763 noce_try_store_flag (struct noce_if_info *if_info)
764 {
765 int reversep;
766 rtx target, seq;
767
768 if (GET_CODE (if_info->b) == CONST_INT
769 && INTVAL (if_info->b) == STORE_FLAG_VALUE
770 && if_info->a == const0_rtx)
771 reversep = 0;
772 else if (if_info->b == const0_rtx
773 && GET_CODE (if_info->a) == CONST_INT
774 && INTVAL (if_info->a) == STORE_FLAG_VALUE
775 && (reversed_comparison_code (if_info->cond, if_info->jump)
776 != UNKNOWN))
777 reversep = 1;
778 else
779 return FALSE;
780
781 start_sequence ();
782
783 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
784 if (target)
785 {
786 if (target != if_info->x)
787 noce_emit_move_insn (if_info->x, target);
788
789 seq = get_insns ();
790 unshare_ifcvt_sequence (if_info, seq);
791 end_sequence ();
792 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
793
794 return TRUE;
795 }
796 else
797 {
798 end_sequence ();
799 return FALSE;
800 }
801 }
802
/* Convert "if (test) x = a; else x = b", for A and B constant.

   Emits a store-flag value for the condition and combines it with the
   two constants using whichever arithmetic identity fits the pair:
   add/subtract when the difference is STORE_FLAG_VALUE, a shift when
   one arm is zero and the other a power of two, IOR when one arm is
   -1, and the general mask-and-add form otherwise.  Inserts the insns
   before the jump and returns TRUE on success.  */

static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  enum machine_mode mode;

  if (! no_new_pseudos
      && GET_CODE (if_info->a) == CONST_INT
      && GET_CODE (if_info->b) == CONST_INT)
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      /* Make sure we can represent the difference between the two values.  */
      if ((itrue - ifalse > 0)
	  != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
	return FALSE;

      diff = trunc_int_for_mode (itrue - ifalse, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
		     != UNKNOWN);

      /* Pick the store-flag normalization (0/1/-1) and decide whether
	 the comparison must be reversed so that one of the identities
	 below applies.  Reversing swaps ITRUE and IFALSE.  */
      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
	       && (STORE_FLAG_VALUE == 1
		   || BRANCH_COST >= 2))
	normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
	       && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
	normalize = 1, reversep = 1;
      else if (itrue == -1
	       && (STORE_FLAG_VALUE == -1
		   || BRANCH_COST >= 2))
	normalize = -1;
      else if (ifalse == -1 && can_reverse
	       && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
	normalize = -1, reversep = 1;
      else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
	       || BRANCH_COST >= 3)
	normalize = -1;
      else
	return FALSE;

      if (reversep)
	{
	  tmp = itrue; itrue = ifalse; ifalse = tmp;
	  diff = trunc_int_for_mode (-diff, mode);
	}

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      /* if (test) x = 3; else x = 4;
	 =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
	{
	  target = expand_simple_binop (mode,
					(diff == STORE_FLAG_VALUE
					 ? PLUS : MINUS),
					GEN_INT (ifalse), target, if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = 8; else x = 0;
	 =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
	{
	  target = expand_simple_binop (mode, ASHIFT,
					target, GEN_INT (tmp), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = -1; else x = b;
	 =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
	{
	  target = expand_simple_binop (mode, IOR,
					target, GEN_INT (ifalse), if_info->x, 0,
					OPTAB_WIDEN);
	}

      /* if (test) x = a; else x = b;
	 =>   x = (-(test != 0) & (b - a)) + a;  */
      else
	{
	  target = expand_simple_binop (mode, AND,
					target, GEN_INT (diff), if_info->x, 0,
					OPTAB_WIDEN);
	  if (target)
	    target = expand_simple_binop (mode, PLUS,
					  target, GEN_INT (ifalse),
					  if_info->x, 0, OPTAB_WIDEN);
	}

      if (! target)
	{
	  end_sequence ();
	  return FALSE;
	}

      if (target != if_info->x)
	noce_emit_move_insn (if_info->x, target);

      seq = get_insns ();
      unshare_ifcvt_sequence (if_info, seq);
      end_sequence ();

      /* The expanders may have emitted jumps (e.g. for multi-word
	 arithmetic), which we cannot predicate; give up in that case.  */
      if (seq_contains_jump (seq))
	return FALSE;

      emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));

      return TRUE;
    }

  return FALSE;
}
934
/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".

   Handles "if (test) x = b + c" where the addend's first operand
   equals B.  First tries the target's conditional-add (addcc)
   pattern; failing that, builds the result from a store-flag value
   when C is +-1.  Inserts the insns before the jump and returns TRUE
   on success.  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target, seq;
  int subtract, normalize;

  if (! no_new_pseudos
      && GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
	  != UNKNOWN))
    {
      rtx cond = if_info->cond;
      /* The reversed comparison selects when the addition applies.  */
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
	  && general_operand (XEXP (cond, 1), VOIDmode))
	{
	  start_sequence ();
	  target = emit_conditional_add (if_info->x, code,
					 XEXP (cond, 0),
					 XEXP (cond, 1),
					 VOIDmode,
					 if_info->b,
					 XEXP (if_info->a, 1),
					 GET_MODE (if_info->x),
					 (code == LTU || code == GEU
					  || code == LEU || code == GTU));
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = get_insns ();
	      unshare_ifcvt_sequence (if_info, seq);
	      end_sequence ();
	      emit_insn_before_setloc (seq, if_info->jump,
				      INSN_LOCATOR (if_info->insn_a));
	      return TRUE;
	    }
	  end_sequence ();
	}

      /* If that fails, construct conditional increment or decrement using
	 setcc.  */
      if (BRANCH_COST >= 2
	  && (XEXP (if_info->a, 1) == const1_rtx
	      || XEXP (if_info->a, 1) == constm1_rtx))
	{
	  start_sequence ();
	  /* Choose how the +-1 is derived from the store-flag value.  */
	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 0, normalize = 0;
	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
	    subtract = 1, normalize = 0;
	  else
	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));


	  target = noce_emit_store_flag (if_info,
					 gen_reg_rtx (GET_MODE (if_info->x)),
					 1, normalize);

	  if (target)
	    target = expand_simple_binop (GET_MODE (if_info->x),
					  subtract ? MINUS : PLUS,
					  if_info->b, target, if_info->x,
					  0, OPTAB_WIDEN);
	  if (target)
	    {
	      if (target != if_info->x)
		noce_emit_move_insn (if_info->x, target);

	      seq = get_insns ();
	      unshare_ifcvt_sequence (if_info, seq);
	      end_sequence ();

	      /* Expander-emitted jumps cannot be predicated; give up.  */
	      if (seq_contains_jump (seq))
		return FALSE;

	      emit_insn_before_setloc (seq, if_info->jump,
				      INSN_LOCATOR (if_info->insn_a));

	      return TRUE;
	    }
	  end_sequence ();
	}
    }

  return FALSE;
}
1029
/* Convert "if (test) x = 0;" to "x &= -(test == 0);"

   Also handles the mirrored "else x = 0" form when the comparison can
   be reversed.  Note that the second arm of the big condition below
   assigns REVERSEP as a side effect while testing reversibility.  */

static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;

  reversep = 0;
  if (! no_new_pseudos
      && (BRANCH_COST >= 2
	  || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
	   && rtx_equal_p (if_info->b, if_info->x))
	  || ((reversep = (reversed_comparison_code (if_info->cond,
						     if_info->jump)
			   != UNKNOWN))
	      && if_info->b == const0_rtx
	      && rtx_equal_p (if_info->a, if_info->x))))
    {
      start_sequence ();
      /* Build an all-ones/zero mask from the (possibly reversed)
	 condition (normalize == -1) ...  */
      target = noce_emit_store_flag (if_info,
				     gen_reg_rtx (GET_MODE (if_info->x)),
				     reversep, -1);
      /* ... and AND it into X.  */
      if (target)
        target = expand_simple_binop (GET_MODE (if_info->x), AND,
				      if_info->x,
				      target, if_info->x, 0,
				      OPTAB_WIDEN);

      if (target)
	{
	  if (target != if_info->x)
	    noce_emit_move_insn (if_info->x, target);

	  seq = get_insns ();
	  unshare_ifcvt_sequence (if_info, seq);
	  end_sequence ();

	  /* Expander-emitted jumps cannot be predicated; give up.  */
	  if (seq_contains_jump (seq))
	    return FALSE;

	  emit_insn_before_setloc (seq, if_info->jump,
				   INSN_LOCATOR (if_info->insn_a));

	  return TRUE;
	}

      end_sequence ();
    }

  return FALSE;
}
1083
/* Helper function for noce_try_cmove and noce_try_cmove_arith.

   Emit a conditional move: X gets VTRUE when "CMP_A CODE CMP_B" holds
   and VFALSE otherwise.  Returns the rtx holding the result (possibly
   X itself) or NULL_RTX when no usable cmove could be produced.  Must
   be called inside an open sequence.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx tmp;

      tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      /* Keep the hand-built insn only if the target recognizes it.  */
      if (recog_memoized (tmp) >= 0)
	{
	  tmp = get_insns ();
	  end_sequence ();
	  emit_insn (tmp);

	  return x;
	}

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    return NULL_RTX;

#if HAVE_conditional_move
  return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
				vtrue, vfalse, GET_MODE (x),
				(code == LTU || code == GEU
				 || code == LEU || code == GTU));
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}
1136
1137 /* Try only simple constants and registers here. More complex cases
1138 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1139 has had a go at it. */
1140
1141 static int
1142 noce_try_cmove (struct noce_if_info *if_info)
1143 {
1144 enum rtx_code code;
1145 rtx target, seq;
1146
1147 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1148 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1149 {
1150 start_sequence ();
1151
1152 code = GET_CODE (if_info->cond);
1153 target = noce_emit_cmove (if_info, if_info->x, code,
1154 XEXP (if_info->cond, 0),
1155 XEXP (if_info->cond, 1),
1156 if_info->a, if_info->b);
1157
1158 if (target)
1159 {
1160 if (target != if_info->x)
1161 noce_emit_move_insn (if_info->x, target);
1162
1163 seq = get_insns ();
1164 unshare_ifcvt_sequence (if_info, seq);
1165 end_sequence ();
1166 emit_insn_before_setloc (seq, if_info->jump,
1167 INSN_LOCATOR (if_info->insn_a));
1168 return TRUE;
1169 }
1170 else
1171 {
1172 end_sequence ();
1173 return FALSE;
1174 }
1175 }
1176
1177 return FALSE;
1178 }
1179
/* Try more complex cases involving conditional_move.
   Convert "if (test) x = a; else x = b" where A and/or B may be
   arbitrary (side-effect-free) expressions, by computing both arms
   unconditionally into registers and selecting with a cmove.
   Returns TRUE on success.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx insn_a, insn_b;
  rtx tmp, target;
  int is_mem = 0;	/* nonzero when selecting between two MEM addresses */
  enum rtx_code code;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  if (! no_new_pseudos && cse_not_expected
      && GET_CODE (a) == MEM && GET_CODE (b) == MEM
      && BRANCH_COST >= 5)
    {
      /* Select between the addresses; the single load happens below.  */
      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (Pmode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_p (a) || may_trap_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
	x = c - d;
	if (test)
	  x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
	reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
	reversep = 1;

      if (reversep)
	{
	  /* Swap the arms and reverse the comparison accordingly.  */
	  code = reversed_comparison_code (if_info->cond, if_info->jump);
	  tmp = a, a = b, b = tmp;
	  tmp = insn_a, insn_a = insn_b, insn_b = tmp;
	}
    }

  start_sequence ();

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx set;

      if (no_new_pseudos)
	goto end_seq_and_fail;

      if (is_mem)
	{
	  tmp = gen_reg_rtx (GET_MODE (a));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
	}
      else if (! insn_a)
	goto end_seq_and_fail;
      else
	{
	  /* Clone the original insn, redirecting its destination to a
	     fresh pseudo so clobbers and notes are preserved.  */
	  a = gen_reg_rtx (GET_MODE (a));
	  tmp = copy_rtx (insn_a);
	  set = single_set (tmp);
	  SET_DEST (set) = a;
	  tmp = emit_insn (PATTERN (tmp));
	}
      /* The modified insn must still be recognizable.  */
      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx set;

      if (no_new_pseudos)
	goto end_seq_and_fail;

      if (is_mem)
	{
	  tmp = gen_reg_rtx (GET_MODE (b));
	  tmp = emit_insn (gen_rtx_SET (VOIDmode,
					tmp,
					b));
	}
      else if (! insn_b)
	goto end_seq_and_fail;
      else
	{
	  /* Same cloning trick as for A above.  */
	  b = gen_reg_rtx (GET_MODE (b));
	  tmp = copy_rtx (insn_b);
	  set = single_set (tmp);
	  SET_DEST (set) = b;
	  tmp = emit_insn (PATTERN (tmp));
	}
      if (recog_memoized (tmp) < 0)
	goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
			    XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory for above, emit the load now.  */
  if (is_mem)
    {
      /* X holds the selected address; build the MEM and carry over
	 whatever flags both source MEMs agree on.  */
      tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
	MEM_VOLATILE_P (tmp) = 1;
      if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
	MEM_IN_STRUCT_P (tmp) = 1;
      if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
	MEM_SCALAR_P (tmp) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
	set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
      /* Use the weaker of the two alignments for safety.  */
      set_mem_align (tmp,
		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      noce_emit_move_insn (if_info->x, tmp);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  tmp = get_insns ();
  unshare_ifcvt_sequence (if_info, tmp);
  end_sequence ();
  emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
1338
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.

   TARGET is the rtx (typically IF_INFO->a or ->b, possibly a
   CONST_INT) we want mentioned in the condition; *EARLIEST is set to
   the earliest insn at which the returned condition is valid.
   Returns the condition, or NULL on failure.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
			rtx *earliest)
{
  rtx cond, set, insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  /* The branch is "reversed" when it jumps to JUMP_LABEL on a false
     condition (i.e. the else-arm of the IF_THEN_ELSE is the label).  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
	replacing "if x < 4" with "if x <= 3".  We can undo that (or
	make equivalent types of changes) to get the constants we need
	if they're off by one in the right direction.  */

  if (GET_CODE (target) == CONST_INT)
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = PREV_INSN (if_info->cond_earliest);
      if (prev_insn
	  && INSN_P (prev_insn)
	  && GET_CODE (PATTERN (prev_insn)) == SET)
	{
	  /* Prefer a REG_EQUAL/REG_EQUIV note over the raw SET_SRC.  */
	  rtx src = find_reg_equal_equiv_note (prev_insn);
	  if (!src)
	    src = SET_SRC (PATTERN (prev_insn));
	  if (GET_CODE (src) == CONST_INT)
	    {
	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
		op_a = src;
	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
		op_b = src;

	      /* Canonicalize: keep any constant as the second operand.  */
	      if (GET_CODE (op_a) == CONST_INT)
		{
		  rtx tmp = op_a;
		  op_a = op_b;
		  op_b = tmp;
		  code = swap_condition (code);
		}
	    }
	}

      /* Now, look to see if we can get the right constant by
	 adjusting the conditional.  */
      if (GET_CODE (op_b) == CONST_INT)
	{
	  HOST_WIDE_INT desired_val = INTVAL (target);
	  HOST_WIDE_INT actual_val = INTVAL (op_b);

	  /* Each case below undoes an off-by-one canonicalization,
	     e.g. rewriting "x < C+1" as the equivalent "x <= C".  */
	  switch (code)
	    {
	    case LT:
	      if (actual_val == desired_val + 1)
		{
		  code = LE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case LE:
	      if (actual_val == desired_val - 1)
		{
		  code = LT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GT:
	      if (actual_val == desired_val - 1)
		{
		  code = GE;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    case GE:
	      if (actual_val == desired_val + 1)
		{
		  code = GT;
		  op_b = GEN_INT (desired_val);
		}
	      break;
	    default:
	      break;
	    }
	}

      /* If we made any changes, generate a new conditional that is
	 equivalent to what we started with, but has the right
	 constants in it.  */
      if (code != GET_CODE (if_info->cond)
	  || op_a != XEXP (if_info->cond, 0)
	  || op_b != XEXP (if_info->cond, 1))
	{
	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
	  *earliest = if_info->cond_earliest;
	  return cond;
	}
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
				 earliest, target, false);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (modified_in_p (if_info->a, insn)
	    || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
1487
1488 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1489
1490 static int
1491 noce_try_minmax (struct noce_if_info *if_info)
1492 {
1493 rtx cond, earliest, target, seq;
1494 enum rtx_code code, op;
1495 int unsignedp;
1496
1497 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1498 if (no_new_pseudos)
1499 return FALSE;
1500
1501 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1502 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1503 to get the target to tell us... */
1504 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1505 || HONOR_NANS (GET_MODE (if_info->x)))
1506 return FALSE;
1507
1508 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1509 if (!cond)
1510 return FALSE;
1511
1512 /* Verify the condition is of the form we expect, and canonicalize
1513 the comparison code. */
1514 code = GET_CODE (cond);
1515 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1516 {
1517 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1518 return FALSE;
1519 }
1520 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1521 {
1522 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1523 return FALSE;
1524 code = swap_condition (code);
1525 }
1526 else
1527 return FALSE;
1528
1529 /* Determine what sort of operation this is. Note that the code is for
1530 a taken branch, so the code->operation mapping appears backwards. */
1531 switch (code)
1532 {
1533 case LT:
1534 case LE:
1535 case UNLT:
1536 case UNLE:
1537 op = SMAX;
1538 unsignedp = 0;
1539 break;
1540 case GT:
1541 case GE:
1542 case UNGT:
1543 case UNGE:
1544 op = SMIN;
1545 unsignedp = 0;
1546 break;
1547 case LTU:
1548 case LEU:
1549 op = UMAX;
1550 unsignedp = 1;
1551 break;
1552 case GTU:
1553 case GEU:
1554 op = UMIN;
1555 unsignedp = 1;
1556 break;
1557 default:
1558 return FALSE;
1559 }
1560
1561 start_sequence ();
1562
1563 target = expand_simple_binop (GET_MODE (if_info->x), op,
1564 if_info->a, if_info->b,
1565 if_info->x, unsignedp, OPTAB_WIDEN);
1566 if (! target)
1567 {
1568 end_sequence ();
1569 return FALSE;
1570 }
1571 if (target != if_info->x)
1572 noce_emit_move_insn (if_info->x, target);
1573
1574 seq = get_insns ();
1575 unshare_ifcvt_sequence (if_info, seq);
1576 end_sequence ();
1577
1578 if (seq_contains_jump (seq))
1579 return FALSE;
1580
1581 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1582 if_info->cond = cond;
1583 if_info->cond_earliest = earliest;
1584
1585 return TRUE;
1586 }
1587
/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc.
   Returns TRUE and emits the replacement insns on success.  */

static int
noce_try_abs (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq, a, b, c;
  int negate;		/* nonzero when computing NABS rather than ABS */

  /* ??? Can't guarantee that expand_binop won't create pseudos.  */
  if (no_new_pseudos)
    return FALSE;

  /* Recognize A and B as constituting an ABS or NABS.  After this,
     B is the non-negated operand.  */
  a = if_info->a;
  b = if_info->b;
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
    negate = 0;
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
    }
  else
    return FALSE;

  cond = noce_get_alt_condition (if_info, b, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect.  C is the operand
     B is compared against; it must turn out to be zero below.  */
  if (rtx_equal_p (XEXP (cond, 0), b))
    c = XEXP (cond, 1);
  else if (rtx_equal_p (XEXP (cond, 1), b))
    c = XEXP (cond, 0);
  else
    return FALSE;

  /* Verify that C is zero.  Search backward through the block for
     a REG_EQUAL note if necessary.  */
  if (REG_P (c))
    {
      rtx insn, note = NULL;
      for (insn = earliest;
	   insn != BB_HEAD (if_info->test_bb);
	   insn = PREV_INSN (insn))
	if (INSN_P (insn)
	    && ((note = find_reg_note (insn, REG_EQUAL, c))
		|| (note = find_reg_note (insn, REG_EQUIV, c))))
	  break;
      if (! note)
	return FALSE;
      c = XEXP (note, 0);
    }
  /* The constant may also be a load from the constant pool.  */
  if (GET_CODE (c) == MEM
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
     Note that these rtx constants are known to be CONST_INT, and
     therefore imply integer comparisons.  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)
    ;		/* "b > -1" means "b >= 0".  */
  else if (c == const1_rtx && GET_CODE (cond) == LT)
    ;		/* "b < 1" means "b <= 0".  */
  else if (c != CONST0_RTX (GET_MODE (b)))
    return FALSE;

  /* Determine what sort of operation this is.  */
  switch (GET_CODE (cond))
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      /* The branch selects the negated arm when B is negative, which
	 inverts the sense of NEGATE.  */
      negate = !negate;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);

  /* ??? It's a quandary whether cmove would be better here, especially
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
    target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);

  if (! target)
    {
      end_sequence ();
      return FALSE;
    }

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = get_insns ();
  unshare_ifcvt_sequence (if_info, seq);
  end_sequence ();

  /* A branch in the expansion defeats the purpose of if-conversion.  */
  if (seq_contains_jump (seq))
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
1705
1706 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
1707
1708 static int
1709 noce_try_sign_mask (struct noce_if_info *if_info)
1710 {
1711 rtx cond, t, m, c, seq;
1712 enum machine_mode mode;
1713 enum rtx_code code;
1714
1715 if (no_new_pseudos)
1716 return FALSE;
1717
1718 cond = if_info->cond;
1719 code = GET_CODE (cond);
1720 m = XEXP (cond, 0);
1721 c = XEXP (cond, 1);
1722
1723 t = NULL_RTX;
1724 if (if_info->a == const0_rtx)
1725 {
1726 if ((code == LT && c == const0_rtx)
1727 || (code == LE && c == constm1_rtx))
1728 t = if_info->b;
1729 }
1730 else if (if_info->b == const0_rtx)
1731 {
1732 if ((code == GE && c == const0_rtx)
1733 || (code == GT && c == constm1_rtx))
1734 t = if_info->a;
1735 }
1736
1737 if (! t || side_effects_p (t))
1738 return FALSE;
1739
1740 /* We currently don't handle different modes. */
1741 mode = GET_MODE (t);
1742 if (GET_MODE (m) != mode)
1743 return FALSE;
1744
1745 /* This is only profitable if T is cheap. */
1746 if (rtx_cost (t, SET) >= COSTS_N_INSNS (2))
1747 return FALSE;
1748
1749 start_sequence ();
1750 c = gen_int_mode (GET_MODE_BITSIZE (mode) - 1, mode);
1751 m = expand_binop (mode, ashr_optab, m, c, NULL_RTX, 0, OPTAB_DIRECT);
1752 t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
1753 : NULL_RTX;
1754
1755 if (!t)
1756 {
1757 end_sequence ();
1758 return FALSE;
1759 }
1760
1761 noce_emit_move_insn (if_info->x, t);
1762 seq = get_insns ();
1763 unshare_ifcvt_sequence (if_info, seq);
1764 end_sequence ();
1765 emit_insn_before_setloc (seq, if_info->jump,
1766 INSN_LOCATOR (if_info->insn_a));
1767 return TRUE;
1768 }
1769
1770
/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.  *EARLIEST receives the
   earliest insn at which the condition holds.  Returns the condition
   rtx, or NULL_RTX if none can be derived.  */

static rtx
noce_get_condition (rtx jump, rtx *earliest)
{
  rtx cond, set, tmp, insn;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	     && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* If the condition variable is a register and is MODE_INT, accept it.  */

  cond = XEXP (SET_SRC (set), 0);
  tmp = XEXP (cond, 0);
  if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
    {
      *earliest = jump;

      if (reverse)
	cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
			       GET_MODE (cond), tmp, XEXP (cond, 1));
      return cond;
    }

  /* Otherwise, fall back on canonicalize_condition to do the dirty
     work of manipulating MODE_CC values and COMPARE rtx codes.  */

  tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
				false);
  if (!tmp)
    return NULL_RTX;

  /* We are going to insert code before JUMP, not before EARLIEST.
     We must therefore be certain that the given condition is valid
     at JUMP by virtue of not having been modified since.  */
  for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      break;
  if (insn == jump)
    return tmp;

  /* The condition was modified.  See if we can get a partial result
     that doesn't follow all the reversals.  Perhaps combine can fold
     them together later.  */
  tmp = XEXP (tmp, 0);
  if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
    return NULL_RTX;
  /* Re-canonicalize, this time requiring that TMP (the first operand
     of the canonical condition) itself survives to JUMP.  */
  tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp,
				false);
  if (!tmp)
    return NULL_RTX;

  /* For sanity's sake, re-validate the new result.  */
  for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      return NULL_RTX;

  return tmp;
}
1839
1840 /* Return true if OP is ok for if-then-else processing. */
1841
1842 static int
1843 noce_operand_ok (rtx op)
1844 {
1845 /* We special-case memories, so handle any of them with
1846 no address side effects. */
1847 if (GET_CODE (op) == MEM)
1848 return ! side_effects_p (XEXP (op, 0));
1849
1850 if (side_effects_p (op))
1851 return FALSE;
1852
1853 return ! may_trap_p (op);
1854 }
1855
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   without using conditional execution.  Return TRUE if we were
   successful at converting the block.  */

static int
noce_process_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  struct noce_if_info if_info;
  rtx insn_a, insn_b;
  rtx set_a, set_b;
  rtx orig_x, x, a, b;
  rtx jump, cond;

  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

     The later patterns require jumps to be more expensive.

     ??? For future expansion, look for multiple X in such patterns.  */

  /* If test is comprised of && or || elements, don't handle it unless it is
     the special case of && elements without an ELSE block.  */
  if (ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
	return FALSE;

      /* Restart from the last test block, treating the earlier &&
	 tests as already handled.  */
      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &if_info.cond_earliest);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional
     jump, then we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Look for one of the potential sets.  The THEN block must consist
     of exactly one active insn that is a single SET.  */
  insn_a = first_active_insn (then_bb);
  if (! insn_a
      || insn_a != last_active_insn (then_bb, FALSE)
      || (set_a = single_set (insn_a)) == NULL_RTX)
    return FALSE;

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */
  set_b = NULL_RTX;
  if (else_bb)
    {
      /* Pattern (1): the ELSE block must likewise be a single SET to X.  */
      insn_b = first_active_insn (else_bb);
      if (! insn_b
	  || insn_b != last_active_insn (else_bb, FALSE)
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b)))
	return FALSE;
    }
  else
    {
      /* Pattern (2): look for "x = b" just before the condition.  */
      insn_b = prev_nonnote_insn (if_info.cond_earliest);
      /* We're going to be moving the evaluation of B down from above
	 COND_EARLIEST to JUMP.  Make sure the relevant data is still
	 intact.  */
      if (! insn_b
	  || GET_CODE (insn_b) != INSN
	  || (set_b = single_set (insn_b)) == NULL_RTX
	  || ! rtx_equal_p (x, SET_DEST (set_b))
	  || reg_overlap_mentioned_p (x, SET_SRC (set_b))
	  || modified_between_p (SET_SRC (set_b),
				 PREV_INSN (if_info.cond_earliest), jump)
	  /* Likewise with X.  In particular this can happen when
	     noce_get_condition looks farther back in the instruction
	     stream than one might expect.  */
	  || reg_overlap_mentioned_p (x, cond)
	  || reg_overlap_mentioned_p (x, a)
	  || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
	insn_b = set_b = NULL_RTX;
    }

  /* If x has side effects then only the if-then-else form is safe to
     convert.  But even in that case we would need to restore any notes
     (such as REG_INC) at the end.  That can be tricky if
     noce_emit_move_insn expands to more than one insn, so disable the
     optimization entirely for now if there are side effects.  */
  if (side_effects_p (x))
    return FALSE;

  b = (set_b ? SET_SRC (set_b) : x);

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */
  orig_x = x;
  if (!REG_P (x)
      || (SMALL_REGISTER_CLASSES
	  && REGNO (x) < FIRST_PSEUDO_REGISTER))
    {
      if (no_new_pseudos || GET_MODE (x) == BLKmode)
	return FALSE;
      /* Compute into a temporary; we copy into ORIG_X at the end.  */
      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
				 ? XEXP (x, 0) : x));
    }

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
    return FALSE;

  /* Set up the info block for our subroutines.  */
  if_info.test_bb = test_bb;
  if_info.cond = cond;
  if_info.jump = jump;
  if_info.insn_a = insn_a;
  if_info.insn_b = insn_b;
  if_info.x = x;
  if_info.a = a;
  if_info.b = b;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (rtx_equal_p (a, b))
    {
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
	 move the instruction that we already have.  If we don't have an
	 INSN_B, that means that A == X, and we've got a noop move.  In
	 that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)
	{
	  rtx note;

	  if (else_bb && insn_b == BB_END (else_bb))
	    BB_END (else_bb) = PREV_INSN (insn_b);
	  reorder_insns (insn_b, insn_b, PREV_INSN (jump));

	  /* If there was a REG_EQUAL note, delete it since it may have been
	     true due to this insn being after a jump.  */
	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
	    remove_note (insn_b, note);

	  insn_b = NULL_RTX;
	}
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
	 x must be executed twice.  */
      else if (insn_b && side_effects_p (orig_x))
	return FALSE;

      x = orig_x;
      goto success;
    }

  /* Disallow the "if (...) x = a;" form (with an implicit "else x = x;")
     for most optimizations if writing to x may trap, i.e. it's a memory
     other than a static var or a stack slot.  */
  if (! set_b
      && GET_CODE (orig_x) == MEM
      && ! MEM_NOTRAP_P (orig_x)
      && rtx_addr_can_trap_p (XEXP (orig_x, 0)))
    {
      /* Only the cmove-based transforms are safe here, since they keep
	 the store to X unconditional in effect.  */
      if (HAVE_conditional_move)
	{
	  if (noce_try_cmove (&if_info))
	    goto success;
	  if (! HAVE_conditional_execution
	      && noce_try_cmove_arith (&if_info))
	    goto success;
	}
      return FALSE;
    }

  if (noce_try_move (&if_info))
    goto success;
  if (noce_try_store_flag (&if_info))
    goto success;
  if (noce_try_minmax (&if_info))
    goto success;
  if (noce_try_abs (&if_info))
    goto success;
  if (HAVE_conditional_move
      && noce_try_cmove (&if_info))
    goto success;
  if (! HAVE_conditional_execution)
    {
      if (noce_try_store_flag_constants (&if_info))
	goto success;
      if (noce_try_addcc (&if_info))
	goto success;
      if (noce_try_store_flag_mask (&if_info))
	goto success;
      if (HAVE_conditional_move
	  && noce_try_cmove_arith (&if_info))
	goto success;
      if (noce_try_sign_mask (&if_info))
	goto success;
    }

  return FALSE;

 success:
  /* The original sets may now be killed.  */
  delete_insn (insn_a);

  /* Several special cases here: First, we may have reused insn_b above,
     in which case insn_b is now NULL.  Second, we want to delete insn_b
     if it came from the ELSE block, because it follows the now correct
     write that appears in the TEST block.  However, if we got insn_b from
     the TEST block, it may in fact be loading data needed for the comparison.
     We'll let life_analysis remove the insn if it's really dead.  */
  if (insn_b && else_bb)
    delete_insn (insn_b);

  /* The new insns will have been inserted immediately before the jump.  We
     should be able to remove the jump with impunity, but the condition itself
     may have been modified by gcse to be shared across basic blocks.  */
  delete_insn (jump);

  /* If we used a temporary, fix it up now.  */
  if (orig_x != x)
    {
      start_sequence ();
      noce_emit_move_insn (orig_x, x);
      insn_b = get_insns ();
      set_used_flags (orig_x);
      unshare_all_rtl_in_chain (insn_b);
      end_sequence ();

      emit_insn_after_setloc (insn_b, BB_END (test_bb), INSN_LOCATOR (insn_a));
    }

  /* Merge the blocks!  */
  merge_if_block (ce_info);

  return TRUE;
}
2115 \f
2116 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
2117 straight line code. Return true if successful. */
2118
2119 static int
2120 process_if_block (struct ce_if_block * ce_info)
2121 {
2122 if (! reload_completed
2123 && noce_process_if_block (ce_info))
2124 return TRUE;
2125
2126 if (HAVE_conditional_execution && reload_completed)
2127 {
2128 /* If we have && and || tests, try to first handle combining the && and
2129 || tests into the conditional code, and if that fails, go back and
2130 handle it without the && and ||, which at present handles the && case
2131 if there was no ELSE block. */
2132 if (cond_exec_process_if_block (ce_info, TRUE))
2133 return TRUE;
2134
2135 if (ce_info->num_multiple_test_blocks)
2136 {
2137 cancel_changes (0);
2138
2139 if (cond_exec_process_if_block (ce_info, FALSE))
2140 return TRUE;
2141 }
2142 }
2143
2144 return FALSE;
2145 }
2146
2147 /* Merge the blocks and mark for local life update. */
2148
static void
merge_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;	/* last test block */
  basic_block then_bb = ce_info->then_bb;	/* THEN */
  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
  basic_block join_bb = ce_info->join_bb;	/* join block */
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;

  /* Merge any basic blocks to handle && and || subtests.  Each of
     the blocks are on the fallthru path from the predecessor block.
     Walk the fallthru chain from the first test block down to
     LAST_TEST_BB, folding each block into COMBO_BB as we go.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;
      basic_block fallthru = block_fallthru (bb);

      do
	{
	  bb = fallthru;
	  /* Fetch the next fallthru target before BB is absorbed into
	     COMBO_BB, since merging invalidates BB's own edges.  */
	  fallthru = block_fallthru (bb);
	  merge_blocks (combo_bb, bb);
	  num_true_changes++;
	}
      while (bb != last_test_bb);
    }

  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
     label, but it might if there were || tests.  That label's count should be
     zero, and it normally should be removed.  */

  if (then_bb)
    {
      /* COMBO_BB inherits THEN's live-at-end set, since THEN's end is
	 now the end of the combined block.  */
      if (combo_bb->global_live_at_end)
	COPY_REG_SET (combo_bb->global_live_at_end,
		      then_bb->global_live_at_end);
      merge_blocks (combo_bb, then_bb);
      num_true_changes++;
    }

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      merge_blocks (combo_bb, else_bb);
      num_true_changes++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      rtx last = BB_END (combo_bb);

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (combo_bb->succ == NULL_EDGE)
	{
	  /* No successors is only valid for a noreturn call or an
	     unconditional trap at the end of the block.  */
	  if (find_reg_note (last, REG_NORETURN, NULL))
	    ;
	  else if (GET_CODE (last) == INSN
		   && GET_CODE (PATTERN (last)) == TRAP_IF
		   && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
	    ;
	  else
	    abort ();
	}

      /* There should still be something at the end of the THEN or ELSE
	 blocks taking us to our final destination.  */
      else if (GET_CODE (last) == JUMP_INSN)
	;
      else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
	       && GET_CODE (last) == CALL_INSN
	       && SIBLING_CALL_P (last))
	;
      else if ((combo_bb->succ->flags & EDGE_EH)
	       && can_throw_internal (last))
	;
      else
	abort ();
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to abort).  */
  else if ((join_bb->pred == NULL
	    || join_bb->pred->pred_next == NULL)
	   && join_bb != EXIT_BLOCK_PTR)
    {
      /* We can merge the JOIN.  */
      if (combo_bb->global_live_at_end)
	COPY_REG_SET (combo_bb->global_live_at_end,
		      join_bb->global_live_at_end);

      merge_blocks (combo_bb, join_bb);
      num_true_changes++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
	 be correct.  Verify this.  */
      if (combo_bb->succ->succ_next != NULL_EDGE
	  || combo_bb->succ->dest != join_bb)
	abort ();

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR)
	tidy_fallthru_edge (combo_bb->succ);
    }

  num_updated_if_blocks++;
}
2273 \f
2274 /* Find a block ending in a simple IF condition and try to transform it
2275 in some way. When converting a multi-block condition, put the new code
2276 in the first such block and delete the rest. Return a pointer to this
2277 first block if some transformation was done. Return NULL otherwise. */
2278
2279 static basic_block
2280 find_if_header (basic_block test_bb, int pass)
2281 {
2282 ce_if_block_t ce_info;
2283 edge then_edge;
2284 edge else_edge;
2285
2286 /* The kind of block we're looking for has exactly two successors. */
2287 if ((then_edge = test_bb->succ) == NULL_EDGE
2288 || (else_edge = then_edge->succ_next) == NULL_EDGE
2289 || else_edge->succ_next != NULL_EDGE)
2290 return NULL;
2291
2292 /* Neither edge should be abnormal. */
2293 if ((then_edge->flags & EDGE_COMPLEX)
2294 || (else_edge->flags & EDGE_COMPLEX))
2295 return NULL;
2296
2297 /* Nor exit the loop. */
2298 if ((then_edge->flags & EDGE_LOOP_EXIT)
2299 || (else_edge->flags & EDGE_LOOP_EXIT))
2300 return NULL;
2301
2302 /* The THEN edge is canonically the one that falls through. */
2303 if (then_edge->flags & EDGE_FALLTHRU)
2304 ;
2305 else if (else_edge->flags & EDGE_FALLTHRU)
2306 {
2307 edge e = else_edge;
2308 else_edge = then_edge;
2309 then_edge = e;
2310 }
2311 else
2312 /* Otherwise this must be a multiway branch of some sort. */
2313 return NULL;
2314
2315 memset (&ce_info, '\0', sizeof (ce_info));
2316 ce_info.test_bb = test_bb;
2317 ce_info.then_bb = then_edge->dest;
2318 ce_info.else_bb = else_edge->dest;
2319 ce_info.pass = pass;
2320
2321 #ifdef IFCVT_INIT_EXTRA_FIELDS
2322 IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2323 #endif
2324
2325 if (find_if_block (&ce_info))
2326 goto success;
2327
2328 if (HAVE_trap && HAVE_conditional_trap
2329 && find_cond_trap (test_bb, then_edge, else_edge))
2330 goto success;
2331
2332 if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY
2333 && (! HAVE_conditional_execution || reload_completed))
2334 {
2335 if (find_if_case_1 (test_bb, then_edge, else_edge))
2336 goto success;
2337 if (find_if_case_2 (test_bb, then_edge, else_edge))
2338 goto success;
2339 }
2340
2341 return NULL;
2342
2343 success:
2344 if (dump_file)
2345 fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
2346 return ce_info.test_bb;
2347 }
2348
2349 /* Return true if a block has two edges, one of which falls through to the next
2350 block, and the other jumps to a specific block, so that we can tell if the
2351 block is part of an && test or an || test. Returns either -1 or the number
2352 of non-note, non-jump, non-USE/CLOBBER insns in the block. */
2353
2354 static int
2355 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
2356 {
2357 edge cur_edge;
2358 int fallthru_p = FALSE;
2359 int jump_p = FALSE;
2360 rtx insn;
2361 rtx end;
2362 int n_insns = 0;
2363
2364 if (!cur_bb || !target_bb)
2365 return -1;
2366
2367 /* If no edges, obviously it doesn't jump or fallthru. */
2368 if (cur_bb->succ == NULL_EDGE)
2369 return FALSE;
2370
2371 for (cur_edge = cur_bb->succ;
2372 cur_edge != NULL_EDGE;
2373 cur_edge = cur_edge->succ_next)
2374 {
2375 if (cur_edge->flags & EDGE_COMPLEX)
2376 /* Anything complex isn't what we want. */
2377 return -1;
2378
2379 else if (cur_edge->flags & EDGE_FALLTHRU)
2380 fallthru_p = TRUE;
2381
2382 else if (cur_edge->dest == target_bb)
2383 jump_p = TRUE;
2384
2385 else
2386 return -1;
2387 }
2388
2389 if ((jump_p & fallthru_p) == 0)
2390 return -1;
2391
2392 /* Don't allow calls in the block, since this is used to group && and ||
2393 together for conditional execution support. ??? we should support
2394 conditional execution support across calls for IA-64 some day, but
2395 for now it makes the code simpler. */
2396 end = BB_END (cur_bb);
2397 insn = BB_HEAD (cur_bb);
2398
2399 while (insn != NULL_RTX)
2400 {
2401 if (GET_CODE (insn) == CALL_INSN)
2402 return -1;
2403
2404 if (INSN_P (insn)
2405 && GET_CODE (insn) != JUMP_INSN
2406 && GET_CODE (PATTERN (insn)) != USE
2407 && GET_CODE (PATTERN (insn)) != CLOBBER)
2408 n_insns++;
2409
2410 if (insn == end)
2411 break;
2412
2413 insn = NEXT_INSN (insn);
2414 }
2415
2416 return n_insns;
2417 }
2418
2419 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2420 block. If so, we'll try to convert the insns to not require the branch.
2421 Return TRUE if we were successful at converting the block. */
2422
static int
find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;
  basic_block then_bb = ce_info->then_bb;
  basic_block else_bb = ce_info->else_bb;
  basic_block join_bb = NULL_BLOCK;
  edge then_succ = then_bb->succ;
  edge else_succ = else_bb->succ;
  int then_predecessors;
  int else_predecessors;
  edge cur_edge;
  basic_block next;

  ce_info->last_test_bb = test_bb;

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  Only attempted with conditional execution after
     reload, and only when TEST_BB has a single fallthru predecessor.  */
  if (HAVE_conditional_execution && reload_completed
      && test_bb->pred != NULL_EDGE
      && test_bb->pred->pred_next == NULL_EDGE
      && test_bb->pred->flags == EDGE_FALLTHRU)
    {
      basic_block bb = test_bb->pred->src;
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.
	 An && block jumps to ELSE on failure; an || block jumps to THEN
	 on success.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
	{
	  ce_info->and_and_p = TRUE;
	  target_bb = else_bb;
	}
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
	{
	  ce_info->and_and_p = FALSE;
	  target_bb = then_bb;
	}
      else
	target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
	{
	  int total_insns = 0;
	  int blocks = 0;

	  ce_info->last_test_bb = test_bb;

	  /* Found at least one && or || block, look for more.  Walk the
	     single-predecessor fallthru chain backward, moving the start
	     of the multi-test region (ce_info->test_bb) as we go, while
	     the total insn budget allows.  */
	  do
	    {
	      ce_info->test_bb = test_bb = bb;
	      total_insns += n_insns;
	      blocks++;

	      /* Stop at a block with zero or multiple predecessors.  */
	      if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
		break;

	      bb = bb->pred->src;
	      n_insns = block_jumps_and_fallthru_p (bb, target_bb);
	    }
	  while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

	  ce_info->num_multiple_test_blocks = blocks;
	  ce_info->num_multiple_test_insns = total_insns;

	  if (ce_info->and_and_p)
	    ce_info->num_and_and_blocks = blocks;
	  else
	    ce_info->num_or_or_blocks = blocks;
	}
    }

  /* Count the number of edges the THEN and ELSE blocks have.  Bail out
     on any complex (abnormal/EH) incoming edge.  */
  then_predecessors = 0;
  for (cur_edge = then_bb->pred;
       cur_edge != NULL_EDGE;
       cur_edge = cur_edge->pred_next)
    {
      then_predecessors++;
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  else_predecessors = 0;
  for (cur_edge = else_bb->pred;
       cur_edge != NULL_EDGE;
       cur_edge = cur_edge->pred_next)
    {
      else_predecessors++;
      if (cur_edge->flags & EDGE_COMPLEX)
	return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
    return FALSE;

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (then_succ != NULL_EDGE
      && (then_succ->succ_next != NULL_EDGE
	  || (then_succ->flags & EDGE_COMPLEX)
	  || (flow2_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  ??? we should fix this in the future.  */
  if (then_succ == NULL)
    {
      if (else_bb->pred->pred_next == NULL_EDGE)
	{
	  rtx last_insn = BB_END (then_bb);

	  /* Skip trailing notes to find the last real insn.  */
	  while (last_insn
		 && GET_CODE (last_insn) == NOTE
		 && last_insn != BB_HEAD (then_bb))
	    last_insn = PREV_INSN (last_insn);

	  /* A non-simple jump (e.g. an indirect jump) disqualifies us.  */
	  if (last_insn
	      && GET_CODE (last_insn) == JUMP_INSN
	      && ! simplejump_p (last_insn))
	    return FALSE;

	  /* Treat ELSE as the join point; there is no real ELSE block.  */
	  join_bb = else_bb;
	  else_bb = NULL_BLOCK;
	}
      else
	return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (then_succ->dest == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (else_succ != NULL_EDGE
	   && then_succ->dest == else_succ->dest
	   && else_bb->pred->pred_next == NULL_EDGE
	   && else_succ->succ_next == NULL_EDGE
	   && ! (else_succ->flags & EDGE_COMPLEX)
	   && ! (flow2_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
    join_bb = else_succ->dest;

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\nIF-THEN%s block found, pass %d, start block %d "
	       "[insn %d], then %d [%d]",
	       (else_bb) ? "-ELSE" : "",
	       ce_info->pass,
	       test_bb->index,
	       BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
	       then_bb->index,
	       BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);

      if (else_bb)
	fprintf (dump_file, ", else %d [%d]",
		 else_bb->index,
		 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);

      fprintf (dump_file, ", join %d [%d]",
	       join_bb->index,
	       BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
	fprintf (dump_file, ", %d %s block%s last test %d [%d]",
		 ce_info->num_multiple_test_blocks,
		 (ce_info->and_and_p) ? "&&" : "||",
		 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
		 ce_info->last_test_bb->index,
		 ((BB_HEAD (ce_info->last_test_bb))
		  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
		  : -1));

      fputc ('\n', dump_file);
    }

  /* Make sure IF, THEN, and ELSE, blocks are adjacent.  Actually, we get the
     first condition for free, since we've already asserted that there's a
     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
     we checked the FALLTHRU flag, those are already adjacent to the last IF
     block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than aborting the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
    {
      /* A non-adjacent JOIN is tolerable for IF-THEN-ELSE (we simply
	 won't merge it), but fatal for IF-THEN.  */
      if (else_bb)
	join_bb = NULL;
      else
	return FALSE;
    }

  /* Do the real work.  */
  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  return process_if_block (ce_info);
}
2644
2645 /* Convert a branch over a trap, or a branch
2646 to a trap, into a conditional trap. */
2647
static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx trap, jump, cond, cond_earliest, seq;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (dump_file)
    {
      fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
	       test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  If the branch jumps
     over the trap (THEN is the trap block), the trap must fire when the
     branch is NOT taken, i.e. on the reversed condition.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
	return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, XEXP (cond, 0),
		       XEXP (cond, 1),
		       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  num_true_changes++;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));

  /* Delete the trap block if possible.  The trap block may still have
     other predecessors, in which case it must stay.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  if (trap_bb->pred == NULL)
    delete_basic_block (trap_bb);

  /* If the non-trap block and the test are now adjacent, merge them.
     Otherwise we must insert a direct branch.  */
  if (test_bb->next_bb == other_bb)
    {
      struct ce_if_block new_ce_info;
      delete_insn (jump);
      memset (&new_ce_info, '\0', sizeof (new_ce_info));
      new_ce_info.test_bb = test_bb;
      new_ce_info.then_bb = NULL;
      new_ce_info.else_bb = NULL;
      new_ce_info.join_bb = other_bb;
      merge_if_block (&new_ce_info);
    }
  else
    {
      rtx lab, newjump;

      /* Replace the conditional branch with an unconditional jump to
	 the same label, keeping the label use count consistent.  */
      lab = JUMP_LABEL (jump);
      newjump = emit_jump_insn_after (gen_jump (lab), jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);

      delete_insn (jump);
    }

  return TRUE;
}
2742
2743 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2744 return it. */
2745
2746 static rtx
2747 block_has_only_trap (basic_block bb)
2748 {
2749 rtx trap;
2750
2751 /* We're not the exit block. */
2752 if (bb == EXIT_BLOCK_PTR)
2753 return NULL_RTX;
2754
2755 /* The block must have no successors. */
2756 if (bb->succ)
2757 return NULL_RTX;
2758
2759 /* The only instruction in the THEN block must be the trap. */
2760 trap = first_active_insn (bb);
2761 if (! (trap == BB_END (bb)
2762 && GET_CODE (PATTERN (trap)) == TRAP_IF
2763 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2764 return NULL_RTX;
2765
2766 return trap;
2767 }
2768
2769 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2770 transformable, but not necessarily the other. There need be no
2771 JOIN block.
2772
2773 Return TRUE if we were successful at converting the block.
2774
2775 Cases we'd like to look at:
2776
2777 (1)
2778 if (test) goto over; // x not live
2779 x = a;
2780 goto label;
2781 over:
2782
2783 becomes
2784
2785 x = a;
2786 if (! test) goto label;
2787
2788 (2)
2789 if (test) goto E; // x not live
2790 x = big();
2791 goto L;
2792 E:
2793 x = b;
2794 goto M;
2795
2796 becomes
2797
2798 x = b;
2799 if (test) goto M;
2800 x = big();
2801 goto L;
2802
2803 (3) // This one's really only interesting for targets that can do
2804 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2805 // it results in multiple branches on a cache line, which often
2806 // does not sit well with predictors.
2807
2808 if (test1) goto E; // predicted not taken
2809 x = a;
2810 if (test2) goto F;
2811 ...
2812 E:
2813 x = b;
2814 J:
2815
2816 becomes
2817
2818 x = a;
2819 if (test1) goto E;
2820 if (test2) goto F;
2821
2822 Notes:
2823
2824 (A) Don't do (2) if the branch is predicted against the block we're
2825 eliminating. Do it anyway if we can eliminate a branch; this requires
2826 that the sole successor of the eliminated block postdominate the other
2827 side of the if.
2828
2829 (B) With CE, on (3) we can steal from both sides of the if, creating
2830
2831 if (test1) x = a;
2832 if (!test1) x = b;
2833 if (test1) goto J;
2834 if (test2) goto F;
2835 ...
2836 J:
2837
2838 Again, this is most useful if J postdominates.
2839
2840 (C) CE substitutes for helpful life information.
2841
2842 (D) These heuristics need a lot of work. */
2843
2844 /* Tests for case 1 above. */
2845
static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest, new_bb;
  edge then_succ = then_bb->succ;
  int then_bb_index;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.  */

  if (flag_reorder_blocks_and_partition
      && ((BB_END (then_bb)
	   && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
	  || (BB_END (else_bb)
	      && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
				NULL_RTX))))
    return FALSE;

  /* THEN has one successor.  */
  if (!then_succ || then_succ->succ_next != NULL)
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (then_bb->pred->pred_next != NULL)
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-1 found, start %d, then %d\n",
	     test_bb->index, then_bb->index);

  /* THEN is small.  */
  if (count_bb_insns (then_bb) > BRANCH_COST)
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
			    then_bb->succ->dest, 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  /* TEST_BB's live-at-end becomes the union of ELSE's live-at-start and
     THEN's live-at-end, since TEST now flows into both.  */
  bitmap_operation (test_bb->global_live_at_end,
		    else_bb->global_live_at_start,
		    then_bb->global_live_at_end, BITMAP_IOR);

  new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      new_bb->index = then_bb_index;
      BASIC_BLOCK (then_bb_index) = new_bb;
    }
  /* We've possibly created jump to next insn, cleanup_cfg will solve that
     later.  */

  num_true_changes++;
  num_updated_if_blocks++;

  return TRUE;
}
2923
2924 /* Test for case 2 above. */
2925
static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ = else_bb->succ;
  rtx note;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.  */

  if (flag_reorder_blocks_and_partition
      && ((BB_END (then_bb)
	   && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
	  || (BB_END (else_bb)
	      && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
				NULL_RTX))))
    return FALSE;

  /* ELSE has one successor.  */
  if (!else_succ || else_succ->succ_next != NULL)
    return FALSE;

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (else_bb->pred->pred_next != NULL)
    return FALSE;

  /* THEN is not EXIT.  A negative block index marks the entry/exit
     pseudo-blocks.  */
  if (then_bb->index < 0)
    return FALSE;

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
    ;
  else if (else_succ->dest->index < 0
	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
			      else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
	     "\nIF-CASE-2 found, start %d, else %d\n",
	     test_bb->index, else_bb->index);

  /* ELSE is small.  */
  if (count_bb_insns (else_bb) > BRANCH_COST)
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  /* TEST_BB's live-at-end becomes the union of THEN's live-at-start and
     ELSE's live-at-end, since TEST now flows into both.  */
  bitmap_operation (test_bb->global_live_at_end,
		    then_bb->global_live_at_start,
		    else_bb->global_live_at_end, BITMAP_IOR);

  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}
3004
3005 /* A subroutine of dead_or_predicable called through for_each_rtx.
3006 Return 1 if a memory is found. */
3007
3008 static int
3009 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3010 {
3011 return GET_CODE (*px) == MEM;
3012 }
3013
3014 /* Used by the code above to perform the actual rtl transformations.
3015 Return TRUE if successful.
3016
3017 TEST_BB is the block containing the conditional branch. MERGE_BB
3018 is the block containing the code to manipulate. NEW_DEST is the
3019 label TEST_BB should be branching to after the conversion.
3020 REVERSEP is true if the sense of the branch should be reversed. */
3021
static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
		    basic_block other_bb, basic_block new_dest, int reversep)
{
  rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  Skip a
     leading CODE_LABEL and NOTE, and a trailing JUMP_INSN; if nothing
     remains the block is empty and we only need to retarget the jump.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  if (GET_CODE (head) == CODE_LABEL)
    head = NEXT_INSN (head);
  if (GET_CODE (head) == NOTE)
    {
      if (head == end)
	{
	  head = end = NULL_RTX;
	  goto no_body;
	}
      head = NEXT_INSN (head);
    }

  if (GET_CODE (end) == JUMP_INSN)
    {
      if (head == end)
	{
	  head = end = NULL_RTX;
	  goto no_body;
	}
      end = PREV_INSN (end);
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (HAVE_conditional_execution)
    {
      /* In the conditional execution case, we have things easy.  We know
	 the condition is reversible.  We don't have to check life info
	 because we're going to conditionally execute the code anyway.
	 All that's left is making sure the insns involved can actually
	 be predicated.  */

      rtx cond, prob_val;

      cond = cond_exec_get_condition (jump);
      if (! cond)
	return FALSE;

      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      if (prob_val)
	prob_val = XEXP (prob_val, 0);

      if (reversep)
	{
	  enum rtx_code rev = reversed_comparison_code (cond, jump);
	  if (rev == UNKNOWN)
	    return FALSE;
	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
				 XEXP (cond, 1));
	  /* Invert the branch probability to match the reversed test.  */
	  if (prob_val)
	    prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
	}

      if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
				     prob_val, 0))
	goto cancel;

      /* Predicated code can be moved right up to the jump itself.  */
      earliest = jump;
    }
  else
#endif
    {
      /* In the non-conditional execution case, we have to verify that there
	 are no trapping operations, no calls, no references to memory, and
	 that any registers modified are dead at the branch site.  */

      rtx insn, cond, prev;
      regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
      regset merge_set, tmp, test_live, test_set;
      struct propagate_block_info *pbi;
      int i, fail = 0;

      /* Check for no calls or trapping operations.  */
      for (insn = head; ; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CALL_INSN)
	    return FALSE;
	  if (INSN_P (insn))
	    {
	      if (may_trap_p (PATTERN (insn)))
		return FALSE;

	      /* ??? Even non-trapping memories such as stack frame
		 references must be avoided.  For stores, we collect
		 no lifetime info; for reads, we'd have to assert
		 true_dependence false against every store in the
		 TEST range.  */
	      if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
		return FALSE;
	    }
	  if (insn == end)
	    break;
	}

      if (! any_condjump_p (jump))
	return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest);
      if (! cond)
	return FALSE;

      /* Collect:
	 MERGE_SET = set of registers set in MERGE_BB
	 TEST_LIVE = set of registers live at EARLIEST
	 TEST_SET  = set of registers set between EARLIEST and the
		     end of the block.  */

      tmp = INITIALIZE_REG_SET (tmp_head);
      merge_set = INITIALIZE_REG_SET (merge_set_head);
      test_live = INITIALIZE_REG_SET (test_live_head);
      test_set = INITIALIZE_REG_SET (test_set_head);

      /* ??? bb->local_set is only valid during calculate_global_regs_live,
	 so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
	 since we've already asserted that MERGE_BB is small.  */
      propagate_block (merge_bb, tmp, merge_set, merge_set, 0);

      /* For small register class machines, don't lengthen lifetimes of
	 hard registers before reload.  */
      if (SMALL_REGISTER_CLASSES && ! reload_completed)
	{
	  EXECUTE_IF_SET_IN_BITMAP
	    (merge_set, 0, i,
	     {
	       if (i < FIRST_PSEUDO_REGISTER
		   && ! fixed_regs[i]
		   && ! global_regs[i])
		fail = 1;
	     });
	}

      /* For TEST, we're interested in a range of insns, not a whole block.
	 Moreover, we're interested in the insns live from OTHER_BB.  */

      COPY_REG_SET (test_live, other_bb->global_live_at_start);
      pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
				       0);

      /* Walk backward from the jump to EARLIEST, accumulating liveness
	 over exactly the range the moved code would be hoisted across.  */
      for (insn = jump; ; insn = prev)
	{
	  prev = propagate_one_insn (pbi, insn);
	  if (insn == earliest)
	    break;
	}

      free_propagate_block_info (pbi);

      /* We can perform the transformation if
	   MERGE_SET & (TEST_SET | TEST_LIVE)
	 and
	   TEST_SET & merge_bb->global_live_at_start
	 are empty.  */

      bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
      bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
      EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);

      bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
			BITMAP_AND);
      EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);

      FREE_REG_SET (tmp);
      FREE_REG_SET (merge_set);
      FREE_REG_SET (test_live);
      FREE_REG_SET (test_set);

      if (fail)
	return FALSE;
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      new_label = block_label (new_dest);
      if (reversep
	  ? ! invert_jump_1 (jump, new_label)
	  : ! redirect_jump_1 (jump, new_label))
	goto cancel;
    }

  if (! apply_change_group ())
    return FALSE;

  if (other_bb != new_dest)
    {
      /* The change group has been committed; update label use counts
	 and edge/probability bookkeeping to match the new target.  */
      if (old_dest)
	LABEL_NUSES (old_dest) -= 1;
      if (new_label)
	LABEL_NUSES (new_label) += 1;
      JUMP_LABEL (jump) = new_label;
      if (reversep)
	invert_br_probabilities (jump);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
	{
	  /* Swap the counts and probabilities of the two outgoing edges,
	     since the branch sense has been inverted.  */
	  gcov_type count, probability;
	  count = BRANCH_EDGE (test_bb)->count;
	  BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
	  FALLTHRU_EDGE (test_bb)->count = count;
	  probability = BRANCH_EDGE (test_bb)->probability;
	  BRANCH_EDGE (test_bb)->probability
	    = FALLTHRU_EDGE (test_bb)->probability;
	  FALLTHRU_EDGE (test_bb)->probability = probability;
	  update_br_prob_note (test_bb);
	}
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      if (end == BB_END (merge_bb))
	BB_END (merge_bb) = PREV_INSN (head);

      if (squeeze_notes (&head, &end))
	return TRUE;

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
	 At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);
  return FALSE;
}
3276 \f
3277 /* Main entry point for all if-conversion. */
3278
3279 void
3280 if_convert (int x_life_data_ok)
3281 {
3282 basic_block bb;
3283 int pass;
3284
3285 num_possible_if_blocks = 0;
3286 num_updated_if_blocks = 0;
3287 num_true_changes = 0;
3288 life_data_ok = (x_life_data_ok != 0);
3289
3290 if ((! targetm.cannot_modify_jumps_p ())
3291 && (!flag_reorder_blocks_and_partition || !no_new_pseudos))
3292 mark_loop_exit_edges ();
3293
3294 /* Compute postdominators if we think we'll use them. */
3295 if (HAVE_conditional_execution || life_data_ok)
3296 calculate_dominance_info (CDI_POST_DOMINATORS);
3297
3298 if (life_data_ok)
3299 clear_bb_flags ();
3300
3301 /* Go through each of the basic blocks looking for things to convert. If we
3302 have conditional execution, we make multiple passes to allow us to handle
3303 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
3304 pass = 0;
3305 do
3306 {
3307 cond_exec_changed_p = FALSE;
3308 pass++;
3309
3310 #ifdef IFCVT_MULTIPLE_DUMPS
3311 if (dump_file && pass > 1)
3312 fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
3313 #endif
3314
3315 FOR_EACH_BB (bb)
3316 {
3317 basic_block new_bb;
3318 while ((new_bb = find_if_header (bb, pass)))
3319 bb = new_bb;
3320 }
3321
3322 #ifdef IFCVT_MULTIPLE_DUMPS
3323 if (dump_file && cond_exec_changed_p)
3324 print_rtl_with_bb (dump_file, get_insns ());
3325 #endif
3326 }
3327 while (cond_exec_changed_p);
3328
3329 #ifdef IFCVT_MULTIPLE_DUMPS
3330 if (dump_file)
3331 fprintf (dump_file, "\n\n========== no more changes\n");
3332 #endif
3333
3334 free_dominance_info (CDI_POST_DOMINATORS);
3335
3336 if (dump_file)
3337 fflush (dump_file);
3338
3339 clear_aux_for_blocks ();
3340
3341 /* Rebuild life info for basic blocks that require it. */
3342 if (num_true_changes && life_data_ok)
3343 {
3344 /* If we allocated new pseudos, we must resize the array for sched1. */
3345 if (max_regno < max_reg_num ())
3346 {
3347 max_regno = max_reg_num ();
3348 allocate_reg_info (max_regno, FALSE, FALSE);
3349 }
3350 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3351 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3352 | PROP_KILL_DEAD_CODE);
3353 }
3354
3355 /* Write the final stats. */
3356 if (dump_file && num_possible_if_blocks > 0)
3357 {
3358 fprintf (dump_file,
3359 "\n%d possible IF blocks searched.\n",
3360 num_possible_if_blocks);
3361 fprintf (dump_file,
3362 "%d IF blocks converted.\n",
3363 num_updated_if_blocks);
3364 fprintf (dump_file,
3365 "%d true changes made.\n\n\n",
3366 num_true_changes);
3367 }
3368
3369 #ifdef ENABLE_CHECKING
3370 verify_flow_info ();
3371 #endif
3372 }