re PR rtl-optimization/10817 (gcc does not optimize result=0;if(t!=0)result=t; to...
[gcc.git] / gcc / ifcvt.c
1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "optabs.h"
39 #include "toplev.h"
40 #include "tm_p.h"
41 #include "cfgloop.h"
42 #include "target.h"
43
44
45 #ifndef HAVE_conditional_execution
46 #define HAVE_conditional_execution 0
47 #endif
48 #ifndef HAVE_conditional_move
49 #define HAVE_conditional_move 0
50 #endif
51 #ifndef HAVE_incscc
52 #define HAVE_incscc 0
53 #endif
54 #ifndef HAVE_decscc
55 #define HAVE_decscc 0
56 #endif
57 #ifndef HAVE_trap
58 #define HAVE_trap 0
59 #endif
60 #ifndef HAVE_conditional_trap
61 #define HAVE_conditional_trap 0
62 #endif
63
64 #ifndef MAX_CONDITIONAL_EXECUTE
65 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
66 #endif
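/* Note: this is the insn budget checked in cond_exec_process_if_block; the
   THEN and ELSE insn counts together must stay within it (the budget is
   doubled when an ELSE block is present), the idea presumably being that
   predicating much more code than the branch itself would have cost is
   unlikely to be a win.  */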
67
68 #define NULL_EDGE ((struct edge_def *)NULL)
69 #define NULL_BLOCK ((struct basic_block_def *)NULL)
70
71 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
72 static int num_possible_if_blocks;
73
74 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
75 execution. */
76 static int num_updated_if_blocks;
77
78 /* # of changes made which require life information to be updated. */
79 static int num_true_changes;
80
81 /* Whether conditional execution changes were made. */
82 static int cond_exec_changed_p;
83
84 /* True if life data ok at present. */
85 static bool life_data_ok;
86
87 /* The post-dominator relation on the original block numbers. */
88 static dominance_info post_dominators;
89
90 /* Forward references. */
91 static int count_bb_insns (basic_block);
92 static rtx first_active_insn (basic_block);
93 static rtx last_active_insn (basic_block, int);
94 static int seq_contains_jump (rtx);
95 static basic_block block_fallthru (basic_block);
96 static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
97 static rtx cond_exec_get_condition (rtx);
98 static int cond_exec_process_if_block (ce_if_block_t *, int);
99 static rtx noce_get_condition (rtx, rtx *);
100 static int noce_operand_ok (rtx);
101 static int noce_process_if_block (ce_if_block_t *);
102 static int process_if_block (ce_if_block_t *);
103 static void merge_if_block (ce_if_block_t *);
104 static int find_cond_trap (basic_block, edge, edge);
105 static basic_block find_if_header (basic_block, int);
106 static int block_jumps_and_fallthru_p (basic_block, basic_block);
107 static int find_if_block (ce_if_block_t *);
108 static int find_if_case_1 (basic_block, edge, edge);
109 static int find_if_case_2 (basic_block, edge, edge);
110 static int find_memory (rtx *, void *);
111 static int dead_or_predicable (basic_block, basic_block, basic_block,
112 basic_block, int);
113 static void noce_emit_move_insn (rtx, rtx);
114 static rtx block_has_only_trap (basic_block);
115 static void mark_loop_exit_edges (void);
116 \f
117 /* Sets EDGE_LOOP_EXIT flag for all loop exits. */
118 static void
119 mark_loop_exit_edges (void)
120 {
121 struct loops loops;
122 basic_block bb;
123 edge e;
124
125 flow_loops_find (&loops, LOOP_TREE);
126
127 if (loops.num > 1)
128 {
129 FOR_EACH_BB (bb)
130 {
131 for (e = bb->succ; e; e = e->succ_next)
132 {
133 if (find_common_loop (bb->loop_father, e->dest->loop_father)
134 != bb->loop_father)
135 e->flags |= EDGE_LOOP_EXIT;
136 else
137 e->flags &= ~EDGE_LOOP_EXIT;
138 }
139 }
140 }
141
142 flow_loops_free (&loops);
143 }
144
145 /* Count the number of non-jump active insns in BB. */
146
147 static int
148 count_bb_insns (basic_block bb)
149 {
150 int count = 0;
151 rtx insn = bb->head;
152
153 while (1)
154 {
155 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
156 count++;
157
158 if (insn == bb->end)
159 break;
160 insn = NEXT_INSN (insn);
161 }
162
163 return count;
164 }
165
166 /* Return the first non-jump active insn in the basic block. */
167
168 static rtx
169 first_active_insn (basic_block bb)
170 {
171 rtx insn = bb->head;
172
173 if (GET_CODE (insn) == CODE_LABEL)
174 {
175 if (insn == bb->end)
176 return NULL_RTX;
177 insn = NEXT_INSN (insn);
178 }
179
180 while (GET_CODE (insn) == NOTE)
181 {
182 if (insn == bb->end)
183 return NULL_RTX;
184 insn = NEXT_INSN (insn);
185 }
186
187 if (GET_CODE (insn) == JUMP_INSN)
188 return NULL_RTX;
189
190 return insn;
191 }
192
193 /* Return the last active non-jump insn in the basic block. */
194
195 static rtx
196 last_active_insn (basic_block bb, int skip_use_p)
197 {
198 rtx insn = bb->end;
199 rtx head = bb->head;
200
201 while (GET_CODE (insn) == NOTE
202 || GET_CODE (insn) == JUMP_INSN
203 || (skip_use_p
204 && GET_CODE (insn) == INSN
205 && GET_CODE (PATTERN (insn)) == USE))
206 {
207 if (insn == head)
208 return NULL_RTX;
209 insn = PREV_INSN (insn);
210 }
211
212 if (GET_CODE (insn) == CODE_LABEL)
213 return NULL_RTX;
214
215 return insn;
216 }
217
218 /* It is possible, especially when dealing with multi-word
219 arithmetic, for the expanders to have emitted jumps. Search
220 through the sequence and return TRUE if a jump exists so that
221 we can abort the conversion. */
222
223 static int
224 seq_contains_jump (rtx insn)
225 {
226 while (insn)
227 {
228 if (GET_CODE (insn) == JUMP_INSN)
229 return 1;
230 insn = NEXT_INSN (insn);
231 }
232 return 0;
233 }
234
235 static basic_block
236 block_fallthru (basic_block bb)
237 {
238 edge e;
239
240 for (e = bb->succ;
241 e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
242 e = e->succ_next)
243 ;
244
245 return (e) ? e->dest : NULL_BLOCK;
246 }
247 \f
248 /* Go through a bunch of insns, converting them to conditional
249 execution format if possible. Return TRUE if all of the non-note
250 insns were processed. */
251
252 static int
253 cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
254 /* if block information */rtx start,
255 /* first insn to look at */rtx end,
256 /* last insn to look at */rtx test,
257 /* conditional execution test */rtx prob_val,
258 /* probability of branch taken. */int mod_ok)
259 {
260 int must_be_last = FALSE;
261 rtx insn;
262 rtx xtest;
263 rtx pattern;
264
265 if (!start || !end)
266 return FALSE;
267
268 for (insn = start; ; insn = NEXT_INSN (insn))
269 {
270 if (GET_CODE (insn) == NOTE)
271 goto insn_done;
272
273 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
274 abort ();
275
276 /* Remove USE insns that get in the way. */
277 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
278 {
279 /* ??? Ug. Actually unlinking the thing is problematic,
280 given what we'd have to coordinate with our callers. */
281 PUT_CODE (insn, NOTE);
282 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
283 NOTE_SOURCE_FILE (insn) = 0;
284 goto insn_done;
285 }
286
287 /* Last insn wasn't last? */
288 if (must_be_last)
289 return FALSE;
290
291 if (modified_in_p (test, insn))
292 {
293 if (!mod_ok)
294 return FALSE;
295 must_be_last = TRUE;
296 }
297
298 /* Now build the conditional form of the instruction. */
299 pattern = PATTERN (insn);
300 xtest = copy_rtx (test);
301
302 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
303 two conditions. */
304 if (GET_CODE (pattern) == COND_EXEC)
305 {
306 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
307 return FALSE;
308
309 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
310 COND_EXEC_TEST (pattern));
311 pattern = COND_EXEC_CODE (pattern);
312 }
313
314 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
315
316 /* If the machine needs to modify the insn being conditionally executed,
317 say for example to force a constant integer operand into a temp
318 register, do so here. */
319 #ifdef IFCVT_MODIFY_INSN
320 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
321 if (! pattern)
322 return FALSE;
323 #endif
324
325 validate_change (insn, &PATTERN (insn), pattern, 1);
326
327 if (GET_CODE (insn) == CALL_INSN && prob_val)
328 validate_change (insn, &REG_NOTES (insn),
329 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
330 REG_NOTES (insn)), 1);
331
332 insn_done:
333 if (insn == end)
334 break;
335 }
336
337 return TRUE;
338 }
339
340 /* Return the condition for a jump. Do not do any special processing. */
341
342 static rtx
343 cond_exec_get_condition (rtx jump)
344 {
345 rtx test_if, cond;
346
347 if (any_condjump_p (jump))
348 test_if = SET_SRC (pc_set (jump));
349 else
350 return NULL_RTX;
351 cond = XEXP (test_if, 0);
352
353 /* If this branches to JUMP_LABEL when the condition is false,
354 reverse the condition. */
355 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
356 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
357 {
358 enum rtx_code rev = reversed_comparison_code (cond, jump);
359 if (rev == UNKNOWN)
360 return NULL_RTX;
361
362 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
363 XEXP (cond, 1));
364 }
365
366 return cond;
367 }
368
369 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
370 to conditional execution. Return TRUE if we were successful at
371 converting the block. */
372
373 static int
374 cond_exec_process_if_block (ce_if_block_t * ce_info,
375 /* if block information */int do_multiple_p)
376 {
377 basic_block test_bb = ce_info->test_bb; /* last test block */
378 basic_block then_bb = ce_info->then_bb; /* THEN */
379 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
380 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
381 rtx then_start; /* first insn in THEN block */
382 rtx then_end; /* last insn + 1 in THEN block */
383 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
384 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
385 int max; /* max # of insns to convert. */
386 int then_mod_ok; /* whether conditional mods are ok in THEN */
387 rtx true_expr; /* test for else block insns */
388 rtx false_expr; /* test for then block insns */
389 rtx true_prob_val; /* probability of else block */
390 rtx false_prob_val; /* probability of then block */
391 int n_insns;
392 enum rtx_code false_code;
393
394 /* If test is comprised of && or || elements, and we've failed at handling
395 all of them together, just use the last test if it is the special case of
396 && elements without an ELSE block. */
397 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
398 {
399 if (else_bb || ! ce_info->and_and_p)
400 return FALSE;
401
402 ce_info->test_bb = test_bb = ce_info->last_test_bb;
403 ce_info->num_multiple_test_blocks = 0;
404 ce_info->num_and_and_blocks = 0;
405 ce_info->num_or_or_blocks = 0;
406 }
407
408 /* Find the conditional jump to the ELSE or JOIN part, and isolate
409 the test. */
410 test_expr = cond_exec_get_condition (test_bb->end);
411 if (! test_expr)
412 return FALSE;
413
414 /* If the conditional jump is more than just a conditional jump,
415 then we can not do conditional execution conversion on this block. */
416 if (! onlyjump_p (test_bb->end))
417 return FALSE;
418
419 /* Collect the bounds of where we're to search, skipping any labels, jumps
420 and notes at the beginning and end of the block. Then count the total
421 number of insns and see if it is small enough to convert. */
422 then_start = first_active_insn (then_bb);
423 then_end = last_active_insn (then_bb, TRUE);
424 n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
425 max = MAX_CONDITIONAL_EXECUTE;
426
427 if (else_bb)
428 {
429 max *= 2;
430 else_start = first_active_insn (else_bb);
431 else_end = last_active_insn (else_bb, TRUE);
432 n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
433 }
434
435 if (n_insns > max)
436 return FALSE;
437
438 /* Map test_expr/test_jump into the appropriate MD tests to use on
439 the conditionally executed code. */
440
441 true_expr = test_expr;
442
443 false_code = reversed_comparison_code (true_expr, test_bb->end);
444 if (false_code != UNKNOWN)
445 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
446 XEXP (true_expr, 0), XEXP (true_expr, 1));
447 else
448 false_expr = NULL_RTX;
449
450 #ifdef IFCVT_MODIFY_TESTS
451 /* If the machine description needs to modify the tests, such as setting a
452 conditional execution register from a comparison, it can do so here. */
453 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
454
455 /* See if the conversion failed */
456 if (!true_expr || !false_expr)
457 goto fail;
458 #endif
459
460 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
461 if (true_prob_val)
462 {
463 true_prob_val = XEXP (true_prob_val, 0);
464 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
465 }
466 else
467 false_prob_val = NULL_RTX;
468
469 /* If we have && or || tests, do them here. These tests are in the adjacent
470 blocks after the first block containing the test. */
471 if (ce_info->num_multiple_test_blocks > 0)
472 {
473 basic_block bb = test_bb;
474 basic_block last_test_bb = ce_info->last_test_bb;
475
476 if (! false_expr)
477 goto fail;
478
479 do
480 {
481 rtx start, end;
482 rtx t, f;
483
484 bb = block_fallthru (bb);
485 start = first_active_insn (bb);
486 end = last_active_insn (bb, TRUE);
487 if (start
488 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
489 false_prob_val, FALSE))
490 goto fail;
491
492 /* If the conditional jump is more than just a conditional jump, then
493 we can not do conditional execution conversion on this block. */
494 if (! onlyjump_p (bb->end))
495 goto fail;
496
497 /* Find the conditional jump and isolate the test. */
498 t = cond_exec_get_condition (bb->end);
499 if (! t)
500 goto fail;
501
502 f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
503 GET_MODE (t),
504 XEXP (t, 0),
505 XEXP (t, 1));
506
507 if (ce_info->and_and_p)
508 {
509 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
510 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
511 }
512 else
513 {
514 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
515 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
516 }
517
518 /* If the machine description needs to modify the tests, such as
519 setting a conditional execution register from a comparison, it can
520 do so here. */
521 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
522 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
523
524 /* See if the conversion failed */
525 if (!t || !f)
526 goto fail;
527 #endif
528
529 true_expr = t;
530 false_expr = f;
531 }
532 while (bb != last_test_bb);
533 }
534
535 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
536 on the THEN block. */
537 then_mod_ok = (else_bb == NULL_BLOCK);
538
539 /* Go through the THEN and ELSE blocks converting the insns if possible
540 to conditional execution. */
541
542 if (then_end
543 && (! false_expr
544 || ! cond_exec_process_insns (ce_info, then_start, then_end,
545 false_expr, false_prob_val,
546 then_mod_ok)))
547 goto fail;
548
549 if (else_bb && else_end
550 && ! cond_exec_process_insns (ce_info, else_start, else_end,
551 true_expr, true_prob_val, TRUE))
552 goto fail;
553
554 /* If we cannot apply the changes, fail. Do not go through the normal fail
555 processing, since apply_change_group will call cancel_changes. */
556 if (! apply_change_group ())
557 {
558 #ifdef IFCVT_MODIFY_CANCEL
559 /* Cancel any machine dependent changes. */
560 IFCVT_MODIFY_CANCEL (ce_info);
561 #endif
562 return FALSE;
563 }
564
565 #ifdef IFCVT_MODIFY_FINAL
566 /* Do any machine dependent final modifications */
567 IFCVT_MODIFY_FINAL (ce_info);
568 #endif
569
570 /* Conversion succeeded. */
571 if (rtl_dump_file)
572 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
573 n_insns, (n_insns == 1) ? " was" : "s were");
574
575 /* Merge the blocks! */
576 merge_if_block (ce_info);
577 cond_exec_changed_p = TRUE;
578 return TRUE;
579
580 fail:
581 #ifdef IFCVT_MODIFY_CANCEL
582 /* Cancel any machine dependent changes. */
583 IFCVT_MODIFY_CANCEL (ce_info);
584 #endif
585
586 cancel_changes (0);
587 return FALSE;
588 }
589 \f
590 /* Used by noce_process_if_block to communicate with its subroutines.
591
592 The subroutines know that A and B may be evaluated freely. They
593 know that X is a register. They should insert new instructions
594 before cond_earliest. */
595
596 struct noce_if_info
597 {
598 basic_block test_bb;
599 rtx insn_a, insn_b;
600 rtx x, a, b;
601 rtx jump, cond, cond_earliest;
602 };
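/* As an illustrative sketch of how these fields are used: for the canonical
   source pattern

     if (test) x = a; else x = b;

   TEST_BB ends in the conditional JUMP whose condition is COND, INSN_A is
   the THEN block's single set "x = a", and INSN_B is the ELSE block's set
   "x = b" (or, when there is no ELSE block, the prior set of X found just
   above the test).  The noce_try_* routines below try to compute X from
   A, B and COND without a branch.  */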
603
604 static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
605 static int noce_try_move (struct noce_if_info *);
606 static int noce_try_store_flag (struct noce_if_info *);
607 static int noce_try_addcc (struct noce_if_info *);
608 static int noce_try_store_flag_constants (struct noce_if_info *);
609 static int noce_try_store_flag_mask (struct noce_if_info *);
610 static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
611 rtx, rtx, rtx);
612 static int noce_try_cmove (struct noce_if_info *);
613 static int noce_try_cmove_arith (struct noce_if_info *);
614 static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
615 static int noce_try_minmax (struct noce_if_info *);
616 static int noce_try_abs (struct noce_if_info *);
617
618 /* Helper function for noce_try_store_flag*. */
619
620 static rtx
621 noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
622 int normalize)
623 {
624 rtx cond = if_info->cond;
625 int cond_complex;
626 enum rtx_code code;
627
628 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
629 || ! general_operand (XEXP (cond, 1), VOIDmode));
630
631 /* If earliest == jump, or when the condition is complex, try to
632 build the store_flag insn directly. */
633
634 if (cond_complex)
635 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
636
637 if (reversep)
638 code = reversed_comparison_code (cond, if_info->jump);
639 else
640 code = GET_CODE (cond);
641
642 if ((if_info->cond_earliest == if_info->jump || cond_complex)
643 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
644 {
645 rtx tmp;
646
647 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
648 XEXP (cond, 1));
649 tmp = gen_rtx_SET (VOIDmode, x, tmp);
650
651 start_sequence ();
652 tmp = emit_insn (tmp);
653
654 if (recog_memoized (tmp) >= 0)
655 {
656 tmp = get_insns ();
657 end_sequence ();
658 emit_insn (tmp);
659
660 if_info->cond_earliest = if_info->jump;
661
662 return x;
663 }
664
665 end_sequence ();
666 }
667
668 /* Don't even try if the comparison operands or the mode of X are weird. */
669 if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
670 return NULL_RTX;
671
672 return emit_store_flag (x, code, XEXP (cond, 0),
673 XEXP (cond, 1), VOIDmode,
674 (code == LTU || code == LEU
675 || code == GEU || code == GTU), normalize);
676 }
677
678 /* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
679 X is the destination/target and Y is the value to copy. */
680
681 static void
682 noce_emit_move_insn (rtx x, rtx y)
683 {
684 enum machine_mode outmode, inmode;
685 rtx outer, inner;
686 int bitpos;
687
688 if (GET_CODE (x) != STRICT_LOW_PART)
689 {
690 emit_move_insn (x, y);
691 return;
692 }
693
694 outer = XEXP (x, 0);
695 inner = XEXP (outer, 0);
696 outmode = GET_MODE (outer);
697 inmode = GET_MODE (inner);
698 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
699 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
700 GET_MODE_BITSIZE (inmode));
701 }
702
703 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
704 "if (a == b) x = a; else x = b" into "x = b". */
705
706 static int
707 noce_try_move (struct noce_if_info *if_info)
708 {
709 rtx cond = if_info->cond;
710 enum rtx_code code = GET_CODE (cond);
711 rtx y, seq;
712
713 if (code != NE && code != EQ)
714 return FALSE;
715
716 /* This optimization isn't valid if either A or B could be a NaN
717 or a signed zero. */
718 if (HONOR_NANS (GET_MODE (if_info->x))
719 || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
720 return FALSE;
721
722 /* Check whether the operands of the comparison are A and B in
723 either order. */
724 if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
725 && rtx_equal_p (if_info->b, XEXP (cond, 1)))
726 || (rtx_equal_p (if_info->a, XEXP (cond, 1))
727 && rtx_equal_p (if_info->b, XEXP (cond, 0))))
728 {
729 y = (code == EQ) ? if_info->a : if_info->b;
730
731 /* Avoid generating the move if the source is the destination. */
732 if (! rtx_equal_p (if_info->x, y))
733 {
734 start_sequence ();
735 noce_emit_move_insn (if_info->x, y);
736 seq = get_insns ();
737 end_sequence ();
738 emit_insn_before_setloc (seq, if_info->jump,
739 INSN_LOCATOR (if_info->insn_a));
740 }
741 return TRUE;
742 }
743 return FALSE;
744 }
745
746 /* Convert "if (test) x = 1; else x = 0".
747
748 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
749 tried in noce_try_store_flag_constants after noce_try_cmove has had
750 a go at the conversion. */
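/* On targets where STORE_FLAG_VALUE is 1 (typically the case) this turns
   the common "if (test) x = 1; else x = 0;" idiom into a single store-flag
   (set-on-condition) instruction.  */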
751
752 static int
753 noce_try_store_flag (struct noce_if_info *if_info)
754 {
755 int reversep;
756 rtx target, seq;
757
758 if (GET_CODE (if_info->b) == CONST_INT
759 && INTVAL (if_info->b) == STORE_FLAG_VALUE
760 && if_info->a == const0_rtx)
761 reversep = 0;
762 else if (if_info->b == const0_rtx
763 && GET_CODE (if_info->a) == CONST_INT
764 && INTVAL (if_info->a) == STORE_FLAG_VALUE
765 && (reversed_comparison_code (if_info->cond, if_info->jump)
766 != UNKNOWN))
767 reversep = 1;
768 else
769 return FALSE;
770
771 start_sequence ();
772
773 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
774 if (target)
775 {
776 if (target != if_info->x)
777 noce_emit_move_insn (if_info->x, target);
778
779 seq = get_insns ();
780 end_sequence ();
781 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
782
783 return TRUE;
784 }
785 else
786 {
787 end_sequence ();
788 return FALSE;
789 }
790 }
791
792 /* Convert "if (test) x = a; else x = b", for A and B constant. */
793
794 static int
795 noce_try_store_flag_constants (struct noce_if_info *if_info)
796 {
797 rtx target, seq;
798 int reversep;
799 HOST_WIDE_INT itrue, ifalse, diff, tmp;
800 int normalize, can_reverse;
801 enum machine_mode mode;
802
803 if (! no_new_pseudos
804 && GET_CODE (if_info->a) == CONST_INT
805 && GET_CODE (if_info->b) == CONST_INT)
806 {
807 mode = GET_MODE (if_info->x);
808 ifalse = INTVAL (if_info->a);
809 itrue = INTVAL (if_info->b);
810
811 /* Make sure we can represent the difference between the two values. */
812 if ((itrue - ifalse > 0)
813 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
814 return FALSE;
815
816 diff = trunc_int_for_mode (itrue - ifalse, mode);
817
818 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
819 != UNKNOWN);
820
821 reversep = 0;
822 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
823 normalize = 0;
824 else if (ifalse == 0 && exact_log2 (itrue) >= 0
825 && (STORE_FLAG_VALUE == 1
826 || BRANCH_COST >= 2))
827 normalize = 1;
828 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
829 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
830 normalize = 1, reversep = 1;
831 else if (itrue == -1
832 && (STORE_FLAG_VALUE == -1
833 || BRANCH_COST >= 2))
834 normalize = -1;
835 else if (ifalse == -1 && can_reverse
836 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
837 normalize = -1, reversep = 1;
838 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
839 || BRANCH_COST >= 3)
840 normalize = -1;
841 else
842 return FALSE;
843
844 if (reversep)
845 {
846 tmp = itrue; itrue = ifalse; ifalse = tmp;
847 diff = trunc_int_for_mode (-diff, mode);
848 }
849
850 start_sequence ();
851 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
852 if (! target)
853 {
854 end_sequence ();
855 return FALSE;
856 }
857
858 /* if (test) x = 3; else x = 4;
859 => x = 3 + (test == 0); */
860 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
861 {
862 target = expand_simple_binop (mode,
863 (diff == STORE_FLAG_VALUE
864 ? PLUS : MINUS),
865 GEN_INT (ifalse), target, if_info->x, 0,
866 OPTAB_WIDEN);
867 }
868
869 /* if (test) x = 8; else x = 0;
870 => x = (test != 0) << 3; */
871 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
872 {
873 target = expand_simple_binop (mode, ASHIFT,
874 target, GEN_INT (tmp), if_info->x, 0,
875 OPTAB_WIDEN);
876 }
877
878 /* if (test) x = -1; else x = b;
879 => x = -(test != 0) | b; */
880 else if (itrue == -1)
881 {
882 target = expand_simple_binop (mode, IOR,
883 target, GEN_INT (ifalse), if_info->x, 0,
884 OPTAB_WIDEN);
885 }
886
887 /* if (test) x = a; else x = b;
888 => x = (-(test != 0) & (b - a)) + a; */
889 else
890 {
891 target = expand_simple_binop (mode, AND,
892 target, GEN_INT (diff), if_info->x, 0,
893 OPTAB_WIDEN);
894 if (target)
895 target = expand_simple_binop (mode, PLUS,
896 target, GEN_INT (ifalse),
897 if_info->x, 0, OPTAB_WIDEN);
898 }
899
900 if (! target)
901 {
902 end_sequence ();
903 return FALSE;
904 }
905
906 if (target != if_info->x)
907 noce_emit_move_insn (if_info->x, target);
908
909 seq = get_insns ();
910 end_sequence ();
911
912 if (seq_contains_jump (seq))
913 return FALSE;
914
915 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
916
917 return TRUE;
918 }
919
920 return FALSE;
921 }
922
923 /* Convert "if (test) foo++" into "foo += (test != 0)", and
924 similarly for "foo--". */
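/* This works because (test != 0) evaluates to 1 exactly when the original
   code would have executed the increment and to 0 otherwise, so the
   unconditional add is a no-op precisely when the branch would have
   skipped it; "foo--" is handled symmetrically.  */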
925
926 static int
927 noce_try_addcc (struct noce_if_info *if_info)
928 {
929 rtx target, seq;
930 int subtract, normalize;
931
932 if (! no_new_pseudos
933 && GET_CODE (if_info->a) == PLUS
934 && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
935 && (reversed_comparison_code (if_info->cond, if_info->jump)
936 != UNKNOWN))
937 {
938 rtx cond = if_info->cond;
939 enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
940
941 /* First try to use addcc pattern. */
942 if (general_operand (XEXP (cond, 0), VOIDmode)
943 && general_operand (XEXP (cond, 1), VOIDmode))
944 {
945 start_sequence ();
946 target = emit_conditional_add (if_info->x, code,
947 XEXP (cond, 0), XEXP (cond, 1),
948 VOIDmode,
949 if_info->b, XEXP (if_info->a, 1),
950 GET_MODE (if_info->x),
951 (code == LTU || code == GEU
952 || code == LEU || code == GTU));
953 if (target)
954 {
955 if (target != if_info->x)
956 noce_emit_move_insn (if_info->x, target);
957
958 seq = get_insns ();
959 end_sequence ();
960 emit_insn_before_setloc (seq, if_info->jump,
961 INSN_LOCATOR (if_info->insn_a));
962 return TRUE;
963 }
964 end_sequence ();
965 }
966
967 /* If that fails, construct conditional increment or decrement using
968 setcc. */
969 if (BRANCH_COST >= 2
970 && (XEXP (if_info->a, 1) == const1_rtx
971 || XEXP (if_info->a, 1) == constm1_rtx))
972 {
973 start_sequence ();
974 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
975 subtract = 0, normalize = 0;
976 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
977 subtract = 1, normalize = 0;
978 else
979 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
980
981
982 target = noce_emit_store_flag (if_info,
983 gen_reg_rtx (GET_MODE (if_info->x)),
984 1, normalize);
985
986 if (target)
987 target = expand_simple_binop (GET_MODE (if_info->x),
988 subtract ? MINUS : PLUS,
989 if_info->b, target, if_info->x,
990 0, OPTAB_WIDEN);
991 if (target)
992 {
993 if (target != if_info->x)
994 noce_emit_move_insn (if_info->x, target);
995
996 seq = get_insns ();
997 end_sequence ();
998
999 if (seq_contains_jump (seq))
1000 return FALSE;
1001
1002 emit_insn_before_setloc (seq, if_info->jump,
1003 INSN_LOCATOR (if_info->insn_a));
1004
1005 return TRUE;
1006 }
1007 end_sequence ();
1008 }
1009 }
1010
1011 return FALSE;
1012 }
1013
1014 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
1015
1016 static int
1017 noce_try_store_flag_mask (struct noce_if_info *if_info)
1018 {
1019 rtx target, seq;
1020 int reversep;
1021
1022 reversep = 0;
1023 if (! no_new_pseudos
1024 && (BRANCH_COST >= 2
1025 || STORE_FLAG_VALUE == -1)
1026 && ((if_info->a == const0_rtx
1027 && rtx_equal_p (if_info->b, if_info->x))
1028 || ((reversep = (reversed_comparison_code (if_info->cond,
1029 if_info->jump)
1030 != UNKNOWN))
1031 && if_info->b == const0_rtx
1032 && rtx_equal_p (if_info->a, if_info->x))))
1033 {
1034 start_sequence ();
1035 target = noce_emit_store_flag (if_info,
1036 gen_reg_rtx (GET_MODE (if_info->x)),
1037 reversep, -1);
1038 if (target)
1039 target = expand_simple_binop (GET_MODE (if_info->x), AND,
1040 if_info->x, target, if_info->x, 0,
1041 OPTAB_WIDEN);
1042
1043 if (target)
1044 {
1045 if (target != if_info->x)
1046 noce_emit_move_insn (if_info->x, target);
1047
1048 seq = get_insns ();
1049 end_sequence ();
1050
1051 if (seq_contains_jump (seq))
1052 return FALSE;
1053
1054 emit_insn_before_setloc (seq, if_info->jump,
1055 INSN_LOCATOR (if_info->insn_a));
1056
1057 return TRUE;
1058 }
1059
1060 end_sequence ();
1061 }
1062
1063 return FALSE;
1064 }
1065
1066 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
1067
1068 static rtx
1069 noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
1070 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
1071 {
1072 /* If earliest == jump, try to build the cmove insn directly.
1073 This is helpful when combine has created some complex condition
1074 (like for alpha's cmovlbs) that we can't hope to regenerate
1075 through the normal interface. */
1076
1077 if (if_info->cond_earliest == if_info->jump)
1078 {
1079 rtx tmp;
1080
1081 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1082 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1083 tmp = gen_rtx_SET (VOIDmode, x, tmp);
1084
1085 start_sequence ();
1086 tmp = emit_insn (tmp);
1087
1088 if (recog_memoized (tmp) >= 0)
1089 {
1090 tmp = get_insns ();
1091 end_sequence ();
1092 emit_insn (tmp);
1093
1094 return x;
1095 }
1096
1097 end_sequence ();
1098 }
1099
1100 /* Don't even try if the comparison operands are weird. */
1101 if (! general_operand (cmp_a, GET_MODE (cmp_a))
1102 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1103 return NULL_RTX;
1104
1105 #if HAVE_conditional_move
1106 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1107 vtrue, vfalse, GET_MODE (x),
1108 (code == LTU || code == GEU
1109 || code == LEU || code == GTU));
1110 #else
1111 /* We'll never get here, as noce_process_if_block doesn't call the
1112 functions involved. Ifdef code, however, should be discouraged
1113 because it leads to typos in the code not selected. However,
1114 emit_conditional_move won't exist either. */
1115 return NULL_RTX;
1116 #endif
1117 }
1118
1119 /* Try only simple constants and registers here. More complex cases
1120 are handled in noce_try_cmove_arith after the noce_try_store_flag*
1121 transforms have had a go at it. */
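/* For example, "x = test ? a : b;" with A and B already registers or
   constants can often be emitted directly as the target's conditional-move
   pattern, via noce_emit_cmove / emit_conditional_move, when one exists.  */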
1122
1123 static int
1124 noce_try_cmove (struct noce_if_info *if_info)
1125 {
1126 enum rtx_code code;
1127 rtx target, seq;
1128
1129 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1130 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1131 {
1132 start_sequence ();
1133
1134 code = GET_CODE (if_info->cond);
1135 target = noce_emit_cmove (if_info, if_info->x, code,
1136 XEXP (if_info->cond, 0),
1137 XEXP (if_info->cond, 1),
1138 if_info->a, if_info->b);
1139
1140 if (target)
1141 {
1142 if (target != if_info->x)
1143 noce_emit_move_insn (if_info->x, target);
1144
1145 seq = get_insns ();
1146 end_sequence ();
1147 emit_insn_before_setloc (seq, if_info->jump,
1148 INSN_LOCATOR (if_info->insn_a));
1149 return TRUE;
1150 }
1151 else
1152 {
1153 end_sequence ();
1154 return FALSE;
1155 }
1156 }
1157
1158 return FALSE;
1159 }
1160
1161 /* Try more complex cases involving conditional_move. */
1162
1163 static int
1164 noce_try_cmove_arith (struct noce_if_info *if_info)
1165 {
1166 rtx a = if_info->a;
1167 rtx b = if_info->b;
1168 rtx x = if_info->x;
1169 rtx insn_a, insn_b;
1170 rtx tmp, target;
1171 int is_mem = 0;
1172 enum rtx_code code;
1173
1174 /* A conditional move from two memory sources is equivalent to a
1175 conditional on their addresses followed by a load. Don't do this
1176 early because it'll screw alias analysis. Note that we've
1177 already checked for no side effects. */
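/* Illustrative sketch: "if (test) x = *p; else x = *q;" can be rewritten as
   "tmp = test ? p : q; x = *tmp;".  That is what the IS_MEM path below
   arranges: the conditional move selects between the two addresses and a
   single load from the chosen address is emitted afterwards.  */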
1178 if (! no_new_pseudos && cse_not_expected
1179 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
1180 && BRANCH_COST >= 5)
1181 {
1182 a = XEXP (a, 0);
1183 b = XEXP (b, 0);
1184 x = gen_reg_rtx (Pmode);
1185 is_mem = 1;
1186 }
1187
1188 /* ??? We could handle this if we knew that a load from A or B could
1189 not fault. This is also true if we've already loaded
1190 from the address along the path from ENTRY. */
1191 else if (may_trap_p (a) || may_trap_p (b))
1192 return FALSE;
1193
1194 /* if (test) x = a + b; else x = c - d;
1195 => y = a + b;
1196 x = c - d;
1197 if (test)
1198 x = y;
1199 */
1200
1201 code = GET_CODE (if_info->cond);
1202 insn_a = if_info->insn_a;
1203 insn_b = if_info->insn_b;
1204
1205 /* Possibly rearrange operands to make things come out more natural. */
1206 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1207 {
1208 int reversep = 0;
1209 if (rtx_equal_p (b, x))
1210 reversep = 1;
1211 else if (general_operand (b, GET_MODE (b)))
1212 reversep = 1;
1213
1214 if (reversep)
1215 {
1216 code = reversed_comparison_code (if_info->cond, if_info->jump);
1217 tmp = a, a = b, b = tmp;
1218 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1219 }
1220 }
1221
1222 start_sequence ();
1223
1224 /* If either operand is complex, load it into a register first.
1225 The best way to do this is to copy the original insn. In this
1226 way we preserve any clobbers etc that the insn may have had.
1227 This is of course not possible in the IS_MEM case. */
1228 if (! general_operand (a, GET_MODE (a)))
1229 {
1230 rtx set;
1231
1232 if (no_new_pseudos)
1233 goto end_seq_and_fail;
1234
1235 if (is_mem)
1236 {
1237 tmp = gen_reg_rtx (GET_MODE (a));
1238 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1239 }
1240 else if (! insn_a)
1241 goto end_seq_and_fail;
1242 else
1243 {
1244 a = gen_reg_rtx (GET_MODE (a));
1245 tmp = copy_rtx (insn_a);
1246 set = single_set (tmp);
1247 SET_DEST (set) = a;
1248 tmp = emit_insn (PATTERN (tmp));
1249 }
1250 if (recog_memoized (tmp) < 0)
1251 goto end_seq_and_fail;
1252 }
1253 if (! general_operand (b, GET_MODE (b)))
1254 {
1255 rtx set;
1256
1257 if (no_new_pseudos)
1258 goto end_seq_and_fail;
1259
1260 if (is_mem)
1261 {
1262 tmp = gen_reg_rtx (GET_MODE (b));
1263 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1264 }
1265 else if (! insn_b)
1266 goto end_seq_and_fail;
1267 else
1268 {
1269 b = gen_reg_rtx (GET_MODE (b));
1270 tmp = copy_rtx (insn_b);
1271 set = single_set (tmp);
1272 SET_DEST (set) = b;
1273 tmp = emit_insn (PATTERN (tmp));
1274 }
1275 if (recog_memoized (tmp) < 0)
1276 goto end_seq_and_fail;
1277 }
1278
1279 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1280 XEXP (if_info->cond, 1), a, b);
1281
1282 if (! target)
1283 goto end_seq_and_fail;
1284
1285 /* If we're handling the memory case from above, emit the load now. */
1286 if (is_mem)
1287 {
1288 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1289
1290 /* Copy over flags as appropriate. */
1291 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1292 MEM_VOLATILE_P (tmp) = 1;
1293 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1294 MEM_IN_STRUCT_P (tmp) = 1;
1295 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1296 MEM_SCALAR_P (tmp) = 1;
1297 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1298 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1299 set_mem_align (tmp,
1300 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1301
1302 noce_emit_move_insn (if_info->x, tmp);
1303 }
1304 else if (target != x)
1305 noce_emit_move_insn (x, target);
1306
1307 tmp = get_insns ();
1308 end_sequence ();
1309 emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1310 return TRUE;
1311
1312 end_seq_and_fail:
1313 end_sequence ();
1314 return FALSE;
1315 }
1316
1317 /* For most cases, the simplified condition we found is the best
1318 choice, but this is not the case for the min/max/abs transforms.
1319 For these we wish to know that it is A or B in the condition. */
1320
1321 static rtx
1322 noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
1323 rtx *earliest)
1324 {
1325 rtx cond, set, insn;
1326 int reverse;
1327
1328 /* If target is already mentioned in the known condition, return it. */
1329 if (reg_mentioned_p (target, if_info->cond))
1330 {
1331 *earliest = if_info->cond_earliest;
1332 return if_info->cond;
1333 }
1334
1335 set = pc_set (if_info->jump);
1336 cond = XEXP (SET_SRC (set), 0);
1337 reverse
1338 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1339 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1340
1341 /* If we're looking for a constant, try to make the conditional
1342 have that constant in it. There are two reasons why it may
1343 not have the constant we want:
1344
1345 1. GCC may have needed to put the constant in a register, because
1346 the target can't compare directly against that constant. For
1347 this case, we look for a SET immediately before the comparison
1348 that puts a constant in that register.
1349
1350 2. GCC may have canonicalized the conditional, for example
1351 replacing "if x < 4" with "if x <= 3". We can undo that (or
1352 make equivalent types of changes) to get the constants we need
1353 if they're off by one in the right direction. */
1354
1355 if (GET_CODE (target) == CONST_INT)
1356 {
1357 enum rtx_code code = GET_CODE (if_info->cond);
1358 rtx op_a = XEXP (if_info->cond, 0);
1359 rtx op_b = XEXP (if_info->cond, 1);
1360 rtx prev_insn;
1361
1362 /* First, look to see if we put a constant in a register. */
1363 prev_insn = PREV_INSN (if_info->cond_earliest);
1364 if (prev_insn
1365 && INSN_P (prev_insn)
1366 && GET_CODE (PATTERN (prev_insn)) == SET)
1367 {
1368 rtx src = find_reg_equal_equiv_note (prev_insn);
1369 if (!src)
1370 src = SET_SRC (PATTERN (prev_insn));
1371 if (GET_CODE (src) == CONST_INT)
1372 {
1373 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1374 op_a = src;
1375 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1376 op_b = src;
1377
1378 if (GET_CODE (op_a) == CONST_INT)
1379 {
1380 rtx tmp = op_a;
1381 op_a = op_b;
1382 op_b = tmp;
1383 code = swap_condition (code);
1384 }
1385 }
1386 }
1387
1388 /* Now, look to see if we can get the right constant by
1389 adjusting the conditional. */
1390 if (GET_CODE (op_b) == CONST_INT)
1391 {
1392 HOST_WIDE_INT desired_val = INTVAL (target);
1393 HOST_WIDE_INT actual_val = INTVAL (op_b);
1394
1395 switch (code)
1396 {
1397 case LT:
1398 if (actual_val == desired_val + 1)
1399 {
1400 code = LE;
1401 op_b = GEN_INT (desired_val);
1402 }
1403 break;
1404 case LE:
1405 if (actual_val == desired_val - 1)
1406 {
1407 code = LT;
1408 op_b = GEN_INT (desired_val);
1409 }
1410 break;
1411 case GT:
1412 if (actual_val == desired_val - 1)
1413 {
1414 code = GE;
1415 op_b = GEN_INT (desired_val);
1416 }
1417 break;
1418 case GE:
1419 if (actual_val == desired_val + 1)
1420 {
1421 code = GT;
1422 op_b = GEN_INT (desired_val);
1423 }
1424 break;
1425 default:
1426 break;
1427 }
1428 }
1429
1430 /* If we made any changes, generate a new conditional that is
1431 equivalent to what we started with, but has the right
1432 constants in it. */
1433 if (code != GET_CODE (if_info->cond)
1434 || op_a != XEXP (if_info->cond, 0)
1435 || op_b != XEXP (if_info->cond, 1))
1436 {
1437 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1438 *earliest = if_info->cond_earliest;
1439 return cond;
1440 }
1441 }
1442
1443 cond = canonicalize_condition (if_info->jump, cond, reverse,
1444 earliest, target, false);
1445 if (! cond || ! reg_mentioned_p (target, cond))
1446 return NULL;
1447
1448 /* We almost certainly searched back to a different place.
1449 Need to re-verify correct lifetimes. */
1450
1451 /* X may not be mentioned in the range (cond_earliest, jump]. */
1452 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1453 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1454 return NULL;
1455
1456 /* A and B may not be modified in the range [cond_earliest, jump). */
1457 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1458 if (INSN_P (insn)
1459 && (modified_in_p (if_info->a, insn)
1460 || modified_in_p (if_info->b, insn)))
1461 return NULL;
1462
1463 return cond;
1464 }
1465
1466 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1467
1468 static int
1469 noce_try_minmax (struct noce_if_info *if_info)
1470 {
1471 rtx cond, earliest, target, seq;
1472 enum rtx_code code, op;
1473 int unsignedp;
1474
1475 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1476 if (no_new_pseudos)
1477 return FALSE;
1478
1479 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1480 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1481 to get the target to tell us... */
1482 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1483 || HONOR_NANS (GET_MODE (if_info->x)))
1484 return FALSE;
1485
1486 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1487 if (!cond)
1488 return FALSE;
1489
1490 /* Verify the condition is of the form we expect, and canonicalize
1491 the comparison code. */
1492 code = GET_CODE (cond);
1493 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1494 {
1495 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1496 return FALSE;
1497 }
1498 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1499 {
1500 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1501 return FALSE;
1502 code = swap_condition (code);
1503 }
1504 else
1505 return FALSE;
1506
1507 /* Determine what sort of operation this is. Note that the code is for
1508 a taken branch, so the code->operation mapping appears backwards. */
1509 switch (code)
1510 {
1511 case LT:
1512 case LE:
1513 case UNLT:
1514 case UNLE:
1515 op = SMAX;
1516 unsignedp = 0;
1517 break;
1518 case GT:
1519 case GE:
1520 case UNGT:
1521 case UNGE:
1522 op = SMIN;
1523 unsignedp = 0;
1524 break;
1525 case LTU:
1526 case LEU:
1527 op = UMAX;
1528 unsignedp = 1;
1529 break;
1530 case GTU:
1531 case GEU:
1532 op = UMIN;
1533 unsignedp = 1;
1534 break;
1535 default:
1536 return FALSE;
1537 }
1538
1539 start_sequence ();
1540
1541 target = expand_simple_binop (GET_MODE (if_info->x), op,
1542 if_info->a, if_info->b,
1543 if_info->x, unsignedp, OPTAB_WIDEN);
1544 if (! target)
1545 {
1546 end_sequence ();
1547 return FALSE;
1548 }
1549 if (target != if_info->x)
1550 noce_emit_move_insn (if_info->x, target);
1551
1552 seq = get_insns ();
1553 end_sequence ();
1554
1555 if (seq_contains_jump (seq))
1556 return FALSE;
1557
1558 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1559 if_info->cond = cond;
1560 if_info->cond_earliest = earliest;
1561
1562 return TRUE;
1563 }
1564
1565 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1566
1567 static int
1568 noce_try_abs (struct noce_if_info *if_info)
1569 {
1570 rtx cond, earliest, target, seq, a, b, c;
1571 int negate;
1572
1573 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1574 if (no_new_pseudos)
1575 return FALSE;
1576
1577 /* Recognize A and B as constituting an ABS or NABS. */
1578 a = if_info->a;
1579 b = if_info->b;
1580 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1581 negate = 0;
1582 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1583 {
1584 c = a; a = b; b = c;
1585 negate = 1;
1586 }
1587 else
1588 return FALSE;
1589
1590 cond = noce_get_alt_condition (if_info, b, &earliest);
1591 if (!cond)
1592 return FALSE;
1593
1594 /* Verify the condition is of the form we expect. */
1595 if (rtx_equal_p (XEXP (cond, 0), b))
1596 c = XEXP (cond, 1);
1597 else if (rtx_equal_p (XEXP (cond, 1), b))
1598 c = XEXP (cond, 0);
1599 else
1600 return FALSE;
1601
1602 /* Verify that C is zero. Search backward through the block for
1603 a REG_EQUAL note if necessary. */
1604 if (REG_P (c))
1605 {
1606 rtx insn, note = NULL;
1607 for (insn = earliest;
1608 insn != if_info->test_bb->head;
1609 insn = PREV_INSN (insn))
1610 if (INSN_P (insn)
1611 && ((note = find_reg_note (insn, REG_EQUAL, c))
1612 || (note = find_reg_note (insn, REG_EQUIV, c))))
1613 break;
1614 if (! note)
1615 return FALSE;
1616 c = XEXP (note, 0);
1617 }
1618 if (GET_CODE (c) == MEM
1619 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1620 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1621 c = get_pool_constant (XEXP (c, 0));
1622
1623 /* Work around funny ideas get_condition has wrt canonicalization.
1624 Note that these rtx constants are known to be CONST_INT, and
1625 therefore imply integer comparisons. */
1626 if (c == constm1_rtx && GET_CODE (cond) == GT)
1627 ;
1628 else if (c == const1_rtx && GET_CODE (cond) == LT)
1629 ;
1630 else if (c != CONST0_RTX (GET_MODE (b)))
1631 return FALSE;
1632
1633 /* Determine what sort of operation this is. */
1634 switch (GET_CODE (cond))
1635 {
1636 case LT:
1637 case LE:
1638 case UNLT:
1639 case UNLE:
1640 negate = !negate;
1641 break;
1642 case GT:
1643 case GE:
1644 case UNGT:
1645 case UNGE:
1646 break;
1647 default:
1648 return FALSE;
1649 }
1650
1651 start_sequence ();
1652
1653 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
1654
1655 /* ??? It's a quandary whether cmove would be better here, especially
1656 for integers. Perhaps combine will clean things up. */
1657 if (target && negate)
1658 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1659
1660 if (! target)
1661 {
1662 end_sequence ();
1663 return FALSE;
1664 }
1665
1666 if (target != if_info->x)
1667 noce_emit_move_insn (if_info->x, target);
1668
1669 seq = get_insns ();
1670 end_sequence ();
1671
1672 if (seq_contains_jump (seq))
1673 return FALSE;
1674
1675 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1676 if_info->cond = cond;
1677 if_info->cond_earliest = earliest;
1678
1679 return TRUE;
1680 }
1681
1682 /* Similar to get_condition, only the resulting condition must be
1683 valid at JUMP, instead of at EARLIEST. */
1684
1685 static rtx
1686 noce_get_condition (rtx jump, rtx *earliest)
1687 {
1688 rtx cond, set, tmp, insn;
1689 bool reverse;
1690
1691 if (! any_condjump_p (jump))
1692 return NULL_RTX;
1693
1694 set = pc_set (jump);
1695
1696 /* If this branches to JUMP_LABEL when the condition is false,
1697 reverse the condition. */
1698 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1699 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
1700
1701 /* If the condition variable is a register and is MODE_INT, accept it. */
1702
1703 cond = XEXP (SET_SRC (set), 0);
1704 tmp = XEXP (cond, 0);
1705 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
1706 {
1707 *earliest = jump;
1708
1709 if (reverse)
1710 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1711 GET_MODE (cond), tmp, XEXP (cond, 1));
1712 return cond;
1713 }
1714
1715 /* Otherwise, fall back on canonicalize_condition to do the dirty
1716 work of manipulating MODE_CC values and COMPARE rtx codes. */
1717
1718 tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
1719 false);
1720 if (!tmp)
1721 return NULL_RTX;
1722
1723 /* We are going to insert code before JUMP, not before EARLIEST.
1724 We must therefore be certain that the given condition is valid
1725 at JUMP by virtue of not having been modified since. */
1726 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1727 if (INSN_P (insn) && modified_in_p (tmp, insn))
1728 break;
1729 if (insn == jump)
1730 return tmp;
1731
1732 /* The condition was modified. See if we can get a partial result
1733 that doesn't follow all the reversals. Perhaps combine can fold
1734 them together later. */
1735 tmp = XEXP (tmp, 0);
1736 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
1737 return NULL_RTX;
1738 tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp,
1739 false);
1740 if (!tmp)
1741 return NULL_RTX;
1742
1743 /* For sanity's sake, re-validate the new result. */
1744 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1745 if (INSN_P (insn) && modified_in_p (tmp, insn))
1746 return NULL_RTX;
1747
1748 return tmp;
1749 }
1750
1751 /* Return true if OP is ok for if-then-else processing. */
1752
1753 static int
1754 noce_operand_ok (rtx op)
1755 {
1756 /* We special-case memories, so handle any of them with
1757 no address side effects. */
1758 if (GET_CODE (op) == MEM)
1759 return ! side_effects_p (XEXP (op, 0));
1760
1761 if (side_effects_p (op))
1762 return FALSE;
1763
1764 return ! may_trap_p (op);
1765 }
1766
1767 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1768 without using conditional execution. Return TRUE if we were
1769 successful at converting the block. */
1770
1771 static int
1772 noce_process_if_block (struct ce_if_block * ce_info)
1773 {
1774 basic_block test_bb = ce_info->test_bb; /* test block */
1775 basic_block then_bb = ce_info->then_bb; /* THEN */
1776 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1777 struct noce_if_info if_info;
1778 rtx insn_a, insn_b;
1779 rtx set_a, set_b;
1780 rtx orig_x, x, a, b;
1781 rtx jump, cond;
1782
1783 /* We're looking for patterns of the form
1784
1785 (1) if (...) x = a; else x = b;
1786 (2) x = b; if (...) x = a;
1787 (3) if (...) x = a; // as if with an initial x = x.
1788
1789 The latter patterns require jumps to be more expensive.
1790
1791 ??? For future expansion, look for multiple X in such patterns. */
1792
1793 /* If test is comprised of && or || elements, don't handle it unless it is
1794 the special case of && elements without an ELSE block. */
1795 if (ce_info->num_multiple_test_blocks)
1796 {
1797 if (else_bb || ! ce_info->and_and_p)
1798 return FALSE;
1799
1800 ce_info->test_bb = test_bb = ce_info->last_test_bb;
1801 ce_info->num_multiple_test_blocks = 0;
1802 ce_info->num_and_and_blocks = 0;
1803 ce_info->num_or_or_blocks = 0;
1804 }
1805
1806 /* If this is not a standard conditional jump, we can't parse it. */
1807 jump = test_bb->end;
1808 cond = noce_get_condition (jump, &if_info.cond_earliest);
1809 if (! cond)
1810 return FALSE;
1811
1812 /* If the conditional jump is more than just a conditional
1813 jump, then we can not do if-conversion on this block. */
1814 if (! onlyjump_p (jump))
1815 return FALSE;
1816
1817 /* We must be comparing objects whose modes imply the size. */
1818 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1819 return FALSE;
1820
1821 /* Look for one of the potential sets. */
1822 insn_a = first_active_insn (then_bb);
1823 if (! insn_a
1824 || insn_a != last_active_insn (then_bb, FALSE)
1825 || (set_a = single_set (insn_a)) == NULL_RTX)
1826 return FALSE;
1827
1828 x = SET_DEST (set_a);
1829 a = SET_SRC (set_a);
1830
1831 /* Look for the other potential set. Make sure we've got equivalent
1832 destinations. */
1833 /* ??? This is overconservative. Storing to two different mems is
1834 as easy as conditionally computing the address. Storing to a
1835 single mem merely requires a scratch memory to use as one of the
1836 destination addresses; often the memory immediately below the
1837 stack pointer is available for this. */
1838 set_b = NULL_RTX;
1839 if (else_bb)
1840 {
1841 insn_b = first_active_insn (else_bb);
1842 if (! insn_b
1843 || insn_b != last_active_insn (else_bb, FALSE)
1844 || (set_b = single_set (insn_b)) == NULL_RTX
1845 || ! rtx_equal_p (x, SET_DEST (set_b)))
1846 return FALSE;
1847 }
1848 else
1849 {
1850 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1851 /* We're going to be moving the evaluation of B down from above
1852 COND_EARLIEST to JUMP. Make sure the relevant data is still
1853 intact. */
1854 if (! insn_b
1855 || GET_CODE (insn_b) != INSN
1856 || (set_b = single_set (insn_b)) == NULL_RTX
1857 || ! rtx_equal_p (x, SET_DEST (set_b))
1858 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
1859 || modified_between_p (SET_SRC (set_b),
1860 PREV_INSN (if_info.cond_earliest), jump)
1861 /* Likewise with X. In particular this can happen when
1862 noce_get_condition looks farther back in the instruction
1863 stream than one might expect. */
1864 || reg_overlap_mentioned_p (x, cond)
1865 || reg_overlap_mentioned_p (x, a)
1866 || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
1867 insn_b = set_b = NULL_RTX;
1868 }
1869
1870 /* If x has side effects then only the if-then-else form is safe to
1871 convert. But even in that case we would need to restore any notes
1872 (such as REG_INC) at the end. That can be tricky if
1873 noce_emit_move_insn expands to more than one insn, so disable the
1874 optimization entirely for now if there are side effects. */
1875 if (side_effects_p (x))
1876 return FALSE;
1877
1878 b = (set_b ? SET_SRC (set_b) : x);
1879
1880 /* Only operate on register destinations, and even then avoid extending
1881 the lifetime of hard registers on small register class machines. */
1882 orig_x = x;
1883 if (GET_CODE (x) != REG
1884 || (SMALL_REGISTER_CLASSES
1885 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1886 {
1887 if (no_new_pseudos || GET_MODE (x) == BLKmode)
1888 return FALSE;
1889 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1890 ? XEXP (x, 0) : x));
1891 }
1892
1893 /* Don't operate on sources that may trap or are volatile. */
1894 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1895 return FALSE;
1896
1897 /* Set up the info block for our subroutines. */
1898 if_info.test_bb = test_bb;
1899 if_info.cond = cond;
1900 if_info.jump = jump;
1901 if_info.insn_a = insn_a;
1902 if_info.insn_b = insn_b;
1903 if_info.x = x;
1904 if_info.a = a;
1905 if_info.b = b;
1906
1907 /* Try optimizations in some approximation of a useful order. */
1908 /* ??? Should first look to see if X is live incoming at all. If it
1909 isn't, we don't need anything but an unconditional set. */
1910
1911 /* Look and see if A and B are really the same. Avoid creating silly
1912 cmove constructs that no one will fix up later. */
1913 if (rtx_equal_p (a, b))
1914 {
1915 /* If we have an INSN_B, we don't have to create any new rtl. Just
1916 move the instruction that we already have. If we don't have an
1917 INSN_B, that means that A == X, and we've got a noop move. In
1918 that case don't do anything and let the code below delete INSN_A. */
1919 if (insn_b && else_bb)
1920 {
1921 rtx note;
1922
1923 if (else_bb && insn_b == else_bb->end)
1924 else_bb->end = PREV_INSN (insn_b);
1925 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
1926
1927 /* If there was a REG_EQUAL note, delete it since it may have been
1928 true due to this insn being after a jump. */
1929 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1930 remove_note (insn_b, note);
1931
1932 insn_b = NULL_RTX;
1933 }
1934 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1935 x must be executed twice. */
1936 else if (insn_b && side_effects_p (orig_x))
1937 return FALSE;
1938
1939 x = orig_x;
1940 goto success;
1941 }
1942
1943 if (noce_try_move (&if_info))
1944 goto success;
1945 if (noce_try_store_flag (&if_info))
1946 goto success;
1947 if (noce_try_minmax (&if_info))
1948 goto success;
1949 if (noce_try_abs (&if_info))
1950 goto success;
1951 if (HAVE_conditional_move
1952 && noce_try_cmove (&if_info))
1953 goto success;
1954 if (! HAVE_conditional_execution)
1955 {
1956 if (noce_try_store_flag_constants (&if_info))
1957 goto success;
1958 if (noce_try_addcc (&if_info))
1959 goto success;
1960 if (noce_try_store_flag_mask (&if_info))
1961 goto success;
1962 if (HAVE_conditional_move
1963 && noce_try_cmove_arith (&if_info))
1964 goto success;
1965 }
1966
1967 return FALSE;
1968
1969 success:
1970 /* The original sets may now be killed. */
1971 delete_insn (insn_a);
1972
1973 /* Several special cases here: First, we may have reused insn_b above,
1974 in which case insn_b is now NULL. Second, we want to delete insn_b
1975 if it came from the ELSE block, because it follows the now correct
1976 write that appears in the TEST block. However, if we got insn_b from
1977 the TEST block, it may in fact be loading data needed for the comparison.
1978 We'll let life_analysis remove the insn if it's really dead. */
1979 if (insn_b && else_bb)
1980 delete_insn (insn_b);
1981
1982 /* The new insns will have been inserted immediately before the jump. We
1983 should be able to remove the jump with impunity, but the condition itself
1984 may have been modified by gcse to be shared across basic blocks. */
1985 delete_insn (jump);
1986
1987 /* If we used a temporary, fix it up now. */
1988 if (orig_x != x)
1989 {
1990 start_sequence ();
1991 noce_emit_move_insn (copy_rtx (orig_x), x);
1992 insn_b = get_insns ();
1993 end_sequence ();
1994
1995 emit_insn_after_setloc (insn_b, test_bb->end, INSN_LOCATOR (insn_a));
1996 }
1997
1998 /* Merge the blocks! */
1999 merge_if_block (ce_info);
2000
2001 return TRUE;
2002 }
2003 \f
2004 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
2005 straight line code. Return true if successful. */
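/* Illustrative sketch (the exact outcome depends on the target and on which
   of the noce_* or cond_exec transformations succeeds): a source fragment like

	if (a < b)
	  x = a;
	else
	  x = b;

   may become a branch-free min operation or a conditional-move / store-flag
   sequence before reload, or, on predicated targets after reload, the THEN
   and ELSE insns may instead be executed conditionally under the test. */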
2006
2007 static int
2008 process_if_block (struct ce_if_block * ce_info)
2009 {
2010 if (! reload_completed
2011 && noce_process_if_block (ce_info))
2012 return TRUE;
2013
2014 if (HAVE_conditional_execution && reload_completed)
2015 {
2016 /* If we have && and || tests, first try to combine them into the
2017 conditional code; if that fails, go back and handle the block without
2018 the && and ||, which at present handles only the && case when there
2019 was no ELSE block. */
2020 if (cond_exec_process_if_block (ce_info, TRUE))
2021 return TRUE;
2022
2023 if (ce_info->num_multiple_test_blocks)
2024 {
2025 cancel_changes (0);
2026
2027 if (cond_exec_process_if_block (ce_info, FALSE))
2028 return TRUE;
2029 }
2030 }
2031
2032 return FALSE;
2033 }
2034
2035 /* Merge the blocks and mark for local life update. */
2036
2037 static void
2038 merge_if_block (struct ce_if_block * ce_info)
2039 {
2040 basic_block test_bb = ce_info->test_bb; /* last test block */
2041 basic_block then_bb = ce_info->then_bb; /* THEN */
2042 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
2043 basic_block join_bb = ce_info->join_bb; /* join block */
2044 basic_block combo_bb;
2045
2046 /* All block merging is done into the lower block numbers. */
2047
2048 combo_bb = test_bb;
2049
2050 /* Merge any basic blocks to handle && and || subtests. Each of
2051 the blocks is on the fallthru path from the predecessor block. */
2052 if (ce_info->num_multiple_test_blocks > 0)
2053 {
2054 basic_block bb = test_bb;
2055 basic_block last_test_bb = ce_info->last_test_bb;
2056 basic_block fallthru = block_fallthru (bb);
2057
2058 do
2059 {
2060 bb = fallthru;
2061 fallthru = block_fallthru (bb);
2062 if (post_dominators)
2063 delete_from_dominance_info (post_dominators, bb);
2064 merge_blocks (combo_bb, bb);
2065 num_true_changes++;
2066 }
2067 while (bb != last_test_bb);
2068 }
2069
2070 /* Merge TEST block into THEN block. Normally the THEN block won't have a
2071 label, but it might if there were || tests. That label's count should be
2072 zero, and it normally should be removed. */
2073
2074 if (then_bb)
2075 {
2076 if (combo_bb->global_live_at_end)
2077 COPY_REG_SET (combo_bb->global_live_at_end,
2078 then_bb->global_live_at_end);
2079 if (post_dominators)
2080 delete_from_dominance_info (post_dominators, then_bb);
2081 merge_blocks (combo_bb, then_bb);
2082 num_true_changes++;
2083 }
2084
2085 /* The ELSE block, if it existed, had a label. That label's count
2086 will almost always be zero, but odd things can happen when labels
2087 get their addresses taken. */
2088 if (else_bb)
2089 {
2090 if (post_dominators)
2091 delete_from_dominance_info (post_dominators, else_bb);
2092 merge_blocks (combo_bb, else_bb);
2093 num_true_changes++;
2094 }
2095
2096 /* If there was no join block reported, that means it was not adjacent
2097 to the others, and so we cannot merge them. */
2098
2099 if (! join_bb)
2100 {
2101 rtx last = combo_bb->end;
2102
2103 /* The outgoing edge for the current COMBO block should already
2104 be correct. Verify this. */
2105 if (combo_bb->succ == NULL_EDGE)
2106 {
2107 if (find_reg_note (last, REG_NORETURN, NULL))
2108 ;
2109 else if (GET_CODE (last) == INSN
2110 && GET_CODE (PATTERN (last)) == TRAP_IF
2111 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
2112 ;
2113 else
2114 abort ();
2115 }
2116
2117 /* There should still be something at the end of the THEN or ELSE
2118 blocks taking us to our final destination. */
2119 else if (GET_CODE (last) == JUMP_INSN)
2120 ;
2121 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
2122 && GET_CODE (last) == CALL_INSN
2123 && SIBLING_CALL_P (last))
2124 ;
2125 else if ((combo_bb->succ->flags & EDGE_EH)
2126 && can_throw_internal (last))
2127 ;
2128 else
2129 abort ();
2130 }
2131
2132 /* The JOIN block may have had quite a number of other predecessors too.
2133 Since we've already merged the TEST, THEN and ELSE blocks, we should
2134 have only one remaining edge from our if-then-else diamond. If there
2135 is more than one remaining edge, it must come from elsewhere. There
2136 may be zero incoming edges if the THEN block didn't actually join
2137 back up (as with a call to abort). */
2138 else if ((join_bb->pred == NULL
2139 || join_bb->pred->pred_next == NULL)
2140 && join_bb != EXIT_BLOCK_PTR)
2141 {
2142 /* We can merge the JOIN. */
2143 if (combo_bb->global_live_at_end)
2144 COPY_REG_SET (combo_bb->global_live_at_end,
2145 join_bb->global_live_at_end);
2146
2147 if (post_dominators)
2148 delete_from_dominance_info (post_dominators, join_bb);
2149 merge_blocks (combo_bb, join_bb);
2150 num_true_changes++;
2151 }
2152 else
2153 {
2154 /* We cannot merge the JOIN. */
2155
2156 /* The outgoing edge for the current COMBO block should already
2157 be correct. Verify this. */
2158 if (combo_bb->succ->succ_next != NULL_EDGE
2159 || combo_bb->succ->dest != join_bb)
2160 abort ();
2161
2162 /* Remove the jump and cruft from the end of the COMBO block. */
2163 if (join_bb != EXIT_BLOCK_PTR)
2164 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
2165 }
2166
2167 num_updated_if_blocks++;
2168 }
2169 \f
2170 /* Find a block ending in a simple IF condition and try to transform it
2171 in some way. When converting a multi-block condition, put the new code
2172 in the first such block and delete the rest. Return a pointer to this
2173 first block if some transformation was done. Return NULL otherwise. */
2174
2175 static basic_block
2176 find_if_header (basic_block test_bb, int pass)
2177 {
2178 ce_if_block_t ce_info;
2179 edge then_edge;
2180 edge else_edge;
2181
2182 /* The kind of block we're looking for has exactly two successors. */
2183 if ((then_edge = test_bb->succ) == NULL_EDGE
2184 || (else_edge = then_edge->succ_next) == NULL_EDGE
2185 || else_edge->succ_next != NULL_EDGE)
2186 return NULL;
2187
2188 /* Neither edge should be abnormal. */
2189 if ((then_edge->flags & EDGE_COMPLEX)
2190 || (else_edge->flags & EDGE_COMPLEX))
2191 return NULL;
2192
2193 /* Nor exit the loop. */
2194 if ((then_edge->flags & EDGE_LOOP_EXIT)
2195 || (else_edge->flags & EDGE_LOOP_EXIT))
2196 return NULL;
2197
2198 /* The THEN edge is canonically the one that falls through. */
2199 if (then_edge->flags & EDGE_FALLTHRU)
2200 ;
2201 else if (else_edge->flags & EDGE_FALLTHRU)
2202 {
2203 edge e = else_edge;
2204 else_edge = then_edge;
2205 then_edge = e;
2206 }
2207 else
2208 /* Otherwise this must be a multiway branch of some sort. */
2209 return NULL;
2210
2211 memset (&ce_info, '\0', sizeof (ce_info));
2212 ce_info.test_bb = test_bb;
2213 ce_info.then_bb = then_edge->dest;
2214 ce_info.else_bb = else_edge->dest;
2215 ce_info.pass = pass;
2216
2217 #ifdef IFCVT_INIT_EXTRA_FIELDS
2218 IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2219 #endif
2220
2221 if (find_if_block (&ce_info))
2222 goto success;
2223
2224 if (HAVE_trap && HAVE_conditional_trap
2225 && find_cond_trap (test_bb, then_edge, else_edge))
2226 goto success;
2227
2228 if (post_dominators
2229 && (! HAVE_conditional_execution || reload_completed))
2230 {
2231 if (find_if_case_1 (test_bb, then_edge, else_edge))
2232 goto success;
2233 if (find_if_case_2 (test_bb, then_edge, else_edge))
2234 goto success;
2235 }
2236
2237 return NULL;
2238
2239 success:
2240 if (rtl_dump_file)
2241 fprintf (rtl_dump_file, "Conversion succeeded on pass %d.\n", pass);
2242 return ce_info.test_bb;
2243 }
2244
2245 /* Determine whether a block has two edges, one of which falls through to the
2246 next block and the other of which jumps to a specific block, so that we can
2247 tell whether the block is part of an && or || test. Return -1 if it is not;
2248 otherwise return the number of non-note, non-jump, non-USE/CLOBBER insns in the block. */
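/* Illustrative sketch: for a source-level test such as "if (a && b) x = y;",
   the block computing A typically falls through to the block computing B and
   jumps past the THEN clause when A is false; such a fallthru-plus-jump block
   is what this predicate recognizes. */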
2249
2250 static int
2251 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
2252 {
2253 edge cur_edge;
2254 int fallthru_p = FALSE;
2255 int jump_p = FALSE;
2256 rtx insn;
2257 rtx end;
2258 int n_insns = 0;
2259
2260 if (!cur_bb || !target_bb)
2261 return -1;
2262
2263 /* If no edges, obviously it doesn't jump or fallthru. */
2264 if (cur_bb->succ == NULL_EDGE)
2265 return FALSE;
2266
2267 for (cur_edge = cur_bb->succ;
2268 cur_edge != NULL_EDGE;
2269 cur_edge = cur_edge->succ_next)
2270 {
2271 if (cur_edge->flags & EDGE_COMPLEX)
2272 /* Anything complex isn't what we want. */
2273 return -1;
2274
2275 else if (cur_edge->flags & EDGE_FALLTHRU)
2276 fallthru_p = TRUE;
2277
2278 else if (cur_edge->dest == target_bb)
2279 jump_p = TRUE;
2280
2281 else
2282 return -1;
2283 }
2284
2285 if ((jump_p & fallthru_p) == 0)
2286 return -1;
2287
2288 /* Don't allow calls in the block, since this is used to group && and ||
2289 together for conditional execution support. ??? We should support
2290 conditional execution across calls for IA-64 some day, but
2291 for now it makes the code simpler. */
2292 end = cur_bb->end;
2293 insn = cur_bb->head;
2294
2295 while (insn != NULL_RTX)
2296 {
2297 if (GET_CODE (insn) == CALL_INSN)
2298 return -1;
2299
2300 if (INSN_P (insn)
2301 && GET_CODE (insn) != JUMP_INSN
2302 && GET_CODE (PATTERN (insn)) != USE
2303 && GET_CODE (PATTERN (insn)) != CLOBBER)
2304 n_insns++;
2305
2306 if (insn == end)
2307 break;
2308
2309 insn = NEXT_INSN (insn);
2310 }
2311
2312 return n_insns;
2313 }
2314
2315 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2316 block. If so, we'll try to convert the insns to not require the branch.
2317 Return TRUE if we were successful at converting the block. */
2318
2319 static int
2320 find_if_block (struct ce_if_block * ce_info)
2321 {
2322 basic_block test_bb = ce_info->test_bb;
2323 basic_block then_bb = ce_info->then_bb;
2324 basic_block else_bb = ce_info->else_bb;
2325 basic_block join_bb = NULL_BLOCK;
2326 edge then_succ = then_bb->succ;
2327 edge else_succ = else_bb->succ;
2328 int then_predecessors;
2329 int else_predecessors;
2330 edge cur_edge;
2331 basic_block next;
2332
2333 ce_info->last_test_bb = test_bb;
2334
2335 /* Discover if any fall through predecessors of the current test basic block
2336 were && tests (which jump to the else block) or || tests (which jump to
2337 the then block). */
2338 if (HAVE_conditional_execution && reload_completed
2339 && test_bb->pred != NULL_EDGE
2340 && test_bb->pred->pred_next == NULL_EDGE
2341 && test_bb->pred->flags == EDGE_FALLTHRU)
2342 {
2343 basic_block bb = test_bb->pred->src;
2344 basic_block target_bb;
2345 int max_insns = MAX_CONDITIONAL_EXECUTE;
2346 int n_insns;
2347
2348 /* Determine if the preceding block is an && or || block. */
2349 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
2350 {
2351 ce_info->and_and_p = TRUE;
2352 target_bb = else_bb;
2353 }
2354 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
2355 {
2356 ce_info->and_and_p = FALSE;
2357 target_bb = then_bb;
2358 }
2359 else
2360 target_bb = NULL_BLOCK;
2361
2362 if (target_bb && n_insns <= max_insns)
2363 {
2364 int total_insns = 0;
2365 int blocks = 0;
2366
2367 ce_info->last_test_bb = test_bb;
2368
2369 /* Found at least one && or || block, look for more. */
2370 do
2371 {
2372 ce_info->test_bb = test_bb = bb;
2373 total_insns += n_insns;
2374 blocks++;
2375
2376 if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
2377 break;
2378
2379 bb = bb->pred->src;
2380 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
2381 }
2382 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
2383
2384 ce_info->num_multiple_test_blocks = blocks;
2385 ce_info->num_multiple_test_insns = total_insns;
2386
2387 if (ce_info->and_and_p)
2388 ce_info->num_and_and_blocks = blocks;
2389 else
2390 ce_info->num_or_or_blocks = blocks;
2391 }
2392 }
2393
2394 /* Count the number of incoming edges the THEN and ELSE blocks have. */
2395 then_predecessors = 0;
2396 for (cur_edge = then_bb->pred;
2397 cur_edge != NULL_EDGE;
2398 cur_edge = cur_edge->pred_next)
2399 {
2400 then_predecessors++;
2401 if (cur_edge->flags & EDGE_COMPLEX)
2402 return FALSE;
2403 }
2404
2405 else_predecessors = 0;
2406 for (cur_edge = else_bb->pred;
2407 cur_edge != NULL_EDGE;
2408 cur_edge = cur_edge->pred_next)
2409 {
2410 else_predecessors++;
2411 if (cur_edge->flags & EDGE_COMPLEX)
2412 return FALSE;
2413 }
2414
2415 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
2416 other than any || blocks which jump to the THEN block. */
2417 if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
2418 return FALSE;
2419
2420 /* The THEN block of an IF-THEN combo must have zero or one successors. */
2421 if (then_succ != NULL_EDGE
2422 && (then_succ->succ_next != NULL_EDGE
2423 || (then_succ->flags & EDGE_COMPLEX)
2424 || (flow2_completed && tablejump_p (then_bb->end, NULL, NULL))))
2425 return FALSE;
2426
2427 /* If the THEN block has no successors, conditional execution can still
2428 make a conditional call. Don't do this unless the ELSE block has
2429 only one incoming edge -- the CFG manipulation is too ugly otherwise.
2430 Check for the last insn of the THEN block being an indirect jump, which
2431 is listed as not having any successors, but confuses the rest of the CE
2432 code processing. ??? We should fix this in the future. */
2433 if (then_succ == NULL)
2434 {
2435 if (else_bb->pred->pred_next == NULL_EDGE)
2436 {
2437 rtx last_insn = then_bb->end;
2438
2439 while (last_insn
2440 && GET_CODE (last_insn) == NOTE
2441 && last_insn != then_bb->head)
2442 last_insn = PREV_INSN (last_insn);
2443
2444 if (last_insn
2445 && GET_CODE (last_insn) == JUMP_INSN
2446 && ! simplejump_p (last_insn))
2447 return FALSE;
2448
2449 join_bb = else_bb;
2450 else_bb = NULL_BLOCK;
2451 }
2452 else
2453 return FALSE;
2454 }
2455
2456 /* If the THEN block's successor is the other edge out of the TEST block,
2457 then we have an IF-THEN combo without an ELSE. */
2458 else if (then_succ->dest == else_bb)
2459 {
2460 join_bb = else_bb;
2461 else_bb = NULL_BLOCK;
2462 }
2463
2464 /* If the THEN and ELSE blocks meet in a subsequent block, and the ELSE
2465 has exactly one predecessor and one successor, and the outgoing edge
2466 is not complex, then we have an IF-THEN-ELSE combo. */
2467 else if (else_succ != NULL_EDGE
2468 && then_succ->dest == else_succ->dest
2469 && else_bb->pred->pred_next == NULL_EDGE
2470 && else_succ->succ_next == NULL_EDGE
2471 && ! (else_succ->flags & EDGE_COMPLEX)
2472 && ! (flow2_completed && tablejump_p (else_bb->end, NULL, NULL)))
2473 join_bb = else_succ->dest;
2474
2475 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2476 else
2477 return FALSE;
2478
2479 num_possible_if_blocks++;
2480
2481 if (rtl_dump_file)
2482 {
2483 fprintf (rtl_dump_file, "\nIF-THEN%s block found, pass %d, start block %d [insn %d], then %d [%d]",
2484 (else_bb) ? "-ELSE" : "",
2485 ce_info->pass,
2486 test_bb->index, (test_bb->head) ? (int)INSN_UID (test_bb->head) : -1,
2487 then_bb->index, (then_bb->head) ? (int)INSN_UID (then_bb->head) : -1);
2488
2489 if (else_bb)
2490 fprintf (rtl_dump_file, ", else %d [%d]",
2491 else_bb->index, (else_bb->head) ? (int)INSN_UID (else_bb->head) : -1);
2492
2493 fprintf (rtl_dump_file, ", join %d [%d]",
2494 join_bb->index, (join_bb->head) ? (int)INSN_UID (join_bb->head) : -1);
2495
2496 if (ce_info->num_multiple_test_blocks > 0)
2497 fprintf (rtl_dump_file, ", %d %s block%s last test %d [%d]",
2498 ce_info->num_multiple_test_blocks,
2499 (ce_info->and_and_p) ? "&&" : "||",
2500 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
2501 ce_info->last_test_bb->index,
2502 ((ce_info->last_test_bb->head)
2503 ? (int)INSN_UID (ce_info->last_test_bb->head)
2504 : -1));
2505
2506 fputc ('\n', rtl_dump_file);
2507 }
2508
2509 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
2510 first condition for free, since we've already asserted that there's a
2511 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
2512 we checked the FALLTHRU flag, those are already adjacent to the last IF
2513 block. */
2514 /* ??? As an enhancement, move the ELSE block. Have to deal with
2515 BLOCK notes, if by no other means than aborting the merge if they
2516 exist. Sticky enough I don't want to think about it now. */
2517 next = then_bb;
2518 if (else_bb && (next = next->next_bb) != else_bb)
2519 return FALSE;
2520 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2521 {
2522 if (else_bb)
2523 join_bb = NULL;
2524 else
2525 return FALSE;
2526 }
2527
2528 /* Do the real work. */
2529 ce_info->else_bb = else_bb;
2530 ce_info->join_bb = join_bb;
2531
2532 return process_if_block (ce_info);
2533 }
2534
2535 /* Convert a branch over a trap, or a branch
2536 to a trap, into a conditional trap. */
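/* Illustrative sketch (only when the target provides conditional trap
   support): a jump around -- or to -- a block containing nothing but an
   unconditional trap insn is replaced by a single conditional trap emitted
   at the point where the condition is computed, after which the now
   unreachable trap block can be deleted. */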
2537
2538 static int
2539 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
2540 {
2541 basic_block then_bb = then_edge->dest;
2542 basic_block else_bb = else_edge->dest;
2543 basic_block other_bb, trap_bb;
2544 rtx trap, jump, cond, cond_earliest, seq;
2545 enum rtx_code code;
2546
2547 /* Locate the block with the trap instruction. */
2548 /* ??? While we look for no successors, we really ought to allow
2549 EH successors. Need to fix merge_if_block for that to work. */
2550 if ((trap = block_has_only_trap (then_bb)) != NULL)
2551 trap_bb = then_bb, other_bb = else_bb;
2552 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2553 trap_bb = else_bb, other_bb = then_bb;
2554 else
2555 return FALSE;
2556
2557 if (rtl_dump_file)
2558 {
2559 fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2560 test_bb->index, trap_bb->index);
2561 }
2562
2563 /* If this is not a standard conditional jump, we can't parse it. */
2564 jump = test_bb->end;
2565 cond = noce_get_condition (jump, &cond_earliest);
2566 if (! cond)
2567 return FALSE;
2568
2569 /* If the conditional jump is more than just a conditional jump, then
2570 we cannot do if-conversion on this block. */
2571 if (! onlyjump_p (jump))
2572 return FALSE;
2573
2574 /* We must be comparing objects whose modes imply the size. */
2575 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2576 return FALSE;
2577
2578 /* Reverse the comparison code, if necessary. */
2579 code = GET_CODE (cond);
2580 if (then_bb == trap_bb)
2581 {
2582 code = reversed_comparison_code (cond, jump);
2583 if (code == UNKNOWN)
2584 return FALSE;
2585 }
2586
2587 /* Attempt to generate the conditional trap. */
2588 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2589 TRAP_CODE (PATTERN (trap)));
2590 if (seq == NULL)
2591 return FALSE;
2592
2593 num_true_changes++;
2594
2595 /* Emit the new insns before cond_earliest. */
2596 emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));
2597
2598 /* Delete the trap block if possible. */
2599 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2600 if (trap_bb->pred == NULL)
2601 {
2602 if (post_dominators)
2603 delete_from_dominance_info (post_dominators, trap_bb);
2604 delete_block (trap_bb);
2605 }
2606
2607 /* If the non-trap block and the test are now adjacent, merge them.
2608 Otherwise we must insert a direct branch. */
2609 if (test_bb->next_bb == other_bb)
2610 {
2611 struct ce_if_block new_ce_info;
2612 delete_insn (jump);
2613 memset (&new_ce_info, '\0', sizeof (new_ce_info));
2614 new_ce_info.test_bb = test_bb;
2615 new_ce_info.then_bb = NULL;
2616 new_ce_info.else_bb = NULL;
2617 new_ce_info.join_bb = other_bb;
2618 merge_if_block (&new_ce_info);
2619 }
2620 else
2621 {
2622 rtx lab, newjump;
2623
2624 lab = JUMP_LABEL (jump);
2625 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2626 LABEL_NUSES (lab) += 1;
2627 JUMP_LABEL (newjump) = lab;
2628 emit_barrier_after (newjump);
2629
2630 delete_insn (jump);
2631 }
2632
2633 return TRUE;
2634 }
2635
2636 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2637 return it. */
2638
2639 static rtx
2640 block_has_only_trap (basic_block bb)
2641 {
2642 rtx trap;
2643
2644 /* We're not the exit block. */
2645 if (bb == EXIT_BLOCK_PTR)
2646 return NULL_RTX;
2647
2648 /* The block must have no successors. */
2649 if (bb->succ)
2650 return NULL_RTX;
2651
2652 /* The only instruction in the block must be the trap. */
2653 trap = first_active_insn (bb);
2654 if (! (trap == bb->end
2655 && GET_CODE (PATTERN (trap)) == TRAP_IF
2656 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2657 return NULL_RTX;
2658
2659 return trap;
2660 }
2661
2662 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2663 transformable, but not necessarily the other. There need be no
2664 JOIN block.
2665
2666 Return TRUE if we were successful at converting the block.
2667
2668 Cases we'd like to look at:
2669
2670 (1)
2671 if (test) goto over; // x not live
2672 x = a;
2673 goto label;
2674 over:
2675
2676 becomes
2677
2678 x = a;
2679 if (! test) goto label;
2680
2681 (2)
2682 if (test) goto E; // x not live
2683 x = big();
2684 goto L;
2685 E:
2686 x = b;
2687 goto M;
2688
2689 becomes
2690
2691 x = b;
2692 if (test) goto M;
2693 x = big();
2694 goto L;
2695
2696 (3) // This one's really only interesting for targets that can do
2697 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2698 // it results in multiple branches on a cache line, which often
2699 // does not sit well with predictors.
2700
2701 if (test1) goto E; // predicted not taken
2702 x = a;
2703 if (test2) goto F;
2704 ...
2705 E:
2706 x = b;
2707 J:
2708
2709 becomes
2710
2711 x = a;
2712 if (test1) goto E;
2713 if (test2) goto F;
2714
2715 Notes:
2716
2717 (A) Don't do (2) if the branch is predicted against the block we're
2718 eliminating. Do it anyway if we can eliminate a branch; this requires
2719 that the sole successor of the eliminated block postdominate the other
2720 side of the if.
2721
2722 (B) With CE, on (3) we can steal from both sides of the if, creating
2723
2724 if (test1) x = a;
2725 if (!test1) x = b;
2726 if (test1) goto J;
2727 if (test2) goto F;
2728 ...
2729 J:
2730
2731 Again, this is most useful if J postdominates.
2732
2733 (C) CE substitutes for helpful life information.
2734
2735 (D) These heuristics need a lot of work. */
2736
2737 /* Tests for case 1 above. */
2738
2739 static int
2740 find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
2741 {
2742 basic_block then_bb = then_edge->dest;
2743 basic_block else_bb = else_edge->dest, new_bb;
2744 edge then_succ = then_bb->succ;
2745 int then_bb_index;
2746
2747 /* THEN has one successor. */
2748 if (!then_succ || then_succ->succ_next != NULL)
2749 return FALSE;
2750
2751 /* THEN does not fall through, but is not strange either. */
2752 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2753 return FALSE;
2754
2755 /* THEN has one predecessor. */
2756 if (then_bb->pred->pred_next != NULL)
2757 return FALSE;
2758
2759 /* THEN must do something. */
2760 if (forwarder_block_p (then_bb))
2761 return FALSE;
2762
2763 num_possible_if_blocks++;
2764 if (rtl_dump_file)
2765 fprintf (rtl_dump_file,
2766 "\nIF-CASE-1 found, start %d, then %d\n",
2767 test_bb->index, then_bb->index);
2768
2769 /* THEN is small. */
2770 if (count_bb_insns (then_bb) > BRANCH_COST)
2771 return FALSE;
2772
2773 /* Registers set are dead, or are predicable. */
2774 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2775 then_bb->succ->dest, 1))
2776 return FALSE;
2777
2778 /* Conversion went ok, including moving the insns and fixing up the
2779 jump. Adjust the CFG to match. */
2780
2781 bitmap_operation (test_bb->global_live_at_end,
2782 else_bb->global_live_at_start,
2783 then_bb->global_live_at_end, BITMAP_IOR);
2784
2785 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2786 then_bb_index = then_bb->index;
2787 if (post_dominators)
2788 delete_from_dominance_info (post_dominators, then_bb);
2789 delete_block (then_bb);
2790
2791 /* Make the rest of the code believe that the newly created block is the THEN_BB
2792 block we removed. */
2793 if (new_bb)
2794 {
2795 new_bb->index = then_bb_index;
2796 BASIC_BLOCK (then_bb_index) = new_bb;
2797 if (post_dominators)
2798 add_to_dominance_info (post_dominators, new_bb);
2799 }
2800 /* We've possibly created a jump to the next insn; cleanup_cfg will solve that
2801 later. */
2802
2803 num_true_changes++;
2804 num_updated_if_blocks++;
2805
2806 return TRUE;
2807 }
2808
2809 /* Test for case 2 above. */
2810
2811 static int
2812 find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
2813 {
2814 basic_block then_bb = then_edge->dest;
2815 basic_block else_bb = else_edge->dest;
2816 edge else_succ = else_bb->succ;
2817 rtx note;
2818
2819 /* ELSE has one successor. */
2820 if (!else_succ || else_succ->succ_next != NULL)
2821 return FALSE;
2822
2823 /* ELSE outgoing edge is not complex. */
2824 if (else_succ->flags & EDGE_COMPLEX)
2825 return FALSE;
2826
2827 /* ELSE has one predecessor. */
2828 if (else_bb->pred->pred_next != NULL)
2829 return FALSE;
2830
2831 /* THEN is not EXIT. */
2832 if (then_bb->index < 0)
2833 return FALSE;
2834
2835 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2836 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2837 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2838 ;
2839 else if (else_succ->dest->index < 0
2840 || dominated_by_p (post_dominators, then_bb,
2841 else_succ->dest))
2842 ;
2843 else
2844 return FALSE;
2845
2846 num_possible_if_blocks++;
2847 if (rtl_dump_file)
2848 fprintf (rtl_dump_file,
2849 "\nIF-CASE-2 found, start %d, else %d\n",
2850 test_bb->index, else_bb->index);
2851
2852 /* ELSE is small. */
2853 if (count_bb_insns (else_bb) > BRANCH_COST)
2854 return FALSE;
2855
2856 /* Registers set are dead, or are predicable. */
2857 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2858 return FALSE;
2859
2860 /* Conversion went ok, including moving the insns and fixing up the
2861 jump. Adjust the CFG to match. */
2862
2863 bitmap_operation (test_bb->global_live_at_end,
2864 then_bb->global_live_at_start,
2865 else_bb->global_live_at_end, BITMAP_IOR);
2866
2867 if (post_dominators)
2868 delete_from_dominance_info (post_dominators, else_bb);
2869 delete_block (else_bb);
2870
2871 num_true_changes++;
2872 num_updated_if_blocks++;
2873
2874 /* ??? We may now fallthru from one of THEN's successors into a join
2875 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2876
2877 return TRUE;
2878 }
2879
2880 /* A subroutine of dead_or_predicable called through for_each_rtx.
2881 Return 1 if a memory reference is found. */
2882
2883 static int
2884 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
2885 {
2886 return GET_CODE (*px) == MEM;
2887 }
2888
2889 /* Used by the code above to perform the actual rtl transformations.
2890 Return TRUE if successful.
2891
2892 TEST_BB is the block containing the conditional branch. MERGE_BB
2893 is the block containing the code to manipulate. NEW_DEST is the
2894 label TEST_BB should be branching to after the conversion.
2895 REVERSEP is true if the sense of the branch should be reversed. */
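/* For example (illustrative, in terms of case (1) above): TEST_BB ends in
   "if (test) goto over", MERGE_BB holds "x = a; goto label", OTHER_BB is the
   block at OVER, NEW_DEST is LABEL, and REVERSEP is nonzero, so the branch is
   rewritten as "if (! test) goto label" with "x = a" moved in front of it. */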
2896
2897 static int
2898 dead_or_predicable (basic_block test_bb, basic_block merge_bb,
2899 basic_block other_bb, basic_block new_dest, int reversep)
2900 {
2901 rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2902
2903 jump = test_bb->end;
2904
2905 /* Find the extent of the real code in the merge block. */
2906 head = merge_bb->head;
2907 end = merge_bb->end;
2908
2909 if (GET_CODE (head) == CODE_LABEL)
2910 head = NEXT_INSN (head);
2911 if (GET_CODE (head) == NOTE)
2912 {
2913 if (head == end)
2914 {
2915 head = end = NULL_RTX;
2916 goto no_body;
2917 }
2918 head = NEXT_INSN (head);
2919 }
2920
2921 if (GET_CODE (end) == JUMP_INSN)
2922 {
2923 if (head == end)
2924 {
2925 head = end = NULL_RTX;
2926 goto no_body;
2927 }
2928 end = PREV_INSN (end);
2929 }
2930
2931 /* Disable handling dead code by conditional execution if the machine needs
2932 to do anything funny with the tests, etc. */
2933 #ifndef IFCVT_MODIFY_TESTS
2934 if (HAVE_conditional_execution)
2935 {
2936 /* In the conditional execution case, we have things easy. We know
2937 the condition is reversible. We don't have to check life info,
2938 becase we're going to conditionally execute the code anyway.
2939 All that's left is making sure the insns involved can actually
2940 be predicated. */
2941
2942 rtx cond, prob_val;
2943
2944 cond = cond_exec_get_condition (jump);
2945 if (! cond)
2946 return FALSE;
2947
2948 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2949 if (prob_val)
2950 prob_val = XEXP (prob_val, 0);
2951
2952 if (reversep)
2953 {
2954 enum rtx_code rev = reversed_comparison_code (cond, jump);
2955 if (rev == UNKNOWN)
2956 return FALSE;
2957 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2958 XEXP (cond, 1));
2959 if (prob_val)
2960 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2961 }
2962
2963 if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
2964 prob_val, 0))
2965 goto cancel;
2966
2967 earliest = jump;
2968 }
2969 else
2970 #endif
2971 {
2972 /* In the non-conditional execution case, we have to verify that there
2973 are no trapping operations, no calls, no references to memory, and
2974 that any registers modified are dead at the branch site. */
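/* Illustrative sketch: hoisting "x = a" above "if (test)" as in case (1) is
   only safe here if X is not live on the other path out of the test and
   nothing in the moved code can fault or be affected by the insns it is
   hoisted across; the scans and register-set computations below check
   exactly that. */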
2975
2976 rtx insn, cond, prev;
2977 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2978 regset merge_set, tmp, test_live, test_set;
2979 struct propagate_block_info *pbi;
2980 int i, fail = 0;
2981
2982 /* Check for no calls or trapping operations. */
2983 for (insn = head; ; insn = NEXT_INSN (insn))
2984 {
2985 if (GET_CODE (insn) == CALL_INSN)
2986 return FALSE;
2987 if (INSN_P (insn))
2988 {
2989 if (may_trap_p (PATTERN (insn)))
2990 return FALSE;
2991
2992 /* ??? Even non-trapping memories such as stack frame
2993 references must be avoided. For stores, we collect
2994 no lifetime info; for reads, we'd have to assert
2995 true_dependence false against every store in the
2996 TEST range. */
2997 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2998 return FALSE;
2999 }
3000 if (insn == end)
3001 break;
3002 }
3003
3004 if (! any_condjump_p (jump))
3005 return FALSE;
3006
3007 /* Find the extent of the conditional. */
3008 cond = noce_get_condition (jump, &earliest);
3009 if (! cond)
3010 return FALSE;
3011
3012 /* Collect:
3013 MERGE_SET = set of registers set in MERGE_BB
3014 TEST_LIVE = set of registers live at EARLIEST
3015 TEST_SET = set of registers set between EARLIEST and the
3016 end of the block. */
3017
3018 tmp = INITIALIZE_REG_SET (tmp_head);
3019 merge_set = INITIALIZE_REG_SET (merge_set_head);
3020 test_live = INITIALIZE_REG_SET (test_live_head);
3021 test_set = INITIALIZE_REG_SET (test_set_head);
3022
3023 /* ??? bb->local_set is only valid during calculate_global_regs_live,
3024 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
3025 since we've already asserted that MERGE_BB is small. */
3026 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
3027
3028 /* For small register class machines, don't lengthen lifetimes of
3029 hard registers before reload. */
3030 if (SMALL_REGISTER_CLASSES && ! reload_completed)
3031 {
3032 EXECUTE_IF_SET_IN_BITMAP
3033 (merge_set, 0, i,
3034 {
3035 if (i < FIRST_PSEUDO_REGISTER
3036 && ! fixed_regs[i]
3037 && ! global_regs[i])
3038 fail = 1;
3039 });
3040 }
3041
3042 /* For TEST, we're interested in a range of insns, not a whole block.
3043 Moreover, we're interested in the insns live from OTHER_BB. */
3044
3045 COPY_REG_SET (test_live, other_bb->global_live_at_start);
3046 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
3047 0);
3048
3049 for (insn = jump; ; insn = prev)
3050 {
3051 prev = propagate_one_insn (pbi, insn);
3052 if (insn == earliest)
3053 break;
3054 }
3055
3056 free_propagate_block_info (pbi);
3057
3058 /* We can perform the transformation if
3059 MERGE_SET & (TEST_SET | TEST_LIVE)
3060 and
3061 TEST_SET & merge_bb->global_live_at_start
3062 are empty. */
3063
3064 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
3065 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
3066 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3067
3068 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
3069 BITMAP_AND);
3070 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3071
3072 FREE_REG_SET (tmp);
3073 FREE_REG_SET (merge_set);
3074 FREE_REG_SET (test_live);
3075 FREE_REG_SET (test_set);
3076
3077 if (fail)
3078 return FALSE;
3079 }
3080
3081 no_body:
3082 /* We don't want to use normal invert_jump or redirect_jump because
3083 we don't want delete_insn called. Also, we want to do our own
3084 change group management. */
3085
3086 old_dest = JUMP_LABEL (jump);
3087 if (other_bb != new_dest)
3088 {
3089 new_label = block_label (new_dest);
3090 if (reversep
3091 ? ! invert_jump_1 (jump, new_label)
3092 : ! redirect_jump_1 (jump, new_label))
3093 goto cancel;
3094 }
3095
3096 if (! apply_change_group ())
3097 return FALSE;
3098
3099 if (other_bb != new_dest)
3100 {
3101 if (old_dest)
3102 LABEL_NUSES (old_dest) -= 1;
3103 if (new_label)
3104 LABEL_NUSES (new_label) += 1;
3105 JUMP_LABEL (jump) = new_label;
3106 if (reversep)
3107 invert_br_probabilities (jump);
3108
3109 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
3110 if (reversep)
3111 {
3112 gcov_type count, probability;
3113 count = BRANCH_EDGE (test_bb)->count;
3114 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
3115 FALLTHRU_EDGE (test_bb)->count = count;
3116 probability = BRANCH_EDGE (test_bb)->probability;
3117 BRANCH_EDGE (test_bb)->probability
3118 = FALLTHRU_EDGE (test_bb)->probability;
3119 FALLTHRU_EDGE (test_bb)->probability = probability;
3120 update_br_prob_note (test_bb);
3121 }
3122 }
3123
3124 /* Move the insns out of MERGE_BB to before the branch. */
3125 if (head != NULL)
3126 {
3127 if (end == merge_bb->end)
3128 merge_bb->end = PREV_INSN (head);
3129
3130 if (squeeze_notes (&head, &end))
3131 return TRUE;
3132
3133 reorder_insns (head, end, PREV_INSN (earliest));
3134 }
3135
3136 /* Remove the jump and edge if we can. */
3137 if (other_bb == new_dest)
3138 {
3139 delete_insn (jump);
3140 remove_edge (BRANCH_EDGE (test_bb));
3141 /* ??? Can't merge blocks here, as then_bb is still in use.
3142 At minimum, the merge will get done just before bb-reorder. */
3143 }
3144
3145 return TRUE;
3146
3147 cancel:
3148 cancel_changes (0);
3149 return FALSE;
3150 }
3151 \f
3152 /* Main entry point for all if-conversion. */
3153
3154 void
3155 if_convert (int x_life_data_ok)
3156 {
3157 basic_block bb;
3158 int pass;
3159
3160 num_possible_if_blocks = 0;
3161 num_updated_if_blocks = 0;
3162 num_true_changes = 0;
3163 life_data_ok = (x_life_data_ok != 0);
3164
3165 if (! (* targetm.cannot_modify_jumps_p) ())
3166 mark_loop_exit_edges ();
3167
3168 /* Free up basic_block_for_insn so that we don't have to keep it
3169 up to date, either here or in merge_blocks. */
3170 free_basic_block_vars (1);
3171
3172 /* Compute postdominators if we think we'll use them. */
3173 post_dominators = NULL;
3174 if (HAVE_conditional_execution || life_data_ok)
3175 {
3176 post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
3177 }
3178 if (life_data_ok)
3179 clear_bb_flags ();
3180
3181 /* Go through each of the basic blocks looking for things to convert. If we
3182 have conditional execution, we make multiple passes to allow us to handle
3183 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
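/* Illustrative sketch: once an inner "if (b) x = y;" has been converted to
   predicated insns, an enclosing "if (a) { ... }" can become a simple
   IF-THEN candidate on the next pass, which is why we iterate until no more
   conditional-execution changes are made. */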
3184 pass = 0;
3185 do
3186 {
3187 cond_exec_changed_p = FALSE;
3188 pass++;
3189
3190 #ifdef IFCVT_MULTIPLE_DUMPS
3191 if (rtl_dump_file && pass > 1)
3192 fprintf (rtl_dump_file, "\n\n========== Pass %d ==========\n", pass);
3193 #endif
3194
3195 FOR_EACH_BB (bb)
3196 {
3197 basic_block new_bb;
3198 while ((new_bb = find_if_header (bb, pass)))
3199 bb = new_bb;
3200 }
3201
3202 #ifdef IFCVT_MULTIPLE_DUMPS
3203 if (rtl_dump_file && cond_exec_changed_p)
3204 print_rtl_with_bb (rtl_dump_file, get_insns ());
3205 #endif
3206 }
3207 while (cond_exec_changed_p);
3208
3209 #ifdef IFCVT_MULTIPLE_DUMPS
3210 if (rtl_dump_file)
3211 fprintf (rtl_dump_file, "\n\n========== no more changes\n");
3212 #endif
3213
3214 if (post_dominators)
3215 free_dominance_info (post_dominators);
3216
3217 if (rtl_dump_file)
3218 fflush (rtl_dump_file);
3219
3220 clear_aux_for_blocks ();
3221
3222 /* Rebuild life info for basic blocks that require it. */
3223 if (num_true_changes && life_data_ok)
3224 {
3225 /* If we allocated new pseudos, we must resize the array for sched1. */
3226 if (max_regno < max_reg_num ())
3227 {
3228 max_regno = max_reg_num ();
3229 allocate_reg_info (max_regno, FALSE, FALSE);
3230 }
3231 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3232 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3233 | PROP_KILL_DEAD_CODE);
3234 }
3235
3236 /* Write the final stats. */
3237 if (rtl_dump_file && num_possible_if_blocks > 0)
3238 {
3239 fprintf (rtl_dump_file,
3240 "\n%d possible IF blocks searched.\n",
3241 num_possible_if_blocks);
3242 fprintf (rtl_dump_file,
3243 "%d IF blocks converted.\n",
3244 num_updated_if_blocks);
3245 fprintf (rtl_dump_file,
3246 "%d true changes made.\n\n\n",
3247 num_true_changes);
3248 }
3249
3250 #ifdef ENABLE_CHECKING
3251 verify_flow_info ();
3252 #endif
3253 }