1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23
24 #include "rtl.h"
25 #include "regs.h"
26 #include "function.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "except.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "expr.h"
34 #include "real.h"
35 #include "output.h"
36 #include "toplev.h"
37 #include "tm_p.h"
38
39
40 #ifndef HAVE_conditional_execution
41 #define HAVE_conditional_execution 0
42 #endif
43 #ifndef HAVE_conditional_move
44 #define HAVE_conditional_move 0
45 #endif
46 #ifndef HAVE_incscc
47 #define HAVE_incscc 0
48 #endif
49 #ifndef HAVE_decscc
50 #define HAVE_decscc 0
51 #endif
52 #ifndef HAVE_trap
53 #define HAVE_trap 0
54 #endif
55 #ifndef HAVE_conditional_trap
56 #define HAVE_conditional_trap 0
57 #endif
58
59 #ifndef MAX_CONDITIONAL_EXECUTE
60 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
61 #endif
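/* E.g. with a BRANCH_COST of 2 the default above permits up to three insns
   in a predicated arm, and cond_exec_process_if_block allows twice that in
   total when both a THEN and an ELSE block are present.  */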
62
63 #define NULL_EDGE ((struct edge_def *)NULL)
64 #define NULL_BLOCK ((struct basic_block_def *)NULL)
65
 66 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
67 static int num_possible_if_blocks;
68
 69 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
 70    execution.  */
71 static int num_updated_if_blocks;
72
73 /* # of basic blocks that were removed. */
74 static int num_removed_blocks;
75
76 /* True if life data ok at present. */
77 static bool life_data_ok;
78
79 /* The post-dominator relation on the original block numbers. */
80 static dominance_info post_dominators;
81
82 /* Forward references. */
83 static int count_bb_insns PARAMS ((basic_block));
84 static rtx first_active_insn PARAMS ((basic_block));
85 static int last_active_insn_p PARAMS ((basic_block, rtx));
86 static int seq_contains_jump PARAMS ((rtx));
87
88 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
89 static rtx cond_exec_get_condition PARAMS ((rtx));
90 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
91 basic_block, basic_block));
92
93 static rtx noce_get_condition PARAMS ((rtx, rtx *));
94 static int noce_operand_ok PARAMS ((rtx));
95 static int noce_process_if_block PARAMS ((basic_block, basic_block,
96 basic_block, basic_block));
97
98 static int process_if_block PARAMS ((basic_block, basic_block,
99 basic_block, basic_block));
100 static void merge_if_block PARAMS ((basic_block, basic_block,
101 basic_block, basic_block));
102
103 static int find_if_header PARAMS ((basic_block));
104 static int find_if_block PARAMS ((basic_block, edge, edge));
105 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
106 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
107 static int find_cond_trap PARAMS ((basic_block, edge, edge));
108 static rtx block_has_only_trap PARAMS ((basic_block));
109 static int find_memory PARAMS ((rtx *, void *));
110 static int dead_or_predicable PARAMS ((basic_block, basic_block,
111 basic_block, basic_block, int));
112 static void noce_emit_move_insn PARAMS ((rtx, rtx));
113 \f
114 /* Count the number of non-jump active insns in BB. */
115
116 static int
117 count_bb_insns (bb)
118 basic_block bb;
119 {
120 int count = 0;
121 rtx insn = bb->head;
122
123 while (1)
124 {
125 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
126 count++;
127
128 if (insn == bb->end)
129 break;
130 insn = NEXT_INSN (insn);
131 }
132
133 return count;
134 }
135
136 /* Return the first non-jump active insn in the basic block. */
137
138 static rtx
139 first_active_insn (bb)
140 basic_block bb;
141 {
142 rtx insn = bb->head;
143
144 if (GET_CODE (insn) == CODE_LABEL)
145 {
146 if (insn == bb->end)
147 return NULL_RTX;
148 insn = NEXT_INSN (insn);
149 }
150
151 while (GET_CODE (insn) == NOTE)
152 {
153 if (insn == bb->end)
154 return NULL_RTX;
155 insn = NEXT_INSN (insn);
156 }
157
158 if (GET_CODE (insn) == JUMP_INSN)
159 return NULL_RTX;
160
161 return insn;
162 }
163
164 /* Return true if INSN is the last active non-jump insn in BB. */
165
166 static int
167 last_active_insn_p (bb, insn)
168 basic_block bb;
169 rtx insn;
170 {
171 do
172 {
173 if (insn == bb->end)
174 return TRUE;
175 insn = NEXT_INSN (insn);
176 }
177 while (GET_CODE (insn) == NOTE);
178
179 return GET_CODE (insn) == JUMP_INSN;
180 }
181
 182 /* It is possible, especially for multi-word arithmetic, for the
 183    expanders to have emitted jumps.  Search
184 through the sequence and return TRUE if a jump exists so that
185 we can abort the conversion. */
186
187 static int
188 seq_contains_jump (insn)
189 rtx insn;
190 {
191 while (insn)
192 {
193 if (GET_CODE (insn) == JUMP_INSN)
194 return 1;
195 insn = NEXT_INSN (insn);
196 }
197 return 0;
198 }
199 \f
200 /* Go through a bunch of insns, converting them to conditional
201 execution format if possible. Return TRUE if all of the non-note
202 insns were processed. */
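/* For example (a sketch; register numbers are arbitrary), with the test
   (ne (reg 99) (const_int 0)), an ordinary insn
       (set (reg 100) (reg 101))
   is rewritten as
       (cond_exec (ne (reg 99) (const_int 0))
                  (set (reg 100) (reg 101)))
   by the validate_change call below.  */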
203
204 static int
205 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
206 rtx start; /* first insn to look at */
207 rtx end; /* last insn to look at */
208 rtx test; /* conditional execution test */
209 rtx prob_val; /* probability of branch taken. */
 210 int mod_ok; /* true if the test may be modified in the last insn.  */
211 {
212 int must_be_last = FALSE;
213 rtx insn;
214 rtx pattern;
215
216 for (insn = start; ; insn = NEXT_INSN (insn))
217 {
218 if (GET_CODE (insn) == NOTE)
219 goto insn_done;
220
221 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
222 abort ();
223
224 /* Remove USE insns that get in the way. */
225 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
226 {
227 /* ??? Ug. Actually unlinking the thing is problematic,
228 given what we'd have to coordinate with our callers. */
229 PUT_CODE (insn, NOTE);
230 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
231 NOTE_SOURCE_FILE (insn) = 0;
232 goto insn_done;
233 }
234
235 /* Last insn wasn't last? */
236 if (must_be_last)
237 return FALSE;
238
239 if (modified_in_p (test, insn))
240 {
241 if (!mod_ok)
242 return FALSE;
243 must_be_last = TRUE;
244 }
245
246 /* Now build the conditional form of the instruction. */
247 pattern = PATTERN (insn);
248
249 /* If the machine needs to modify the insn being conditionally executed,
250 say for example to force a constant integer operand into a temp
251 register, do so here. */
252 #ifdef IFCVT_MODIFY_INSN
253 IFCVT_MODIFY_INSN (pattern, insn);
254 if (! pattern)
255 return FALSE;
256 #endif
257
258 validate_change (insn, &PATTERN (insn),
259 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
260 pattern), 1);
261
262 if (GET_CODE (insn) == CALL_INSN && prob_val)
263 validate_change (insn, &REG_NOTES (insn),
264 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
265 REG_NOTES (insn)), 1);
266
267 insn_done:
268 if (insn == end)
269 break;
270 }
271
272 return TRUE;
273 }
274
275 /* Return the condition for a jump. Do not do any special processing. */
276
277 static rtx
278 cond_exec_get_condition (jump)
279 rtx jump;
280 {
281 rtx test_if, cond;
282
283 if (any_condjump_p (jump))
284 test_if = SET_SRC (pc_set (jump));
285 else
286 return NULL_RTX;
287 cond = XEXP (test_if, 0);
288
289 /* If this branches to JUMP_LABEL when the condition is false,
290 reverse the condition. */
291 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
292 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
293 {
294 enum rtx_code rev = reversed_comparison_code (cond, jump);
295 if (rev == UNKNOWN)
296 return NULL_RTX;
297
298 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
299 XEXP (cond, 1));
300 }
301
302 return cond;
303 }
304
305 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
306 to conditional execution. Return TRUE if we were successful at
 307 converting the block.  */
308
309 static int
310 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
311 basic_block test_bb; /* Basic block test is in */
312 basic_block then_bb; /* Basic block for THEN block */
313 basic_block else_bb; /* Basic block for ELSE block */
314 basic_block join_bb; /* Basic block the join label is in */
315 {
316 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
317 rtx then_start; /* first insn in THEN block */
318 rtx then_end; /* last insn + 1 in THEN block */
319 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
320 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
321 int max; /* max # of insns to convert. */
322 int then_mod_ok; /* whether conditional mods are ok in THEN */
323 rtx true_expr; /* test for else block insns */
324 rtx false_expr; /* test for then block insns */
325 rtx true_prob_val; /* probability of else block */
326 rtx false_prob_val; /* probability of then block */
327 int n_insns;
328 enum rtx_code false_code;
329
330 /* Find the conditional jump to the ELSE or JOIN part, and isolate
331 the test. */
332 test_expr = cond_exec_get_condition (test_bb->end);
333 if (! test_expr)
334 return FALSE;
335
336 /* If the conditional jump is more than just a conditional jump,
337 then we can not do conditional execution conversion on this block. */
338 if (!onlyjump_p (test_bb->end))
339 return FALSE;
340
341 /* Collect the bounds of where we're to search. */
342
343 then_start = then_bb->head;
344 then_end = then_bb->end;
345
346 /* Skip a label heading THEN block. */
347 if (GET_CODE (then_start) == CODE_LABEL)
348 then_start = NEXT_INSN (then_start);
349
350 /* Skip a (use (const_int 0)) or branch as the final insn. */
351 if (GET_CODE (then_end) == INSN
352 && GET_CODE (PATTERN (then_end)) == USE
353 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
354 then_end = PREV_INSN (then_end);
355 else if (GET_CODE (then_end) == JUMP_INSN)
356 then_end = PREV_INSN (then_end);
357
358 if (else_bb)
359 {
360 /* Skip the ELSE block's label. */
361 else_start = NEXT_INSN (else_bb->head);
362 else_end = else_bb->end;
363
364 /* Skip a (use (const_int 0)) or branch as the final insn. */
365 if (GET_CODE (else_end) == INSN
366 && GET_CODE (PATTERN (else_end)) == USE
367 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
368 else_end = PREV_INSN (else_end);
369 else if (GET_CODE (else_end) == JUMP_INSN)
370 else_end = PREV_INSN (else_end);
371 }
372
373 /* How many instructions should we convert in total? */
374 n_insns = 0;
375 if (else_bb)
376 {
377 max = 2 * MAX_CONDITIONAL_EXECUTE;
378 n_insns = count_bb_insns (else_bb);
379 }
380 else
381 max = MAX_CONDITIONAL_EXECUTE;
382 n_insns += count_bb_insns (then_bb);
383 if (n_insns > max)
384 return FALSE;
385
386 /* Map test_expr/test_jump into the appropriate MD tests to use on
387 the conditionally executed code. */
388
389 true_expr = test_expr;
390
391 false_code = reversed_comparison_code (true_expr, test_bb->end);
392 if (false_code != UNKNOWN)
393 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
394 XEXP (true_expr, 0), XEXP (true_expr, 1));
395 else
396 false_expr = NULL_RTX;
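/* Note the apparent inversion below: TEST_EXPR is the condition under which
   the jump ending TEST_BB is taken, and that jump leads to the ELSE (or
   JOIN) block.  The THEN block executes on the fall-through path, so its
   insns are predicated on FALSE_EXPR and the ELSE insns on TRUE_EXPR.  */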
397
398 #ifdef IFCVT_MODIFY_TESTS
399 /* If the machine description needs to modify the tests, such as setting a
400 conditional execution register from a comparison, it can do so here. */
401 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
402 join_bb);
403
 404 /* See if the conversion failed.  */
405 if (!true_expr || !false_expr)
406 goto fail;
407 #endif
408
409 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
410 if (true_prob_val)
411 {
412 true_prob_val = XEXP (true_prob_val, 0);
413 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
414 }
415 else
416 false_prob_val = NULL_RTX;
417
418 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
 419 on the THEN block.  */
420 then_mod_ok = (else_bb == NULL_BLOCK);
421
422 /* Go through the THEN and ELSE blocks converting the insns if possible
423 to conditional execution. */
424
425 if (then_end
426 && (! false_expr
427 || ! cond_exec_process_insns (then_start, then_end, false_expr,
428 false_prob_val, then_mod_ok)))
429 goto fail;
430
431 if (else_bb
432 && ! cond_exec_process_insns (else_start, else_end,
433 true_expr, true_prob_val, TRUE))
434 goto fail;
435
436 if (! apply_change_group ())
437 return FALSE;
438
439 #ifdef IFCVT_MODIFY_FINAL
 440 /* Do any machine dependent final modifications.  */
441 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
442 #endif
443
444 /* Conversion succeeded. */
445 if (rtl_dump_file)
446 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
447 n_insns, (n_insns == 1) ? " was" : "s were");
448
449 /* Merge the blocks! */
450 merge_if_block (test_bb, then_bb, else_bb, join_bb);
451 return TRUE;
452
453 fail:
454 #ifdef IFCVT_MODIFY_CANCEL
455 /* Cancel any machine dependent changes. */
456 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
457 #endif
458
459 cancel_changes (0);
460 return FALSE;
461 }
462 \f
463 /* Used by noce_process_if_block to communicate with its subroutines.
464
465 The subroutines know that A and B may be evaluated freely. They
466 know that X is a register. They should insert new instructions
467 before cond_earliest. */
468
469 struct noce_if_info
470 {
471 basic_block test_bb;
472 rtx insn_a, insn_b;
473 rtx x, a, b;
474 rtx jump, cond, cond_earliest;
475 };
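/* For the canonical form "if (test) x = a; else x = b;", JUMP is the
   conditional jump ending TEST_BB, COND the extracted test, INSN_A and
   INSN_B the single-set insns of the THEN and ELSE arms (INSN_B may be
   absent), and X, A, B their destination and sources; see
   noce_process_if_block.  The "noce" prefix is short for "no conditional
   execution".  */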
476
477 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
478 rtx, int, int));
479 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
480 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
481 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
482 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
483 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
484 rtx, enum rtx_code, rtx,
485 rtx, rtx, rtx));
486 static int noce_try_cmove PARAMS ((struct noce_if_info *));
487 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
488 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
489 rtx, rtx *));
490 static int noce_try_minmax PARAMS ((struct noce_if_info *));
491 static int noce_try_abs PARAMS ((struct noce_if_info *));
492
493 /* Helper function for noce_try_store_flag*. */
494
495 static rtx
496 noce_emit_store_flag (if_info, x, reversep, normalize)
497 struct noce_if_info *if_info;
498 rtx x;
499 int reversep, normalize;
500 {
501 rtx cond = if_info->cond;
502 int cond_complex;
503 enum rtx_code code;
504
505 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
506 || ! general_operand (XEXP (cond, 1), VOIDmode));
507
508 /* If earliest == jump, or when the condition is complex, try to
509 build the store_flag insn directly. */
510
511 if (cond_complex)
512 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
513
514 if (reversep)
515 code = reversed_comparison_code (cond, if_info->jump);
516 else
517 code = GET_CODE (cond);
518
519 if ((if_info->cond_earliest == if_info->jump || cond_complex)
520 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
521 {
522 rtx tmp;
523
524 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
525 XEXP (cond, 1));
526 tmp = gen_rtx_SET (VOIDmode, x, tmp);
527
528 start_sequence ();
529 tmp = emit_insn (tmp);
530
531 if (recog_memoized (tmp) >= 0)
532 {
533 tmp = get_insns ();
534 end_sequence ();
535 emit_insn (tmp);
536
537 if_info->cond_earliest = if_info->jump;
538
539 return x;
540 }
541
542 end_sequence ();
543 }
544
545 /* Don't even try if the comparison operands are weird. */
546 if (cond_complex)
547 return NULL_RTX;
548
549 return emit_store_flag (x, code, XEXP (cond, 0),
550 XEXP (cond, 1), VOIDmode,
551 (code == LTU || code == LEU
552 || code == GEU || code == GTU), normalize);
553 }
554
 555 /* Emit an insn to move Y into X, handling a STRICT_LOW_PART destination.  */
556 static void
557 noce_emit_move_insn (x, y)
558 rtx x, y;
559 {
560 enum machine_mode outmode, inmode;
561 rtx outer, inner;
562 int bitpos;
563
564 if (GET_CODE (x) != STRICT_LOW_PART)
565 {
566 emit_move_insn (x, y);
567 return;
568 }
569
570 outer = XEXP (x, 0);
571 inner = XEXP (outer, 0);
572 outmode = GET_MODE (outer);
573 inmode = GET_MODE (inner);
574 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
575 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
576 GET_MODE_BITSIZE (inmode));
577 }
578
579 /* Convert "if (test) x = 1; else x = 0".
580
581 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
582 tried in noce_try_store_flag_constants after noce_try_cmove has had
583 a go at the conversion. */
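/* That is, when successful the branch disappears and X receives the
   store-flag value directly, e.g. "x = (test != 0)" assuming a
   STORE_FLAG_VALUE of 1.  */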
584
585 static int
586 noce_try_store_flag (if_info)
587 struct noce_if_info *if_info;
588 {
589 int reversep;
590 rtx target, seq;
591
592 if (GET_CODE (if_info->b) == CONST_INT
593 && INTVAL (if_info->b) == STORE_FLAG_VALUE
594 && if_info->a == const0_rtx)
595 reversep = 0;
596 else if (if_info->b == const0_rtx
597 && GET_CODE (if_info->a) == CONST_INT
598 && INTVAL (if_info->a) == STORE_FLAG_VALUE
599 && (reversed_comparison_code (if_info->cond, if_info->jump)
600 != UNKNOWN))
601 reversep = 1;
602 else
603 return FALSE;
604
605 start_sequence ();
606
607 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
608 if (target)
609 {
610 if (target != if_info->x)
611 noce_emit_move_insn (if_info->x, target);
612
613 seq = get_insns ();
614 end_sequence ();
615 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
616
617 return TRUE;
618 }
619 else
620 {
621 end_sequence ();
622 return FALSE;
623 }
624 }
625
626 /* Convert "if (test) x = a; else x = b", for A and B constant. */
627
628 static int
629 noce_try_store_flag_constants (if_info)
630 struct noce_if_info *if_info;
631 {
632 rtx target, seq;
633 int reversep;
634 HOST_WIDE_INT itrue, ifalse, diff, tmp;
635 int normalize, can_reverse;
636 enum machine_mode mode;
637
638 if (! no_new_pseudos
639 && GET_CODE (if_info->a) == CONST_INT
640 && GET_CODE (if_info->b) == CONST_INT)
641 {
642 mode = GET_MODE (if_info->x);
643 ifalse = INTVAL (if_info->a);
644 itrue = INTVAL (if_info->b);
645
646 /* Make sure we can represent the difference between the two values. */
647 if ((itrue - ifalse > 0)
648 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
649 return FALSE;
650
651 diff = trunc_int_for_mode (itrue - ifalse, mode);
652
653 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
654 != UNKNOWN);
655
656 reversep = 0;
657 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
658 normalize = 0;
659 else if (ifalse == 0 && exact_log2 (itrue) >= 0
660 && (STORE_FLAG_VALUE == 1
661 || BRANCH_COST >= 2))
662 normalize = 1;
663 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
664 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
665 normalize = 1, reversep = 1;
666 else if (itrue == -1
667 && (STORE_FLAG_VALUE == -1
668 || BRANCH_COST >= 2))
669 normalize = -1;
670 else if (ifalse == -1 && can_reverse
671 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
672 normalize = -1, reversep = 1;
673 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
674 || BRANCH_COST >= 3)
675 normalize = -1;
676 else
677 return FALSE;
678
679 if (reversep)
680 {
681 tmp = itrue; itrue = ifalse; ifalse = tmp;
682 diff = trunc_int_for_mode (-diff, mode);
683 }
684
685 start_sequence ();
686 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
687 if (! target)
688 {
689 end_sequence ();
690 return FALSE;
691 }
692
693 /* if (test) x = 3; else x = 4;
694 => x = 3 + (test == 0); */
695 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
696 {
697 target = expand_simple_binop (mode,
698 (diff == STORE_FLAG_VALUE
699 ? PLUS : MINUS),
700 GEN_INT (ifalse), target, if_info->x, 0,
701 OPTAB_WIDEN);
702 }
703
704 /* if (test) x = 8; else x = 0;
705 => x = (test != 0) << 3; */
706 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
707 {
708 target = expand_simple_binop (mode, ASHIFT,
709 target, GEN_INT (tmp), if_info->x, 0,
710 OPTAB_WIDEN);
711 }
712
713 /* if (test) x = -1; else x = b;
714 => x = -(test != 0) | b; */
715 else if (itrue == -1)
716 {
717 target = expand_simple_binop (mode, IOR,
718 target, GEN_INT (ifalse), if_info->x, 0,
719 OPTAB_WIDEN);
720 }
721
722 /* if (test) x = a; else x = b;
723 => x = (-(test != 0) & (b - a)) + a; */
724 else
725 {
726 target = expand_simple_binop (mode, AND,
727 target, GEN_INT (diff), if_info->x, 0,
728 OPTAB_WIDEN);
729 if (target)
730 target = expand_simple_binop (mode, PLUS,
731 target, GEN_INT (ifalse),
732 if_info->x, 0, OPTAB_WIDEN);
733 }
734
735 if (! target)
736 {
737 end_sequence ();
738 return FALSE;
739 }
740
741 if (target != if_info->x)
742 noce_emit_move_insn (if_info->x, target);
743
744 seq = get_insns ();
745 end_sequence ();
746
747 if (seq_contains_jump (seq))
748 return FALSE;
749
750 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
751
752 return TRUE;
753 }
754
755 return FALSE;
756 }
757
758 /* Convert "if (test) foo++" into "foo += (test != 0)", and
759 similarly for "foo--". */
760
761 static int
762 noce_try_store_flag_inc (if_info)
763 struct noce_if_info *if_info;
764 {
765 rtx target, seq;
766 int subtract, normalize;
767
768 if (! no_new_pseudos
769 && (BRANCH_COST >= 2
770 || HAVE_incscc
771 || HAVE_decscc)
772 /* Should be no `else' case to worry about. */
773 && if_info->b == if_info->x
774 && GET_CODE (if_info->a) == PLUS
775 && (XEXP (if_info->a, 1) == const1_rtx
776 || XEXP (if_info->a, 1) == constm1_rtx)
777 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
778 && (reversed_comparison_code (if_info->cond, if_info->jump)
779 != UNKNOWN))
780 {
781 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
782 subtract = 0, normalize = 0;
783 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
784 subtract = 1, normalize = 0;
785 else
786 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
787
788 start_sequence ();
789
790 target = noce_emit_store_flag (if_info,
791 gen_reg_rtx (GET_MODE (if_info->x)),
792 1, normalize);
793
794 if (target)
795 target = expand_simple_binop (GET_MODE (if_info->x),
796 subtract ? MINUS : PLUS,
797 if_info->x, target, if_info->x,
798 0, OPTAB_WIDEN);
799 if (target)
800 {
801 if (target != if_info->x)
802 noce_emit_move_insn (if_info->x, target);
803
804 seq = get_insns ();
805 end_sequence ();
806
807 if (seq_contains_jump (seq))
808 return FALSE;
809
810 emit_insn_before_scope (seq, if_info->jump,
811 INSN_SCOPE (if_info->insn_a));
812
813 return TRUE;
814 }
815
816 end_sequence ();
817 }
818
819 return FALSE;
820 }
821
822 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
823
824 static int
825 noce_try_store_flag_mask (if_info)
826 struct noce_if_info *if_info;
827 {
828 rtx target, seq;
829 int reversep;
830
831 reversep = 0;
832 if (! no_new_pseudos
833 && (BRANCH_COST >= 2
834 || STORE_FLAG_VALUE == -1)
835 && ((if_info->a == const0_rtx
836 && rtx_equal_p (if_info->b, if_info->x))
837 || ((reversep = (reversed_comparison_code (if_info->cond,
838 if_info->jump)
839 != UNKNOWN))
840 && if_info->b == const0_rtx
841 && rtx_equal_p (if_info->a, if_info->x))))
842 {
843 start_sequence ();
844 target = noce_emit_store_flag (if_info,
845 gen_reg_rtx (GET_MODE (if_info->x)),
846 reversep, -1);
847 if (target)
848 target = expand_simple_binop (GET_MODE (if_info->x), AND,
849 if_info->x, target, if_info->x, 0,
850 OPTAB_WIDEN);
851
852 if (target)
853 {
854 if (target != if_info->x)
855 noce_emit_move_insn (if_info->x, target);
856
857 seq = get_insns ();
858 end_sequence ();
859
860 if (seq_contains_jump (seq))
861 return FALSE;
862
863 emit_insn_before_scope (seq, if_info->jump,
864 INSN_SCOPE (if_info->insn_a));
865
866 return TRUE;
867 }
868
869 end_sequence ();
870 }
871
872 return FALSE;
873 }
874
875 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
876
877 static rtx
878 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
879 struct noce_if_info *if_info;
880 rtx x, cmp_a, cmp_b, vfalse, vtrue;
881 enum rtx_code code;
882 {
883 /* If earliest == jump, try to build the cmove insn directly.
884 This is helpful when combine has created some complex condition
885 (like for alpha's cmovlbs) that we can't hope to regenerate
886 through the normal interface. */
887
888 if (if_info->cond_earliest == if_info->jump)
889 {
890 rtx tmp;
891
892 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
893 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
894 tmp = gen_rtx_SET (VOIDmode, x, tmp);
895
896 start_sequence ();
897 tmp = emit_insn (tmp);
898
899 if (recog_memoized (tmp) >= 0)
900 {
901 tmp = get_insns ();
902 end_sequence ();
903 emit_insn (tmp);
904
905 return x;
906 }
907
908 end_sequence ();
909 }
910
911 /* Don't even try if the comparison operands are weird. */
912 if (! general_operand (cmp_a, GET_MODE (cmp_a))
913 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
914 return NULL_RTX;
915
916 #if HAVE_conditional_move
917 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
918 vtrue, vfalse, GET_MODE (x),
919 (code == LTU || code == GEU
920 || code == LEU || code == GTU));
921 #else
 922 /* We'll never get here, as noce_process_if_block doesn't call the
 923 functions involved.  Ifdef code is normally discouraged because it
 924 leads to typos in the code not selected; in this case, however,
 925 emit_conditional_move won't exist either.  */
926 return NULL_RTX;
927 #endif
928 }
929
 930 /* Try only simple constants and registers here.  More complex cases
 931 are handled in noce_try_cmove_arith after the noce_try_store_flag
 932 variants have had a go at it.  */
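/* E.g. "if (test) x = 4; else x = y;" may be replaced by a single
   conditional move of 4 or Y into X, when the target can generate one
   (see noce_emit_cmove and the HAVE_conditional_move guard in
   noce_process_if_block).  */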
933
934 static int
935 noce_try_cmove (if_info)
936 struct noce_if_info *if_info;
937 {
938 enum rtx_code code;
939 rtx target, seq;
940
941 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
942 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
943 {
944 start_sequence ();
945
946 code = GET_CODE (if_info->cond);
947 target = noce_emit_cmove (if_info, if_info->x, code,
948 XEXP (if_info->cond, 0),
949 XEXP (if_info->cond, 1),
950 if_info->a, if_info->b);
951
952 if (target)
953 {
954 if (target != if_info->x)
955 noce_emit_move_insn (if_info->x, target);
956
957 seq = get_insns ();
958 end_sequence ();
959 emit_insn_before_scope (seq, if_info->jump,
960 INSN_SCOPE (if_info->insn_a));
961 return TRUE;
962 }
963 else
964 {
965 end_sequence ();
966 return FALSE;
967 }
968 }
969
970 return FALSE;
971 }
972
973 /* Try more complex cases involving conditional_move. */
974
975 static int
976 noce_try_cmove_arith (if_info)
977 struct noce_if_info *if_info;
978 {
979 rtx a = if_info->a;
980 rtx b = if_info->b;
981 rtx x = if_info->x;
982 rtx insn_a, insn_b;
983 rtx tmp, target;
984 int is_mem = 0;
985 enum rtx_code code;
986
987 /* A conditional move from two memory sources is equivalent to a
988 conditional on their addresses followed by a load. Don't do this
989 early because it'll screw alias analysis. Note that we've
990 already checked for no side effects. */
991 if (! no_new_pseudos && cse_not_expected
992 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
993 && BRANCH_COST >= 5)
994 {
995 a = XEXP (a, 0);
996 b = XEXP (b, 0);
997 x = gen_reg_rtx (Pmode);
998 is_mem = 1;
999 }
1000
1001 /* ??? We could handle this if we knew that a load from A or B could
1002 not fault. This is also true if we've already loaded
1003 from the address along the path from ENTRY. */
1004 else if (may_trap_p (a) || may_trap_p (b))
1005 return FALSE;
1006
1007 /* if (test) x = a + b; else x = c - d;
1008 => y = a + b;
1009 x = c - d;
1010 if (test)
1011 x = y;
1012 */
1013
1014 code = GET_CODE (if_info->cond);
1015 insn_a = if_info->insn_a;
1016 insn_b = if_info->insn_b;
1017
1018 /* Possibly rearrange operands to make things come out more natural. */
1019 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1020 {
1021 int reversep = 0;
1022 if (rtx_equal_p (b, x))
1023 reversep = 1;
1024 else if (general_operand (b, GET_MODE (b)))
1025 reversep = 1;
1026
1027 if (reversep)
1028 {
1029 code = reversed_comparison_code (if_info->cond, if_info->jump);
1030 tmp = a, a = b, b = tmp;
1031 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1032 }
1033 }
1034
1035 start_sequence ();
1036
1037 /* If either operand is complex, load it into a register first.
1038 The best way to do this is to copy the original insn. In this
1039 way we preserve any clobbers etc that the insn may have had.
1040 This is of course not possible in the IS_MEM case. */
1041 if (! general_operand (a, GET_MODE (a)))
1042 {
1043 rtx set;
1044
1045 if (no_new_pseudos)
1046 goto end_seq_and_fail;
1047
1048 if (is_mem)
1049 {
1050 tmp = gen_reg_rtx (GET_MODE (a));
1051 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1052 }
1053 else if (! insn_a)
1054 goto end_seq_and_fail;
1055 else
1056 {
1057 a = gen_reg_rtx (GET_MODE (a));
1058 tmp = copy_rtx (insn_a);
1059 set = single_set (tmp);
1060 SET_DEST (set) = a;
1061 tmp = emit_insn (PATTERN (tmp));
1062 }
1063 if (recog_memoized (tmp) < 0)
1064 goto end_seq_and_fail;
1065 }
1066 if (! general_operand (b, GET_MODE (b)))
1067 {
1068 rtx set;
1069
1070 if (no_new_pseudos)
1071 goto end_seq_and_fail;
1072
1073 if (is_mem)
1074 {
1075 tmp = gen_reg_rtx (GET_MODE (b));
1076 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1077 }
1078 else if (! insn_b)
1079 goto end_seq_and_fail;
1080 else
1081 {
1082 b = gen_reg_rtx (GET_MODE (b));
1083 tmp = copy_rtx (insn_b);
1084 set = single_set (tmp);
1085 SET_DEST (set) = b;
1086 tmp = emit_insn (PATTERN (tmp));
1087 }
1088 if (recog_memoized (tmp) < 0)
1089 goto end_seq_and_fail;
1090 }
1091
1092 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1093 XEXP (if_info->cond, 1), a, b);
1094
1095 if (! target)
1096 goto end_seq_and_fail;
1097
 1098 /* If we're handling the memory case set up above, emit the load now.  */
1099 if (is_mem)
1100 {
1101 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1102
1103 /* Copy over flags as appropriate. */
1104 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1105 MEM_VOLATILE_P (tmp) = 1;
1106 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1107 MEM_IN_STRUCT_P (tmp) = 1;
1108 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1109 MEM_SCALAR_P (tmp) = 1;
1110 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1111 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1112 set_mem_align (tmp,
1113 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1114
1115 noce_emit_move_insn (if_info->x, tmp);
1116 }
1117 else if (target != x)
1118 noce_emit_move_insn (x, target);
1119
1120 tmp = get_insns ();
1121 end_sequence ();
1122 emit_insn_before_scope (tmp, if_info->jump, INSN_SCOPE (if_info->insn_a));
1123 return TRUE;
1124
1125 end_seq_and_fail:
1126 end_sequence ();
1127 return FALSE;
1128 }
1129
1130 /* For most cases, the simplified condition we found is the best
1131 choice, but this is not the case for the min/max/abs transforms.
1132 For these we wish to know that it is A or B in the condition. */
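/* E.g. for "if (a < b) x = a; else x = b;", noce_try_minmax must see the
   comparison in terms of A and B themselves; if the condition recorded so
   far compares, say, a constant or an earlier copy, we try to recover an
   equivalent condition that mentions TARGET.  */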
1133
1134 static rtx
1135 noce_get_alt_condition (if_info, target, earliest)
1136 struct noce_if_info *if_info;
1137 rtx target;
1138 rtx *earliest;
1139 {
1140 rtx cond, set, insn;
1141 int reverse;
1142
1143 /* If target is already mentioned in the known condition, return it. */
1144 if (reg_mentioned_p (target, if_info->cond))
1145 {
1146 *earliest = if_info->cond_earliest;
1147 return if_info->cond;
1148 }
1149
1150 set = pc_set (if_info->jump);
1151 cond = XEXP (SET_SRC (set), 0);
1152 reverse
1153 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1154 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1155
1156 /* If we're looking for a constant, try to make the conditional
1157 have that constant in it. There are two reasons why it may
1158 not have the constant we want:
1159
1160 1. GCC may have needed to put the constant in a register, because
1161 the target can't compare directly against that constant. For
1162 this case, we look for a SET immediately before the comparison
1163 that puts a constant in that register.
1164
1165 2. GCC may have canonicalized the conditional, for example
1166 replacing "if x < 4" with "if x <= 3". We can undo that (or
1167 make equivalent types of changes) to get the constants we need
1168 if they're off by one in the right direction. */
1169
1170 if (GET_CODE (target) == CONST_INT)
1171 {
1172 enum rtx_code code = GET_CODE (if_info->cond);
1173 rtx op_a = XEXP (if_info->cond, 0);
1174 rtx op_b = XEXP (if_info->cond, 1);
1175 rtx prev_insn;
1176
1177 /* First, look to see if we put a constant in a register. */
1178 prev_insn = PREV_INSN (if_info->cond_earliest);
1179 if (prev_insn
1180 && INSN_P (prev_insn)
1181 && GET_CODE (PATTERN (prev_insn)) == SET)
1182 {
1183 rtx src = find_reg_equal_equiv_note (prev_insn);
1184 if (!src)
1185 src = SET_SRC (PATTERN (prev_insn));
1186 if (GET_CODE (src) == CONST_INT)
1187 {
1188 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1189 op_a = src;
1190 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1191 op_b = src;
1192
1193 if (GET_CODE (op_a) == CONST_INT)
1194 {
1195 rtx tmp = op_a;
1196 op_a = op_b;
1197 op_b = tmp;
1198 code = swap_condition (code);
1199 }
1200 }
1201 }
1202
1203 /* Now, look to see if we can get the right constant by
1204 adjusting the conditional. */
1205 if (GET_CODE (op_b) == CONST_INT)
1206 {
1207 HOST_WIDE_INT desired_val = INTVAL (target);
1208 HOST_WIDE_INT actual_val = INTVAL (op_b);
1209
1210 switch (code)
1211 {
1212 case LT:
1213 if (actual_val == desired_val + 1)
1214 {
1215 code = LE;
1216 op_b = GEN_INT (desired_val);
1217 }
1218 break;
1219 case LE:
1220 if (actual_val == desired_val - 1)
1221 {
1222 code = LT;
1223 op_b = GEN_INT (desired_val);
1224 }
1225 break;
1226 case GT:
1227 if (actual_val == desired_val - 1)
1228 {
1229 code = GE;
1230 op_b = GEN_INT (desired_val);
1231 }
1232 break;
1233 case GE:
1234 if (actual_val == desired_val + 1)
1235 {
1236 code = GT;
1237 op_b = GEN_INT (desired_val);
1238 }
1239 break;
1240 default:
1241 break;
1242 }
1243 }
1244
1245 /* If we made any changes, generate a new conditional that is
1246 equivalent to what we started with, but has the right
1247 constants in it. */
1248 if (code != GET_CODE (if_info->cond)
1249 || op_a != XEXP (if_info->cond, 0)
1250 || op_b != XEXP (if_info->cond, 1))
1251 {
1252 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1253 *earliest = if_info->cond_earliest;
1254 return cond;
1255 }
1256 }
1257
1258 cond = canonicalize_condition (if_info->jump, cond, reverse,
1259 earliest, target);
1260 if (! cond || ! reg_mentioned_p (target, cond))
1261 return NULL;
1262
1263 /* We almost certainly searched back to a different place.
1264 Need to re-verify correct lifetimes. */
1265
1266 /* X may not be mentioned in the range (cond_earliest, jump]. */
1267 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1268 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1269 return NULL;
1270
1271 /* A and B may not be modified in the range [cond_earliest, jump). */
1272 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1273 if (INSN_P (insn)
1274 && (modified_in_p (if_info->a, insn)
1275 || modified_in_p (if_info->b, insn)))
1276 return NULL;
1277
1278 return cond;
1279 }
1280
1281 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1282
1283 static int
1284 noce_try_minmax (if_info)
1285 struct noce_if_info *if_info;
1286 {
1287 rtx cond, earliest, target, seq;
1288 enum rtx_code code, op;
1289 int unsignedp;
1290
1291 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1292 if (no_new_pseudos)
1293 return FALSE;
1294
1295 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1296 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1297 to get the target to tell us... */
1298 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1299 || HONOR_NANS (GET_MODE (if_info->x)))
1300 return FALSE;
1301
1302 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1303 if (!cond)
1304 return FALSE;
1305
1306 /* Verify the condition is of the form we expect, and canonicalize
1307 the comparison code. */
1308 code = GET_CODE (cond);
1309 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1310 {
1311 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1312 return FALSE;
1313 }
1314 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1315 {
1316 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1317 return FALSE;
1318 code = swap_condition (code);
1319 }
1320 else
1321 return FALSE;
1322
1323 /* Determine what sort of operation this is. Note that the code is for
1324 a taken branch, so the code->operation mapping appears backwards. */
1325 switch (code)
1326 {
1327 case LT:
1328 case LE:
1329 case UNLT:
1330 case UNLE:
1331 op = SMAX;
1332 unsignedp = 0;
1333 break;
1334 case GT:
1335 case GE:
1336 case UNGT:
1337 case UNGE:
1338 op = SMIN;
1339 unsignedp = 0;
1340 break;
1341 case LTU:
1342 case LEU:
1343 op = UMAX;
1344 unsignedp = 1;
1345 break;
1346 case GTU:
1347 case GEU:
1348 op = UMIN;
1349 unsignedp = 1;
1350 break;
1351 default:
1352 return FALSE;
1353 }
1354
1355 start_sequence ();
1356
1357 target = expand_simple_binop (GET_MODE (if_info->x), op,
1358 if_info->a, if_info->b,
1359 if_info->x, unsignedp, OPTAB_WIDEN);
1360 if (! target)
1361 {
1362 end_sequence ();
1363 return FALSE;
1364 }
1365 if (target != if_info->x)
1366 noce_emit_move_insn (if_info->x, target);
1367
1368 seq = get_insns ();
1369 end_sequence ();
1370
1371 if (seq_contains_jump (seq))
1372 return FALSE;
1373
1374 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1375 if_info->cond = cond;
1376 if_info->cond_earliest = earliest;
1377
1378 return TRUE;
1379 }
1380
1381 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1382
1383 static int
1384 noce_try_abs (if_info)
1385 struct noce_if_info *if_info;
1386 {
1387 rtx cond, earliest, target, seq, a, b, c;
1388 int negate;
1389
1390 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1391 if (no_new_pseudos)
1392 return FALSE;
1393
1394 /* Recognize A and B as constituting an ABS or NABS. */
1395 a = if_info->a;
1396 b = if_info->b;
1397 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1398 negate = 0;
1399 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1400 {
1401 c = a; a = b; b = c;
1402 negate = 1;
1403 }
1404 else
1405 return FALSE;
1406
1407 cond = noce_get_alt_condition (if_info, b, &earliest);
1408 if (!cond)
1409 return FALSE;
1410
1411 /* Verify the condition is of the form we expect. */
1412 if (rtx_equal_p (XEXP (cond, 0), b))
1413 c = XEXP (cond, 1);
1414 else if (rtx_equal_p (XEXP (cond, 1), b))
1415 c = XEXP (cond, 0);
1416 else
1417 return FALSE;
1418
1419 /* Verify that C is zero. Search backward through the block for
1420 a REG_EQUAL note if necessary. */
1421 if (REG_P (c))
1422 {
1423 rtx insn, note = NULL;
1424 for (insn = earliest;
1425 insn != if_info->test_bb->head;
1426 insn = PREV_INSN (insn))
1427 if (INSN_P (insn)
1428 && ((note = find_reg_note (insn, REG_EQUAL, c))
1429 || (note = find_reg_note (insn, REG_EQUIV, c))))
1430 break;
1431 if (! note)
1432 return FALSE;
1433 c = XEXP (note, 0);
1434 }
1435 if (GET_CODE (c) == MEM
1436 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1437 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1438 c = get_pool_constant (XEXP (c, 0));
1439
1440 /* Work around funny ideas get_condition has wrt canonicalization.
1441 Note that these rtx constants are known to be CONST_INT, and
1442 therefore imply integer comparisons. */
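/* In other words, besides comparisons against zero we also accept
   "b > -1" and "b < 1" as the integer equivalents of "b >= 0" and
   "b <= 0" respectively.  */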
1443 if (c == constm1_rtx && GET_CODE (cond) == GT)
1444 ;
1445 else if (c == const1_rtx && GET_CODE (cond) == LT)
1446 ;
1447 else if (c != CONST0_RTX (GET_MODE (b)))
1448 return FALSE;
1449
1450 /* Determine what sort of operation this is. */
1451 switch (GET_CODE (cond))
1452 {
1453 case LT:
1454 case LE:
1455 case UNLT:
1456 case UNLE:
1457 negate = !negate;
1458 break;
1459 case GT:
1460 case GE:
1461 case UNGT:
1462 case UNGE:
1463 break;
1464 default:
1465 return FALSE;
1466 }
1467
1468 start_sequence ();
1469
1470 target = expand_simple_unop (GET_MODE (if_info->x), ABS, b, if_info->x, 0);
1471
 1472 /* ??? It's a quandary whether cmove would be better here, especially
1473 for integers. Perhaps combine will clean things up. */
1474 if (target && negate)
1475 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1476
1477 if (! target)
1478 {
1479 end_sequence ();
1480 return FALSE;
1481 }
1482
1483 if (target != if_info->x)
1484 noce_emit_move_insn (if_info->x, target);
1485
1486 seq = get_insns ();
1487 end_sequence ();
1488
1489 if (seq_contains_jump (seq))
1490 return FALSE;
1491
1492 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1493 if_info->cond = cond;
1494 if_info->cond_earliest = earliest;
1495
1496 return TRUE;
1497 }
1498
1499 /* Look for the condition for the jump first. We'd prefer to avoid
1500 get_condition if we can -- it tries to look back for the contents
1501 of an original compare. On targets that use normal integers for
1502 comparisons, e.g. alpha, this is wasteful. */
1503
1504 static rtx
1505 noce_get_condition (jump, earliest)
1506 rtx jump;
1507 rtx *earliest;
1508 {
1509 rtx cond;
1510 rtx set;
1511
1512 /* If the condition variable is a register and is MODE_INT, accept it.
1513 Otherwise, fall back on get_condition. */
1514
1515 if (! any_condjump_p (jump))
1516 return NULL_RTX;
1517
1518 set = pc_set (jump);
1519
1520 cond = XEXP (SET_SRC (set), 0);
1521 if (GET_CODE (XEXP (cond, 0)) == REG
1522 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1523 {
1524 *earliest = jump;
1525
1526 /* If this branches to JUMP_LABEL when the condition is false,
1527 reverse the condition. */
1528 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1529 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1530 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1531 GET_MODE (cond), XEXP (cond, 0),
1532 XEXP (cond, 1));
1533 }
1534 else
1535 cond = get_condition (jump, earliest);
1536
1537 return cond;
1538 }
1539
1540 /* Return true if OP is ok for if-then-else processing. */
1541
1542 static int
1543 noce_operand_ok (op)
1544 rtx op;
1545 {
1546 /* We special-case memories, so handle any of them with
1547 no address side effects. */
1548 if (GET_CODE (op) == MEM)
1549 return ! side_effects_p (XEXP (op, 0));
1550
1551 if (side_effects_p (op))
1552 return FALSE;
1553
1554 return ! may_trap_p (op);
1555 }
1556
1557 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1558 without using conditional execution. Return TRUE if we were
 1559 successful at converting the block.  */
1560
1561 static int
1562 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1563 basic_block test_bb; /* Basic block test is in */
1564 basic_block then_bb; /* Basic block for THEN block */
1565 basic_block else_bb; /* Basic block for ELSE block */
1566 basic_block join_bb; /* Basic block the join label is in */
1567 {
1568 /* We're looking for patterns of the form
1569
1570 (1) if (...) x = a; else x = b;
1571 (2) x = b; if (...) x = a;
1572 (3) if (...) x = a; // as if with an initial x = x.
1573
 1574 The latter patterns require jumps to be more expensive.
1575
1576 ??? For future expansion, look for multiple X in such patterns. */
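/* In form (2) there is no ELSE block; the "x = b" set is instead found just
   before the comparison (see the prev_nonnote_insn call below).  In form (3)
   B simply becomes X itself.  */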
1577
1578 struct noce_if_info if_info;
1579 rtx insn_a, insn_b;
1580 rtx set_a, set_b;
1581 rtx orig_x, x, a, b;
1582 rtx jump, cond, insn;
1583
1584 /* If this is not a standard conditional jump, we can't parse it. */
1585 jump = test_bb->end;
1586 cond = noce_get_condition (jump, &if_info.cond_earliest);
1587 if (! cond)
1588 return FALSE;
1589
1590 /* If the conditional jump is more than just a conditional jump,
1591 then we can not do if-conversion on this block. */
1592 if (! onlyjump_p (jump))
1593 return FALSE;
1594
1595 /* We must be comparing objects whose modes imply the size. */
1596 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1597 return FALSE;
1598
1599 /* Look for one of the potential sets. */
1600 insn_a = first_active_insn (then_bb);
1601 if (! insn_a
1602 || ! last_active_insn_p (then_bb, insn_a)
1603 || (set_a = single_set (insn_a)) == NULL_RTX)
1604 return FALSE;
1605
1606 x = SET_DEST (set_a);
1607 a = SET_SRC (set_a);
1608
1609 /* Look for the other potential set. Make sure we've got equivalent
1610 destinations. */
1611 /* ??? This is overconservative. Storing to two different mems is
1612 as easy as conditionally computing the address. Storing to a
1613 single mem merely requires a scratch memory to use as one of the
1614 destination addresses; often the memory immediately below the
1615 stack pointer is available for this. */
1616 set_b = NULL_RTX;
1617 if (else_bb)
1618 {
1619 insn_b = first_active_insn (else_bb);
1620 if (! insn_b
1621 || ! last_active_insn_p (else_bb, insn_b)
1622 || (set_b = single_set (insn_b)) == NULL_RTX
1623 || ! rtx_equal_p (x, SET_DEST (set_b)))
1624 return FALSE;
1625 }
1626 else
1627 {
1628 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1629 if (! insn_b
1630 || GET_CODE (insn_b) != INSN
1631 || (set_b = single_set (insn_b)) == NULL_RTX
1632 || ! rtx_equal_p (x, SET_DEST (set_b))
1633 || reg_mentioned_p (x, cond)
1634 || reg_mentioned_p (x, a)
1635 || reg_mentioned_p (x, SET_SRC (set_b)))
1636 insn_b = set_b = NULL_RTX;
1637 }
1638 b = (set_b ? SET_SRC (set_b) : x);
1639
1640 /* X may not be mentioned in the range (cond_earliest, jump]. */
1641 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1642 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1643 return FALSE;
1644
1645 /* A and B may not be modified in the range [cond_earliest, jump). */
1646 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1647 if (INSN_P (insn)
1648 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1649 return FALSE;
1650
1651 /* Only operate on register destinations, and even then avoid extending
1652 the lifetime of hard registers on small register class machines. */
1653 orig_x = x;
1654 if (GET_CODE (x) != REG
1655 || (SMALL_REGISTER_CLASSES
1656 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1657 {
1658 if (no_new_pseudos)
1659 return FALSE;
1660 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1661 ? XEXP (x, 0) : x));
1662 }
1663
1664 /* Don't operate on sources that may trap or are volatile. */
1665 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1666 return FALSE;
1667
1668 /* Set up the info block for our subroutines. */
1669 if_info.test_bb = test_bb;
1670 if_info.cond = cond;
1671 if_info.jump = jump;
1672 if_info.insn_a = insn_a;
1673 if_info.insn_b = insn_b;
1674 if_info.x = x;
1675 if_info.a = a;
1676 if_info.b = b;
1677
1678 /* Try optimizations in some approximation of a useful order. */
1679 /* ??? Should first look to see if X is live incoming at all. If it
1680 isn't, we don't need anything but an unconditional set. */
1681
1682 /* Look and see if A and B are really the same. Avoid creating silly
1683 cmove constructs that no one will fix up later. */
1684 if (rtx_equal_p (a, b))
1685 {
1686 /* If we have an INSN_B, we don't have to create any new rtl. Just
1687 move the instruction that we already have. If we don't have an
1688 INSN_B, that means that A == X, and we've got a noop move. In
1689 that case don't do anything and let the code below delete INSN_A. */
1690 if (insn_b && else_bb)
1691 {
1692 rtx note;
1693
1694 if (else_bb && insn_b == else_bb->end)
1695 else_bb->end = PREV_INSN (insn_b);
1696 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1697
1698 /* If there was a REG_EQUAL note, delete it since it may have been
1699 true due to this insn being after a jump. */
1700 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1701 remove_note (insn_b, note);
1702
1703 insn_b = NULL_RTX;
1704 }
1705 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1706 x must be executed twice. */
1707 else if (insn_b && side_effects_p (orig_x))
1708 return FALSE;
1709
1710 x = orig_x;
1711 goto success;
1712 }
1713
1714 if (noce_try_store_flag (&if_info))
1715 goto success;
1716 if (noce_try_minmax (&if_info))
1717 goto success;
1718 if (noce_try_abs (&if_info))
1719 goto success;
1720 if (HAVE_conditional_move
1721 && noce_try_cmove (&if_info))
1722 goto success;
1723 if (! HAVE_conditional_execution)
1724 {
1725 if (noce_try_store_flag_constants (&if_info))
1726 goto success;
1727 if (noce_try_store_flag_inc (&if_info))
1728 goto success;
1729 if (noce_try_store_flag_mask (&if_info))
1730 goto success;
1731 if (HAVE_conditional_move
1732 && noce_try_cmove_arith (&if_info))
1733 goto success;
1734 }
1735
1736 return FALSE;
1737
1738 success:
1739 /* The original sets may now be killed. */
1740 delete_insn (insn_a);
1741
1742 /* Several special cases here: First, we may have reused insn_b above,
1743 in which case insn_b is now NULL. Second, we want to delete insn_b
 1744 if it came from the ELSE block, because it follows the now correct
1745 write that appears in the TEST block. However, if we got insn_b from
1746 the TEST block, it may in fact be loading data needed for the comparison.
1747 We'll let life_analysis remove the insn if it's really dead. */
1748 if (insn_b && else_bb)
1749 delete_insn (insn_b);
1750
1751 /* The new insns will have been inserted just before the jump. We should
1752 be able to remove the jump with impunity, but the condition itself may
1753 have been modified by gcse to be shared across basic blocks. */
1754 delete_insn (jump);
1755
1756 /* If we used a temporary, fix it up now. */
1757 if (orig_x != x)
1758 {
1759 start_sequence ();
1760 noce_emit_move_insn (copy_rtx (orig_x), x);
1761 insn_b = get_insns ();
1762 end_sequence ();
1763
1764 emit_insn_after_scope (insn_b, test_bb->end, INSN_SCOPE (insn_a));
1765 }
1766
1767 /* Merge the blocks! */
1768 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1769
1770 return TRUE;
1771 }
1772 \f
1773 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1774 straight line code. Return true if successful. */
1775
1776 static int
1777 process_if_block (test_bb, then_bb, else_bb, join_bb)
1778 basic_block test_bb; /* Basic block test is in */
1779 basic_block then_bb; /* Basic block for THEN block */
1780 basic_block else_bb; /* Basic block for ELSE block */
1781 basic_block join_bb; /* Basic block the join label is in */
1782 {
1783 if (! reload_completed
1784 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1785 return TRUE;
1786
1787 if (HAVE_conditional_execution
1788 && reload_completed
1789 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1790 return TRUE;
1791
1792 return FALSE;
1793 }
1794
1795 /* Merge the blocks and mark for local life update. */
1796
1797 static void
1798 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1799 basic_block test_bb; /* Basic block test is in */
1800 basic_block then_bb; /* Basic block for THEN block */
1801 basic_block else_bb; /* Basic block for ELSE block */
1802 basic_block join_bb; /* Basic block the join label is in */
1803 {
1804 basic_block combo_bb;
1805
1806 /* All block merging is done into the lower block numbers. */
1807
1808 combo_bb = test_bb;
1809
1810 /* First merge TEST block into THEN block. This is a no-brainer since
1811 the THEN block did not have a code label to begin with. */
1812 if (then_bb)
1813 {
1814 if (combo_bb->global_live_at_end)
1815 COPY_REG_SET (combo_bb->global_live_at_end,
1816 then_bb->global_live_at_end);
1817 if (post_dominators)
1818 delete_from_dominance_info (post_dominators, then_bb);
1819 merge_blocks_nomove (combo_bb, then_bb);
1820 num_removed_blocks++;
1821 }
1822
1823 /* The ELSE block, if it existed, had a label. That label count
1824 will almost always be zero, but odd things can happen when labels
1825 get their addresses taken. */
1826 if (else_bb)
1827 {
1828 if (post_dominators)
1829 delete_from_dominance_info (post_dominators, else_bb);
1830 merge_blocks_nomove (combo_bb, else_bb);
1831 num_removed_blocks++;
1832 }
1833
1834 /* If there was no join block reported, that means it was not adjacent
1835 to the others, and so we cannot merge them. */
1836
1837 if (! join_bb)
1838 {
1839 rtx last = combo_bb->end;
1840
1841 /* The outgoing edge for the current COMBO block should already
1842 be correct. Verify this. */
1843 if (combo_bb->succ == NULL_EDGE)
1844 {
1845 if (find_reg_note (last, REG_NORETURN, NULL))
1846 ;
1847 else if (GET_CODE (last) == INSN
1848 && GET_CODE (PATTERN (last)) == TRAP_IF
1849 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
1850 ;
1851 else
1852 abort ();
1853 }
1854
1855 /* There should still be something at the end of the THEN or ELSE
1856 blocks taking us to our final destination. */
1857 else if (GET_CODE (last) == JUMP_INSN)
1858 ;
1859 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
1860 && GET_CODE (last) == CALL_INSN
1861 && SIBLING_CALL_P (last))
1862 ;
1863 else if ((combo_bb->succ->flags & EDGE_EH)
1864 && can_throw_internal (last))
1865 ;
1866 else
1867 abort ();
1868 }
1869
1870 /* The JOIN block may have had quite a number of other predecessors too.
1871 Since we've already merged the TEST, THEN and ELSE blocks, we should
1872 have only one remaining edge from our if-then-else diamond. If there
1873 is more than one remaining edge, it must come from elsewhere. There
1874 may be zero incoming edges if the THEN block didn't actually join
1875 back up (as with a call to abort). */
1876 else if ((join_bb->pred == NULL
1877 || join_bb->pred->pred_next == NULL)
1878 && join_bb != EXIT_BLOCK_PTR)
1879 {
1880 /* We can merge the JOIN. */
1881 if (combo_bb->global_live_at_end)
1882 COPY_REG_SET (combo_bb->global_live_at_end,
1883 join_bb->global_live_at_end);
1884 if (post_dominators)
1885 delete_from_dominance_info (post_dominators, join_bb);
1886 merge_blocks_nomove (combo_bb, join_bb);
1887 num_removed_blocks++;
1888 }
1889 else
1890 {
1891 /* We cannot merge the JOIN. */
1892
1893 /* The outgoing edge for the current COMBO block should already
1894 be correct. Verify this. */
1895 if (combo_bb->succ->succ_next != NULL_EDGE
1896 || combo_bb->succ->dest != join_bb)
1897 abort ();
1898
1899 /* Remove the jump and cruft from the end of the COMBO block. */
1900 if (join_bb != EXIT_BLOCK_PTR)
1901 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1902 }
1903
1904 num_updated_if_blocks++;
1905 }
1906 \f
1907 /* Find a block ending in a simple IF condition. Return TRUE if
1908 we were able to transform it in some way. */
1909
1910 static int
1911 find_if_header (test_bb)
1912 basic_block test_bb;
1913 {
1914 edge then_edge;
1915 edge else_edge;
1916
1917 /* The kind of block we're looking for has exactly two successors. */
1918 if ((then_edge = test_bb->succ) == NULL_EDGE
1919 || (else_edge = then_edge->succ_next) == NULL_EDGE
1920 || else_edge->succ_next != NULL_EDGE)
1921 return FALSE;
1922
1923 /* Neither edge should be abnormal. */
1924 if ((then_edge->flags & EDGE_COMPLEX)
1925 || (else_edge->flags & EDGE_COMPLEX))
1926 return FALSE;
1927
1928 /* The THEN edge is canonically the one that falls through. */
1929 if (then_edge->flags & EDGE_FALLTHRU)
1930 ;
1931 else if (else_edge->flags & EDGE_FALLTHRU)
1932 {
1933 edge e = else_edge;
1934 else_edge = then_edge;
1935 then_edge = e;
1936 }
1937 else
1938 /* Otherwise this must be a multiway branch of some sort. */
1939 return FALSE;
1940
1941 if (find_if_block (test_bb, then_edge, else_edge))
1942 goto success;
1943 if (HAVE_trap && HAVE_conditional_trap
1944 && find_cond_trap (test_bb, then_edge, else_edge))
1945 goto success;
1946 if (post_dominators
1947 && (! HAVE_conditional_execution || reload_completed))
1948 {
1949 if (find_if_case_1 (test_bb, then_edge, else_edge))
1950 goto success;
1951 if (find_if_case_2 (test_bb, then_edge, else_edge))
1952 goto success;
1953 }
1954
1955 return FALSE;
1956
1957 success:
1958 if (rtl_dump_file)
1959 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1960 return TRUE;
1961 }
1962
1963 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1964 block. If so, we'll try to convert the insns to not require the branch.
1965 Return TRUE if we were successful at converting the block. */
1966
1967 static int
1968 find_if_block (test_bb, then_edge, else_edge)
1969 basic_block test_bb;
1970 edge then_edge, else_edge;
1971 {
1972 basic_block then_bb = then_edge->dest;
1973 basic_block else_bb = else_edge->dest;
1974 basic_block join_bb = NULL_BLOCK;
1975 edge then_succ = then_bb->succ;
1976 edge else_succ = else_bb->succ;
1977 basic_block next;
1978
1979 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1980 if (then_bb->pred->pred_next != NULL_EDGE)
1981 return FALSE;
1982
1983 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1984 if (then_succ != NULL_EDGE
1985 && (then_succ->succ_next != NULL_EDGE
1986 || (then_succ->flags & EDGE_COMPLEX)))
1987 return FALSE;
1988
1989 /* If the THEN block has no successors, conditional execution can still
1990 make a conditional call. Don't do this unless the ELSE block has
1991 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1992 Check for the last insn of the THEN block being an indirect jump, which
1993 is listed as not having any successors, but confuses the rest of the CE
1994 code processing. XXX we should fix this in the future. */
1995 if (then_succ == NULL)
1996 {
1997 if (else_bb->pred->pred_next == NULL_EDGE)
1998 {
1999 rtx last_insn = then_bb->end;
2000
2001 while (last_insn
2002 && GET_CODE (last_insn) == NOTE
2003 && last_insn != then_bb->head)
2004 last_insn = PREV_INSN (last_insn);
2005
2006 if (last_insn
2007 && GET_CODE (last_insn) == JUMP_INSN
2008 && ! simplejump_p (last_insn))
2009 return FALSE;
2010
2011 join_bb = else_bb;
2012 else_bb = NULL_BLOCK;
2013 }
2014 else
2015 return FALSE;
2016 }
2017
2018 /* If the THEN block's successor is the other edge out of the TEST block,
2019 then we have an IF-THEN combo without an ELSE. */
2020 else if (then_succ->dest == else_bb)
2021 {
2022 join_bb = else_bb;
2023 else_bb = NULL_BLOCK;
2024 }
2025
2026 /* If the THEN and ELSE blocks meet in a subsequent block, and the ELSE
2027 has exactly one predecessor and one successor, and the outgoing edge
2028 is not complex, then we have an IF-THEN-ELSE combo. */
2029 else if (else_succ != NULL_EDGE
2030 && then_succ->dest == else_succ->dest
2031 && else_bb->pred->pred_next == NULL_EDGE
2032 && else_succ->succ_next == NULL_EDGE
2033 && ! (else_succ->flags & EDGE_COMPLEX))
2034 join_bb = else_succ->dest;
2035
2036 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2037 else
2038 return FALSE;
2039
2040 num_possible_if_blocks++;
2041
2042 if (rtl_dump_file)
2043 {
2044 if (else_bb)
2045 fprintf (rtl_dump_file,
2046 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
2047 test_bb->index, then_bb->index, else_bb->index,
2048 join_bb->index);
2049 else
2050 fprintf (rtl_dump_file,
2051 "\nIF-THEN block found, start %d, then %d, join %d\n",
2052 test_bb->index, then_bb->index, join_bb->index);
2053 }
2054
2055 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we
2056 get the first condition for free, since we've already asserted that
2057 there's a fallthru edge from IF to THEN. */
2058 /* ??? As an enhancement, move the ELSE block. Have to deal with
2059 BLOCK notes, if by no other means than aborting the merge if they
2060 exist. Sticky enough I don't want to think about it now. */
2061 next = then_bb;
2062 if (else_bb && (next = next->next_bb) != else_bb)
2063 return FALSE;
2064 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2065 {
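      /* The JOIN block is not adjacent to the blocks being merged.  For
	 an IF-THEN-ELSE we can still convert and simply leave the JOIN
	 block alone; for a bare IF-THEN we give up.  */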
2066 if (else_bb)
2067 join_bb = NULL;
2068 else
2069 return FALSE;
2070 }
2071
2072 /* Do the real work. */
2073 return process_if_block (test_bb, then_bb, else_bb, join_bb);
2074 }
2075
2076 /* Convert a branch over a trap, or a branch to a trap,
2077 into a conditional trap. */
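   For example (an illustrative source-level sketch; the actual work is
   done on the RTL):

	if (test) goto over;
	trap;
     over:

   becomes a single conditional trap that fires when TEST is false, and
   a branch directly to a block containing only a trap becomes a
   conditional trap on the branch condition itself.  */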
2078
2079 static int
2080 find_cond_trap (test_bb, then_edge, else_edge)
2081 basic_block test_bb;
2082 edge then_edge, else_edge;
2083 {
2084 basic_block then_bb, else_bb, trap_bb, other_bb;
2085 rtx trap, jump, cond, cond_earliest, seq;
2086 enum rtx_code code;
2087
2088 then_bb = then_edge->dest;
2089 else_bb = else_edge->dest;
2090
2091 /* Locate the block with the trap instruction. */
2092 /* ??? While we look for no successors, we really ought to allow
2093 EH successors. Need to fix merge_if_block for that to work. */
2094 if ((trap = block_has_only_trap (then_bb)) != NULL)
2095 trap_bb = then_bb, other_bb = else_bb;
2096 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2097 trap_bb = else_bb, other_bb = then_bb;
2098 else
2099 return FALSE;
2100
2101 if (rtl_dump_file)
2102 {
2103 fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2104 test_bb->index, trap_bb->index);
2105 }
2106
2107 /* If this is not a standard conditional jump, we can't parse it. */
2108 jump = test_bb->end;
2109 cond = noce_get_condition (jump, &cond_earliest);
2110 if (! cond)
2111 return FALSE;
2112
2113 /* If the jump insn does anything besides the conditional jump itself,
2114 then we cannot do if-conversion on this block. */
2115 if (! onlyjump_p (jump))
2116 return FALSE;
2117
2118 /* We must be comparing objects whose modes imply the size. */
2119 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2120 return FALSE;
2121
2122 /* Reverse the comparison code, if necessary. */
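  /* The condition returned by noce_get_condition is the one under which
     the branch is taken.  If the trap is in the THEN (fallthrough) block,
     it executes only when the branch is not taken, so the trap condition
     must be the reverse of the branch condition.  */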
2123 code = GET_CODE (cond);
2124 if (then_bb == trap_bb)
2125 {
2126 code = reversed_comparison_code (cond, jump);
2127 if (code == UNKNOWN)
2128 return FALSE;
2129 }
2130
2131 /* Attempt to generate the conditional trap. */
2132 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2133 TRAP_CODE (PATTERN (trap)));
2134 if (seq == NULL)
2135 return FALSE;
2136
2137 /* Emit the new insns before cond_earliest. */
2138 emit_insn_before_scope (seq, cond_earliest, INSN_SCOPE (trap));
2139
2140 /* Delete the trap block if possible. */
2141 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2142 if (trap_bb->pred == NULL)
2143 {
2144 if (post_dominators)
2145 delete_from_dominance_info (post_dominators, trap_bb);
2146 flow_delete_block (trap_bb);
2147 num_removed_blocks++;
2148 }
2149
2150 /* If the non-trap block and the test are now adjacent, merge them.
2151 Otherwise we must insert a direct branch. */
2152 if (test_bb->next_bb == other_bb)
2153 {
2154 delete_insn (jump);
2155 merge_if_block (test_bb, NULL, NULL, other_bb);
2156 }
2157 else
2158 {
2159 rtx lab, newjump;
2160
2161 lab = JUMP_LABEL (jump);
2162 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2163 LABEL_NUSES (lab) += 1;
2164 JUMP_LABEL (newjump) = lab;
2165 emit_barrier_after (newjump);
2166
2167 delete_insn (jump);
2168 }
2169
2170 return TRUE;
2171 }
2172
2173 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2174 return it. */
2175
2176 static rtx
2177 block_has_only_trap (bb)
2178 basic_block bb;
2179 {
2180 rtx trap;
2181
2182 /* We're not the exit block. */
2183 if (bb == EXIT_BLOCK_PTR)
2184 return NULL_RTX;
2185
2186 /* The block must have no successors. */
2187 if (bb->succ)
2188 return NULL_RTX;
2189
2190 /* The only active instruction in the block must be the trap. */
2191 trap = first_active_insn (bb);
2192 if (! (trap == bb->end
2193 && GET_CODE (PATTERN (trap)) == TRAP_IF
2194 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2195 return NULL_RTX;
2196
2197 return trap;
2198 }
2199
2200 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2201 transformable, but not necessarily the other. There need be no
2202 JOIN block.
2203
2204 Return TRUE if we were successful at converting the block.
2205
2206 Cases we'd like to look at:
2207
2208 (1)
2209 if (test) goto over; // x not live
2210 x = a;
2211 goto label;
2212 over:
2213
2214 becomes
2215
2216 x = a;
2217 if (! test) goto label;
2218
2219 (2)
2220 if (test) goto E; // x not live
2221 x = big();
2222 goto L;
2223 E:
2224 x = b;
2225 goto M;
2226
2227 becomes
2228
2229 x = b;
2230 if (test) goto M;
2231 x = big();
2232 goto L;
2233
2234 (3) // This one's really only interesting for targets that can do
2235 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2236 // it results in multiple branches on a cache line, which often
2237 // does not sit well with predictors.
2238
2239 if (test1) goto E; // predicted not taken
2240 x = a;
2241 if (test2) goto F;
2242 ...
2243 E:
2244 x = b;
2245 J:
2246
2247 becomes
2248
2249 x = a;
2250 if (test1) goto E;
2251 if (test2) goto F;
2252
2253 Notes:
2254
2255 (A) Don't do (2) if the branch is predicted against the block we're
2256 eliminating. Do it anyway if we can eliminate a branch; this requires
2257 that the sole successor of the eliminated block postdominate the other
2258 side of the if.
2259
2260 (B) With CE, on (3) we can steal from both sides of the if, creating
2261
2262 if (test1) x = a;
2263 if (!test1) x = b;
2264 if (test1) goto J;
2265 if (test2) goto F;
2266 ...
2267 J:
2268
2269 Again, this is most useful if J postdominates.
2270
2271 (C) CE substitutes for helpful life information.
2272
2273 (D) These heuristics need a lot of work. */
2274
2275 /* Tests for case 1 above. */
2276
2277 static int
2278 find_if_case_1 (test_bb, then_edge, else_edge)
2279 basic_block test_bb;
2280 edge then_edge, else_edge;
2281 {
2282 basic_block then_bb = then_edge->dest;
2283 basic_block else_bb = else_edge->dest, new_bb;
2284 edge then_succ = then_bb->succ;
2285 int then_bb_index;
2286
2287 /* THEN has one successor. */
2288 if (!then_succ || then_succ->succ_next != NULL)
2289 return FALSE;
2290
2291 /* THEN does not fall through, but is not strange either. */
2292 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2293 return FALSE;
2294
2295 /* THEN has one predecessor. */
2296 if (then_bb->pred->pred_next != NULL)
2297 return FALSE;
2298
2299 /* THEN must do something. */
2300 if (forwarder_block_p (then_bb))
2301 return FALSE;
2302
2303 num_possible_if_blocks++;
2304 if (rtl_dump_file)
2305 fprintf (rtl_dump_file,
2306 "\nIF-CASE-1 found, start %d, then %d\n",
2307 test_bb->index, then_bb->index);
2308
2309 /* THEN is small. */
2310 if (count_bb_insns (then_bb) > BRANCH_COST)
2311 return FALSE;
2312
2313 /* Registers set are dead, or are predicable. */
2314 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2315 then_bb->succ->dest, 1))
2316 return FALSE;
2317
2318 /* Conversion went ok, including moving the insns and fixing up the
2319 jump. Adjust the CFG to match. */
2320
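  /* THEN_BB's insns have been moved into TEST_BB and now execute
     unconditionally, so TEST_BB's live-at-end set becomes the union of
     ELSE_BB's live-at-start set and THEN_BB's old live-at-end set.  */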
2321 bitmap_operation (test_bb->global_live_at_end,
2322 else_bb->global_live_at_start,
2323 then_bb->global_live_at_end, BITMAP_IOR);
2324
2325 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2326 then_bb_index = then_bb->index;
2327 if (post_dominators)
2328 delete_from_dominance_info (post_dominators, then_bb);
2329 flow_delete_block (then_bb);
2330 /* Make the rest of the code believe that the newly created block is the
2331 THEN_BB block we removed. */
2332 if (new_bb)
2333 {
2334 new_bb->index = then_bb_index;
2335 BASIC_BLOCK (then_bb_index) = new_bb;
2336 }
2337 /* We may have created a jump to the following insn; cleanup_cfg will
2338 fix that up later. */
2339
2340 num_removed_blocks++;
2341 num_updated_if_blocks++;
2342
2343 return TRUE;
2344 }
2345
2346 /* Tests for case 2 above. */
2347
2348 static int
2349 find_if_case_2 (test_bb, then_edge, else_edge)
2350 basic_block test_bb;
2351 edge then_edge, else_edge;
2352 {
2353 basic_block then_bb = then_edge->dest;
2354 basic_block else_bb = else_edge->dest;
2355 edge else_succ = else_bb->succ;
2356 rtx note;
2357
2358 /* ELSE has one successor. */
2359 if (!else_succ || else_succ->succ_next != NULL)
2360 return FALSE;
2361
2362 /* ELSE outgoing edge is not complex. */
2363 if (else_succ->flags & EDGE_COMPLEX)
2364 return FALSE;
2365
2366 /* ELSE has one predecessor. */
2367 if (else_bb->pred->pred_next != NULL)
2368 return FALSE;
2369
2370 /* THEN is not the EXIT block (only ENTRY and EXIT have negative indices). */
2371 if (then_bb->index < 0)
2372 return FALSE;
2373
2374 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2375 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2376 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2377 ;
2378 else if (else_succ->dest->index < 0
2379 || dominated_by_p (post_dominators, then_bb,
2380 else_succ->dest))
2381 ;
2382 else
2383 return FALSE;
2384
2385 num_possible_if_blocks++;
2386 if (rtl_dump_file)
2387 fprintf (rtl_dump_file,
2388 "\nIF-CASE-2 found, start %d, else %d\n",
2389 test_bb->index, else_bb->index);
2390
2391 /* ELSE is small. */
2392 if (count_bb_insns (else_bb) > BRANCH_COST)
2393 return FALSE;
2394
2395 /* Registers set are dead, or are predicable. */
2396 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2397 return FALSE;
2398
2399 /* Conversion went ok, including moving the insns and fixing up the
2400 jump. Adjust the CFG to match. */
2401
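  /* ELSE_BB's insns have been moved into TEST_BB and now execute
     unconditionally, so TEST_BB's live-at-end set becomes the union of
     THEN_BB's live-at-start set and ELSE_BB's old live-at-end set.  */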
2402 bitmap_operation (test_bb->global_live_at_end,
2403 then_bb->global_live_at_start,
2404 else_bb->global_live_at_end, BITMAP_IOR);
2405
2406 if (post_dominators)
2407 delete_from_dominance_info (post_dominators, else_bb);
2408 flow_delete_block (else_bb);
2409
2410 num_removed_blocks++;
2411 num_updated_if_blocks++;
2412
2413 /* ??? We may now fallthru from one of THEN's successors into a join
2414 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2415
2416 return TRUE;
2417 }
2418
2419 /* A subroutine of dead_or_predicable called through for_each_rtx.
2420 Return 1 if a memory reference is found. */
2421
2422 static int
2423 find_memory (px, data)
2424 rtx *px;
2425 void *data ATTRIBUTE_UNUSED;
2426 {
2427 return GET_CODE (*px) == MEM;
2428 }
2429
2430 /* Used by the code above to perform the actual rtl transformations.
2431 Return TRUE if successful.
2432
2433 TEST_BB is the block containing the conditional branch. MERGE_BB
2434 is the block containing the code to manipulate. NEW_DEST is the
2435 label TEST_BB should be branching to after the conversion.
2436 REVERSEP is true if the sense of the branch should be reversed. */
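   /* OTHER_BB, not mentioned above, is the other arm of the IF: the block
      that is not being manipulated.  Its live-at-start set is consulted
      when checking that the registers set in MERGE_BB are dead at the
      branch.  */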
2437
2438 static int
2439 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2440 basic_block test_bb, merge_bb, other_bb;
2441 basic_block new_dest;
2442 int reversep;
2443 {
2444 rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2445
2446 jump = test_bb->end;
2447
2448 /* Find the extent of the real code in the merge block. */
2449 head = merge_bb->head;
2450 end = merge_bb->end;
2451
2452 if (GET_CODE (head) == CODE_LABEL)
2453 head = NEXT_INSN (head);
2454 if (GET_CODE (head) == NOTE)
2455 {
2456 if (head == end)
2457 {
2458 head = end = NULL_RTX;
2459 goto no_body;
2460 }
2461 head = NEXT_INSN (head);
2462 }
2463
2464 if (GET_CODE (end) == JUMP_INSN)
2465 {
2466 if (head == end)
2467 {
2468 head = end = NULL_RTX;
2469 goto no_body;
2470 }
2471 end = PREV_INSN (end);
2472 }
2473
2474 /* Disable handling dead code by conditional execution if the machine needs
2475 to do anything funny with the tests, etc. */
2476 #ifndef IFCVT_MODIFY_TESTS
2477 if (HAVE_conditional_execution)
2478 {
2479 /* In the conditional execution case, we have things easy. We know
2480 the condition is reversible. We don't have to check life info,
2481 because we're going to conditionally execute the code anyway.
2482 All that's left is making sure the insns involved can actually
2483 be predicated. */
2484
2485 rtx cond, prob_val;
2486
2487 cond = cond_exec_get_condition (jump);
2488 if (! cond)
2489 return FALSE;
2490
2491 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2492 if (prob_val)
2493 prob_val = XEXP (prob_val, 0);
2494
2495 if (reversep)
2496 {
2497 enum rtx_code rev = reversed_comparison_code (cond, jump);
2498 if (rev == UNKNOWN)
2499 return FALSE;
2500 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2501 XEXP (cond, 1));
2502 if (prob_val)
2503 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2504 }
2505
2506 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
2507 goto cancel;
2508
2509 earliest = jump;
2510 }
2511 else
2512 #endif
2513 {
2514 /* In the non-conditional execution case, we have to verify that there
2515 are no trapping operations, no calls, no references to memory, and
2516 that any registers modified are dead at the branch site. */
2517
2518 rtx insn, cond, prev;
2519 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2520 regset merge_set, tmp, test_live, test_set;
2521 struct propagate_block_info *pbi;
2522 int i, fail = 0;
2523
2524 /* Check for no calls or trapping operations. */
2525 for (insn = head; ; insn = NEXT_INSN (insn))
2526 {
2527 if (GET_CODE (insn) == CALL_INSN)
2528 return FALSE;
2529 if (INSN_P (insn))
2530 {
2531 if (may_trap_p (PATTERN (insn)))
2532 return FALSE;
2533
2534 /* ??? Even non-trapping memories such as stack frame
2535 references must be avoided. For stores, we collect
2536 no lifetime info; for reads, we'd have to assert
2537 true_dependence false against every store in the
2538 TEST range. */
2539 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2540 return FALSE;
2541 }
2542 if (insn == end)
2543 break;
2544 }
2545
2546 if (! any_condjump_p (jump))
2547 return FALSE;
2548
2549 /* Find the extent of the conditional. */
2550 cond = noce_get_condition (jump, &earliest);
2551 if (! cond)
2552 return FALSE;
2553
2554 /* Collect:
2555 MERGE_SET = set of registers set in MERGE_BB
2556 TEST_LIVE = set of registers live at EARLIEST
2557 TEST_SET = set of registers set between EARLIEST and the
2558 end of the block. */
2559
2560 tmp = INITIALIZE_REG_SET (tmp_head);
2561 merge_set = INITIALIZE_REG_SET (merge_set_head);
2562 test_live = INITIALIZE_REG_SET (test_live_head);
2563 test_set = INITIALIZE_REG_SET (test_set_head);
2564
2565 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2566 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2567 since we've already asserted that MERGE_BB is small. */
2568 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2569
2570 /* For small register class machines, don't lengthen lifetimes of
2571 hard registers before reload. */
2572 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2573 {
2574 EXECUTE_IF_SET_IN_BITMAP
2575 (merge_set, 0, i,
2576 {
2577 if (i < FIRST_PSEUDO_REGISTER
2578 && ! fixed_regs[i]
2579 && ! global_regs[i])
2580 fail = 1;
2581 });
2582 }
2583
2584 /* For TEST, we're interested in a range of insns, not a whole block.
2585 Moreover, we're interested in the registers live into OTHER_BB. */
2586
2587 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2588 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2589 0);
2590
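      /* Walk backwards from the jump to EARLIEST, recording in TEST_SET
	 the registers set by those insns and updating TEST_LIVE along
	 the way.  */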
2591 for (insn = jump; ; insn = prev)
2592 {
2593 prev = propagate_one_insn (pbi, insn);
2594 if (insn == earliest)
2595 break;
2596 }
2597
2598 free_propagate_block_info (pbi);
2599
2600 /* We can perform the transformation if
2601 MERGE_SET & (TEST_SET | TEST_LIVE)
2602 and
2603 TEST_SET & merge_bb->global_live_at_start
2604 are empty. */
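      /* The moved insns will be placed before EARLIEST, so they must not
	 set a register whose value the test insns or OTHER_BB still need,
	 must not set a register the test insns also set, and must not use
	 a register the test insns set.  */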
2605
2606 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2607 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2608 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2609
2610 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2611 BITMAP_AND);
2612 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2613
2614 FREE_REG_SET (tmp);
2615 FREE_REG_SET (merge_set);
2616 FREE_REG_SET (test_live);
2617 FREE_REG_SET (test_set);
2618
2619 if (fail)
2620 return FALSE;
2621 }
2622
2623 no_body:
2624 /* We don't want to use normal invert_jump or redirect_jump because
2625 we don't want delete_insn to be called. Also, we want to do our own
2626 change group management. */
2627
2628 old_dest = JUMP_LABEL (jump);
2629 if (other_bb != new_dest)
2630 {
2631 new_label = block_label (new_dest);
2632 if (reversep
2633 ? ! invert_jump_1 (jump, new_label)
2634 : ! redirect_jump_1 (jump, new_label))
2635 goto cancel;
2636 }
2637
2638 if (! apply_change_group ())
2639 return FALSE;
2640
2641 if (other_bb != new_dest)
2642 {
2643 if (old_dest)
2644 LABEL_NUSES (old_dest) -= 1;
2645 if (new_label)
2646 LABEL_NUSES (new_label) += 1;
2647 JUMP_LABEL (jump) = new_label;
2648 if (reversep)
2649 invert_br_probabilities (jump);
2650
2651 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
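      /* Reversing the branch also swaps which edge is taken, so exchange
	 the counts and probabilities of the two outgoing edges and
	 refresh the REG_BR_PROB note.  */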
2652 if (reversep)
2653 {
2654 gcov_type count, probability;
2655 count = BRANCH_EDGE (test_bb)->count;
2656 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
2657 FALLTHRU_EDGE (test_bb)->count = count;
2658 probability = BRANCH_EDGE (test_bb)->probability;
2659 BRANCH_EDGE (test_bb)->probability
2660 = FALLTHRU_EDGE (test_bb)->probability;
2661 FALLTHRU_EDGE (test_bb)->probability = probability;
2662 update_br_prob_note (test_bb);
2663 }
2664 }
2665
2666 /* Move the insns out of MERGE_BB to before the branch. */
2667 if (head != NULL)
2668 {
2669 if (end == merge_bb->end)
2670 merge_bb->end = PREV_INSN (head);
2671
2672 if (squeeze_notes (&head, &end))
2673 return TRUE;
2674
2675 reorder_insns (head, end, PREV_INSN (earliest));
2676 }
2677
2678 /* Remove the jump and edge if we can. */
2679 if (other_bb == new_dest)
2680 {
2681 delete_insn (jump);
2682 remove_edge (BRANCH_EDGE (test_bb));
2683 /* ??? Can't merge blocks here, as then_bb is still in use.
2684 At minimum, the merge will get done just before bb-reorder. */
2685 }
2686
2687 return TRUE;
2688
2689 cancel:
2690 cancel_changes (0);
2691 return FALSE;
2692 }
2693 \f
2694 /* Main entry point for all if-conversion. */
2695
2696 void
2697 if_convert (x_life_data_ok)
2698 int x_life_data_ok;
2699 {
2700 basic_block bb;
2701
2702 num_possible_if_blocks = 0;
2703 num_updated_if_blocks = 0;
2704 num_removed_blocks = 0;
2705 life_data_ok = (x_life_data_ok != 0);
2706
2707 /* Free up basic_block_for_insn so that we don't have to keep it
2708 up to date, either here or in merge_blocks_nomove. */
2709 free_basic_block_vars (1);
2710
2711 /* Compute postdominators if we think we'll use them. */
2712 post_dominators = NULL;
2713 if (HAVE_conditional_execution || life_data_ok)
2714 {
2715 post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
2716 }
2717 if (life_data_ok)
2718 clear_bb_flags ();
2719
2720 /* Go through each of the basic blocks looking for things to convert. */
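  /* A successful conversion may leave BB ending in another convertible
     IF (e.g. after the THEN, ELSE and JOIN blocks are merged into it),
     so keep retrying BB until find_if_header fails.  */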
2721 FOR_EACH_BB (bb)
2722 while (find_if_header (bb))
2723 continue;
2724
2725 if (post_dominators)
2726 free_dominance_info (post_dominators);
2727
2728 if (rtl_dump_file)
2729 fflush (rtl_dump_file);
2730
2731 clear_aux_for_blocks ();
2732
2733 /* Rebuild life info for basic blocks that require it. */
2734 if (num_removed_blocks && life_data_ok)
2735 {
2736 /* If we allocated new pseudos, we must resize the array for sched1. */
2737 if (max_regno < max_reg_num ())
2738 {
2739 max_regno = max_reg_num ();
2740 allocate_reg_info (max_regno, FALSE, FALSE);
2741 }
2742 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
2743 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2744 | PROP_KILL_DEAD_CODE);
2745 }
2746
2747 /* Write the final stats. */
2748 if (rtl_dump_file && num_possible_if_blocks > 0)
2749 {
2750 fprintf (rtl_dump_file,
2751 "\n%d possible IF blocks searched.\n",
2752 num_possible_if_blocks);
2753 fprintf (rtl_dump_file,
2754 "%d IF blocks converted.\n",
2755 num_updated_if_blocks);
2756 fprintf (rtl_dump_file,
2757 "%d basic blocks deleted.\n\n\n",
2758 num_removed_blocks);
2759 }
2760
2761 #ifdef ENABLE_CHECKING
2762 verify_flow_info ();
2763 #endif
2764 }