gcc/ifcvt.c
1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23
24 #include "rtl.h"
25 #include "regs.h"
26 #include "function.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "except.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "expr.h"
34 #include "real.h"
35 #include "output.h"
36 #include "toplev.h"
37 #include "tm_p.h"
38
39
40 #ifndef HAVE_conditional_execution
41 #define HAVE_conditional_execution 0
42 #endif
43 #ifndef HAVE_conditional_move
44 #define HAVE_conditional_move 0
45 #endif
46 #ifndef HAVE_incscc
47 #define HAVE_incscc 0
48 #endif
49 #ifndef HAVE_decscc
50 #define HAVE_decscc 0
51 #endif
52 #ifndef HAVE_trap
53 #define HAVE_trap 0
54 #endif
55 #ifndef HAVE_conditional_trap
56 #define HAVE_conditional_trap 0
57 #endif
58
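     /* Maximum number of insns we are willing to convert when predicating a
        block (doubled when both a THEN and an ELSE arm are present).
        Defaults to one more than the cost of the branch being removed;
        targets may define their own limit.  */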
59 #ifndef MAX_CONDITIONAL_EXECUTE
60 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
61 #endif
62
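     /* Typed null pointer constants for edges and basic blocks.  */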
63 #define NULL_EDGE ((struct edge_def *)NULL)
64 #define NULL_BLOCK ((struct basic_block_def *)NULL)
65
 66 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
67 static int num_possible_if_blocks;
68
 69 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
 70    execution.  */
71 static int num_updated_if_blocks;
72
73 /* # of basic blocks that were removed. */
74 static int num_removed_blocks;
75
76 /* True if life data ok at present. */
77 static bool life_data_ok;
78
79 /* The post-dominator relation on the original block numbers. */
80 static sbitmap *post_dominators;
81
82 /* Forward references. */
83 static int count_bb_insns PARAMS ((basic_block));
84 static rtx first_active_insn PARAMS ((basic_block));
85 static int last_active_insn_p PARAMS ((basic_block, rtx));
86 static int seq_contains_jump PARAMS ((rtx));
87
88 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
89 static rtx cond_exec_get_condition PARAMS ((rtx));
90 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
91 basic_block, basic_block));
92
93 static rtx noce_get_condition PARAMS ((rtx, rtx *));
94 static int noce_operand_ok PARAMS ((rtx));
95 static int noce_process_if_block PARAMS ((basic_block, basic_block,
96 basic_block, basic_block));
97
98 static int process_if_block PARAMS ((basic_block, basic_block,
99 basic_block, basic_block));
100 static void merge_if_block PARAMS ((basic_block, basic_block,
101 basic_block, basic_block));
102
103 static int find_if_header PARAMS ((basic_block));
104 static int find_if_block PARAMS ((basic_block, edge, edge));
105 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
106 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
107 static int find_cond_trap PARAMS ((basic_block, edge, edge));
108 static rtx block_has_only_trap PARAMS ((basic_block));
109 static int find_memory PARAMS ((rtx *, void *));
110 static int dead_or_predicable PARAMS ((basic_block, basic_block,
111 basic_block, basic_block, int));
112 static void noce_emit_move_insn PARAMS ((rtx, rtx));
113 \f
114 /* Abuse the basic_block AUX field to store the original block index,
 115    as well as a flag indicating that the block should be rescanned for
116 life analysis. */
117
118 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I)))
119 #define ORIG_INDEX(BB) ((size_t)(BB)->aux)
120
121 \f
122 /* Count the number of non-jump active insns in BB. */
123
124 static int
125 count_bb_insns (bb)
126 basic_block bb;
127 {
128 int count = 0;
129 rtx insn = bb->head;
130
131 while (1)
132 {
133 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
134 count++;
135
136 if (insn == bb->end)
137 break;
138 insn = NEXT_INSN (insn);
139 }
140
141 return count;
142 }
143
144 /* Return the first non-jump active insn in the basic block. */
145
146 static rtx
147 first_active_insn (bb)
148 basic_block bb;
149 {
150 rtx insn = bb->head;
151
152 if (GET_CODE (insn) == CODE_LABEL)
153 {
154 if (insn == bb->end)
155 return NULL_RTX;
156 insn = NEXT_INSN (insn);
157 }
158
159 while (GET_CODE (insn) == NOTE)
160 {
161 if (insn == bb->end)
162 return NULL_RTX;
163 insn = NEXT_INSN (insn);
164 }
165
166 if (GET_CODE (insn) == JUMP_INSN)
167 return NULL_RTX;
168
169 return insn;
170 }
171
172 /* Return true if INSN is the last active non-jump insn in BB. */
173
174 static int
175 last_active_insn_p (bb, insn)
176 basic_block bb;
177 rtx insn;
178 {
179 do
180 {
181 if (insn == bb->end)
182 return TRUE;
183 insn = NEXT_INSN (insn);
184 }
185 while (GET_CODE (insn) == NOTE);
186
187 return GET_CODE (insn) == JUMP_INSN;
188 }
189
 190 /* It is possible, especially when expanding multi-word arithmetic,
 191    for the expanders to have emitted jumps.  Search
192 through the sequence and return TRUE if a jump exists so that
193 we can abort the conversion. */
194
195 static int
196 seq_contains_jump (insn)
197 rtx insn;
198 {
199 while (insn)
200 {
201 if (GET_CODE (insn) == JUMP_INSN)
202 return 1;
203 insn = NEXT_INSN (insn);
204 }
205 return 0;
206 }
207 \f
208 /* Go through a bunch of insns, converting them to conditional
209 execution format if possible. Return TRUE if all of the non-note
210 insns were processed. */
211
212 static int
213 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
214 rtx start; /* first insn to look at */
215 rtx end; /* last insn to look at */
216 rtx test; /* conditional execution test */
217 rtx prob_val; /* probability of branch taken. */
 218      int mod_ok;		/* true if the last insn may modify the test.  */
219 {
220 int must_be_last = FALSE;
221 rtx insn;
222 rtx pattern;
223
224 for (insn = start; ; insn = NEXT_INSN (insn))
225 {
226 if (GET_CODE (insn) == NOTE)
227 goto insn_done;
228
229 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
230 abort ();
231
232 /* Remove USE insns that get in the way. */
233 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
234 {
235 /* ??? Ug. Actually unlinking the thing is problematic,
236 given what we'd have to coordinate with our callers. */
237 PUT_CODE (insn, NOTE);
238 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
239 NOTE_SOURCE_FILE (insn) = 0;
240 goto insn_done;
241 }
242
243 /* Last insn wasn't last? */
244 if (must_be_last)
245 return FALSE;
246
247 if (modified_in_p (test, insn))
248 {
249 if (!mod_ok)
250 return FALSE;
251 must_be_last = TRUE;
252 }
253
254 /* Now build the conditional form of the instruction. */
255 pattern = PATTERN (insn);
256
257 /* If the machine needs to modify the insn being conditionally executed,
258 say for example to force a constant integer operand into a temp
259 register, do so here. */
260 #ifdef IFCVT_MODIFY_INSN
261 IFCVT_MODIFY_INSN (pattern, insn);
262 if (! pattern)
263 return FALSE;
264 #endif
265
266 validate_change (insn, &PATTERN (insn),
267 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
268 pattern), 1);
269
270 if (GET_CODE (insn) == CALL_INSN && prob_val)
271 validate_change (insn, &REG_NOTES (insn),
272 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
273 REG_NOTES (insn)), 1);
274
275 insn_done:
276 if (insn == end)
277 break;
278 }
279
280 return TRUE;
281 }
282
283 /* Return the condition for a jump. Do not do any special processing. */
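     /* The condition is returned in the sense of the branch being taken:
        if the jump transfers control to its label when the condition is
        false, the comparison code is reversed first.  */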
284
285 static rtx
286 cond_exec_get_condition (jump)
287 rtx jump;
288 {
289 rtx test_if, cond;
290
291 if (any_condjump_p (jump))
292 test_if = SET_SRC (pc_set (jump));
293 else
294 return NULL_RTX;
295 cond = XEXP (test_if, 0);
296
297 /* If this branches to JUMP_LABEL when the condition is false,
298 reverse the condition. */
299 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
300 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
301 {
302 enum rtx_code rev = reversed_comparison_code (cond, jump);
303 if (rev == UNKNOWN)
304 return NULL_RTX;
305
306 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
307 XEXP (cond, 1));
308 }
309
310 return cond;
311 }
312
313 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
314 to conditional execution. Return TRUE if we were successful at
 315    converting the block.  */
316
317 static int
318 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
319 basic_block test_bb; /* Basic block test is in */
320 basic_block then_bb; /* Basic block for THEN block */
321 basic_block else_bb; /* Basic block for ELSE block */
322 basic_block join_bb; /* Basic block the join label is in */
323 {
324 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
325 rtx then_start; /* first insn in THEN block */
326 rtx then_end; /* last insn + 1 in THEN block */
327 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
328 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
329 int max; /* max # of insns to convert. */
330 int then_mod_ok; /* whether conditional mods are ok in THEN */
331 rtx true_expr; /* test for else block insns */
332 rtx false_expr; /* test for then block insns */
333 rtx true_prob_val; /* probability of else block */
334 rtx false_prob_val; /* probability of then block */
335 int n_insns;
336 enum rtx_code false_code;
337
338 /* Find the conditional jump to the ELSE or JOIN part, and isolate
339 the test. */
340 test_expr = cond_exec_get_condition (test_bb->end);
341 if (! test_expr)
342 return FALSE;
343
344 /* If the conditional jump is more than just a conditional jump,
345 then we can not do conditional execution conversion on this block. */
346 if (!onlyjump_p (test_bb->end))
347 return FALSE;
348
349 /* Collect the bounds of where we're to search. */
350
351 then_start = then_bb->head;
352 then_end = then_bb->end;
353
354 /* Skip a label heading THEN block. */
355 if (GET_CODE (then_start) == CODE_LABEL)
356 then_start = NEXT_INSN (then_start);
357
358 /* Skip a (use (const_int 0)) or branch as the final insn. */
359 if (GET_CODE (then_end) == INSN
360 && GET_CODE (PATTERN (then_end)) == USE
361 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
362 then_end = PREV_INSN (then_end);
363 else if (GET_CODE (then_end) == JUMP_INSN)
364 then_end = PREV_INSN (then_end);
365
366 if (else_bb)
367 {
368 /* Skip the ELSE block's label. */
369 else_start = NEXT_INSN (else_bb->head);
370 else_end = else_bb->end;
371
372 /* Skip a (use (const_int 0)) or branch as the final insn. */
373 if (GET_CODE (else_end) == INSN
374 && GET_CODE (PATTERN (else_end)) == USE
375 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
376 else_end = PREV_INSN (else_end);
377 else if (GET_CODE (else_end) == JUMP_INSN)
378 else_end = PREV_INSN (else_end);
379 }
380
381 /* How many instructions should we convert in total? */
382 n_insns = 0;
383 if (else_bb)
384 {
385 max = 2 * MAX_CONDITIONAL_EXECUTE;
386 n_insns = count_bb_insns (else_bb);
387 }
388 else
389 max = MAX_CONDITIONAL_EXECUTE;
390 n_insns += count_bb_insns (then_bb);
391 if (n_insns > max)
392 return FALSE;
393
394 /* Map test_expr/test_jump into the appropriate MD tests to use on
395 the conditionally executed code. */
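     /* TEST_EXPR is the condition under which the branch at the end of
        TEST_BB is taken, and taking the branch skips the THEN block; so the
        THEN insns are predicated on the reversed condition (FALSE_EXPR
        below) and the ELSE insns on TEST_EXPR itself.  */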
396
397 true_expr = test_expr;
398
399 false_code = reversed_comparison_code (true_expr, test_bb->end);
400 if (false_code != UNKNOWN)
401 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
402 XEXP (true_expr, 0), XEXP (true_expr, 1));
403 else
404 false_expr = NULL_RTX;
405
406 #ifdef IFCVT_MODIFY_TESTS
407 /* If the machine description needs to modify the tests, such as setting a
408 conditional execution register from a comparison, it can do so here. */
409 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
410 join_bb);
411
412 /* See if the conversion failed */
413 if (!true_expr || !false_expr)
414 goto fail;
415 #endif
416
417 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
418 if (true_prob_val)
419 {
420 true_prob_val = XEXP (true_prob_val, 0);
421 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
422 }
423 else
424 false_prob_val = NULL_RTX;
425
426 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
 427    in the THEN block.  */
428 then_mod_ok = (else_bb == NULL_BLOCK);
429
430 /* Go through the THEN and ELSE blocks converting the insns if possible
431 to conditional execution. */
432
433 if (then_end
434 && (! false_expr
435 || ! cond_exec_process_insns (then_start, then_end, false_expr,
436 false_prob_val, then_mod_ok)))
437 goto fail;
438
439 if (else_bb
440 && ! cond_exec_process_insns (else_start, else_end,
441 true_expr, true_prob_val, TRUE))
442 goto fail;
443
444 if (! apply_change_group ())
445 return FALSE;
446
447 #ifdef IFCVT_MODIFY_FINAL
448 /* Do any machine dependent final modifications */
449 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
450 #endif
451
452 /* Conversion succeeded. */
453 if (rtl_dump_file)
454 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
455 n_insns, (n_insns == 1) ? " was" : "s were");
456
457 /* Merge the blocks! */
458 merge_if_block (test_bb, then_bb, else_bb, join_bb);
459 return TRUE;
460
461 fail:
462 #ifdef IFCVT_MODIFY_CANCEL
463 /* Cancel any machine dependent changes. */
464 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
465 #endif
466
467 cancel_changes (0);
468 return FALSE;
469 }
470 \f
471 /* Used by noce_process_if_block to communicate with its subroutines.
472
473 The subroutines know that A and B may be evaluated freely. They
474 know that X is a register. They should insert new instructions
475 before cond_earliest. */
476
477 struct noce_if_info
478 {
 479   basic_block test_bb;		/* block containing the conditional jump */
 480   rtx insn_a, insn_b;		/* the THEN arm insn; the ELSE arm or earlier insn setting X */
 481   rtx x, a, b;			/* X is set to A in the THEN arm, to B otherwise */
 482   rtx jump, cond, cond_earliest;	/* the jump, its condition, and the earliest insn computing it */
483 };
484
485 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
486 rtx, int, int));
487 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
488 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
489 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
490 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
491 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
492 rtx, enum rtx_code, rtx,
493 rtx, rtx, rtx));
494 static int noce_try_cmove PARAMS ((struct noce_if_info *));
495 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
496 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
497 rtx, rtx *));
498 static int noce_try_minmax PARAMS ((struct noce_if_info *));
499 static int noce_try_abs PARAMS ((struct noce_if_info *));
500
501 /* Helper function for noce_try_store_flag*. */
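     /* Emit code to store the value of IF_INFO->cond into X, reversing the
        sense of the comparison if REVERSEP.  When the condition is complex,
        or when cond_earliest == jump, first try building the store-flag SET
        by hand and checking it with recog; otherwise fall back on
        emit_store_flag.  */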
502
503 static rtx
504 noce_emit_store_flag (if_info, x, reversep, normalize)
505 struct noce_if_info *if_info;
506 rtx x;
507 int reversep, normalize;
508 {
509 rtx cond = if_info->cond;
510 int cond_complex;
511 enum rtx_code code;
512
513 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
514 || ! general_operand (XEXP (cond, 1), VOIDmode));
515
516 /* If earliest == jump, or when the condition is complex, try to
517 build the store_flag insn directly. */
518
519 if (cond_complex)
520 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
521
522 if (reversep)
523 code = reversed_comparison_code (cond, if_info->jump);
524 else
525 code = GET_CODE (cond);
526
527 if ((if_info->cond_earliest == if_info->jump || cond_complex)
528 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
529 {
530 rtx tmp;
531
532 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
533 XEXP (cond, 1));
534 tmp = gen_rtx_SET (VOIDmode, x, tmp);
535
536 start_sequence ();
537 tmp = emit_insn (tmp);
538
539 if (recog_memoized (tmp) >= 0)
540 {
541 tmp = get_insns ();
542 end_sequence ();
543 emit_insns (tmp);
544
545 if_info->cond_earliest = if_info->jump;
546
547 return x;
548 }
549
550 end_sequence ();
551 }
552
553 /* Don't even try if the comparison operands are weird. */
554 if (cond_complex)
555 return NULL_RTX;
556
557 return emit_store_flag (x, code, XEXP (cond, 0),
558 XEXP (cond, 1), VOIDmode,
559 (code == LTU || code == LEU
560 || code == GEU || code == GTU), normalize);
561 }
562
563 /* Emit instruction to move an rtx into STRICT_LOW_PART. */
564 static void
565 noce_emit_move_insn (x, y)
566 rtx x, y;
567 {
568 enum machine_mode outmode, inmode;
569 rtx outer, inner;
570 int bitpos;
571
572 if (GET_CODE (x) != STRICT_LOW_PART)
573 {
574 emit_move_insn (x, y);
575 return;
576 }
577
578 outer = XEXP (x, 0);
579 inner = XEXP (outer, 0);
580 outmode = GET_MODE (outer);
581 inmode = GET_MODE (inner);
582 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
583 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
584 GET_MODE_BITSIZE (inmode));
585 }
586
587 /* Convert "if (test) x = 1; else x = 0".
588
589 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
590 tried in noce_try_store_flag_constants after noce_try_cmove has had
591 a go at the conversion. */
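     /* Implemented by asking noce_emit_store_flag for the value of the
        condition and, if the expander used a different register, copying
        the result into X before the jump.  */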
592
593 static int
594 noce_try_store_flag (if_info)
595 struct noce_if_info *if_info;
596 {
597 int reversep;
598 rtx target, seq;
599
600 if (GET_CODE (if_info->b) == CONST_INT
601 && INTVAL (if_info->b) == STORE_FLAG_VALUE
602 && if_info->a == const0_rtx)
603 reversep = 0;
604 else if (if_info->b == const0_rtx
605 && GET_CODE (if_info->a) == CONST_INT
606 && INTVAL (if_info->a) == STORE_FLAG_VALUE
607 && (reversed_comparison_code (if_info->cond, if_info->jump)
608 != UNKNOWN))
609 reversep = 1;
610 else
611 return FALSE;
612
613 start_sequence ();
614
615 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
616 if (target)
617 {
618 if (target != if_info->x)
619 noce_emit_move_insn (if_info->x, target);
620
621 seq = get_insns ();
622 end_sequence ();
623 emit_insns_before (seq, if_info->jump);
624
625 return TRUE;
626 }
627 else
628 {
629 end_sequence ();
630 return FALSE;
631 }
632 }
633
634 /* Convert "if (test) x = a; else x = b", for A and B constant. */
635
636 static int
637 noce_try_store_flag_constants (if_info)
638 struct noce_if_info *if_info;
639 {
640 rtx target, seq;
641 int reversep;
642 HOST_WIDE_INT itrue, ifalse, diff, tmp;
643 int normalize, can_reverse;
644 enum machine_mode mode;
645
646 if (! no_new_pseudos
647 && GET_CODE (if_info->a) == CONST_INT
648 && GET_CODE (if_info->b) == CONST_INT)
649 {
650 mode = GET_MODE (if_info->x);
651 ifalse = INTVAL (if_info->a);
652 itrue = INTVAL (if_info->b);
653
654 /* Make sure we can represent the difference between the two values. */
655 if ((itrue - ifalse > 0)
656 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
657 return FALSE;
658
659 diff = trunc_int_for_mode (itrue - ifalse, mode);
660
661 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
662 != UNKNOWN);
663
664 reversep = 0;
665 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
666 normalize = 0;
667 else if (ifalse == 0 && exact_log2 (itrue) >= 0
668 && (STORE_FLAG_VALUE == 1
669 || BRANCH_COST >= 2))
670 normalize = 1;
671 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
672 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
673 normalize = 1, reversep = 1;
674 else if (itrue == -1
675 && (STORE_FLAG_VALUE == -1
676 || BRANCH_COST >= 2))
677 normalize = -1;
678 else if (ifalse == -1 && can_reverse
679 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
680 normalize = -1, reversep = 1;
681 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
682 || BRANCH_COST >= 3)
683 normalize = -1;
684 else
685 return FALSE;
686
687 if (reversep)
688 {
689 tmp = itrue; itrue = ifalse; ifalse = tmp;
690 diff = trunc_int_for_mode (-diff, mode);
691 }
692
693 start_sequence ();
694 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
695 if (! target)
696 {
697 end_sequence ();
698 return FALSE;
699 }
700
701 /* if (test) x = 3; else x = 4;
702 => x = 3 + (test == 0); */
703 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
704 {
705 target = expand_simple_binop (mode,
706 (diff == STORE_FLAG_VALUE
707 ? PLUS : MINUS),
708 GEN_INT (ifalse), target, if_info->x, 0,
709 OPTAB_WIDEN);
710 }
711
712 /* if (test) x = 8; else x = 0;
713 => x = (test != 0) << 3; */
714 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
715 {
716 target = expand_simple_binop (mode, ASHIFT,
717 target, GEN_INT (tmp), if_info->x, 0,
718 OPTAB_WIDEN);
719 }
720
721 /* if (test) x = -1; else x = b;
722 => x = -(test != 0) | b; */
723 else if (itrue == -1)
724 {
725 target = expand_simple_binop (mode, IOR,
726 target, GEN_INT (ifalse), if_info->x, 0,
727 OPTAB_WIDEN);
728 }
729
730 /* if (test) x = a; else x = b;
731 => x = (-(test != 0) & (b - a)) + a; */
732 else
733 {
734 target = expand_simple_binop (mode, AND,
735 target, GEN_INT (diff), if_info->x, 0,
736 OPTAB_WIDEN);
737 if (target)
738 target = expand_simple_binop (mode, PLUS,
739 target, GEN_INT (ifalse),
740 if_info->x, 0, OPTAB_WIDEN);
741 }
742
743 if (! target)
744 {
745 end_sequence ();
746 return FALSE;
747 }
748
749 if (target != if_info->x)
750 noce_emit_move_insn (if_info->x, target);
751
752 seq = get_insns ();
753 end_sequence ();
754
755 if (seq_contains_jump (seq))
756 return FALSE;
757
758 emit_insns_before (seq, if_info->jump);
759
760 return TRUE;
761 }
762
763 return FALSE;
764 }
765
766 /* Convert "if (test) foo++" into "foo += (test != 0)", and
767 similarly for "foo--". */
768
769 static int
770 noce_try_store_flag_inc (if_info)
771 struct noce_if_info *if_info;
772 {
773 rtx target, seq;
774 int subtract, normalize;
775
776 if (! no_new_pseudos
777 && (BRANCH_COST >= 2
778 || HAVE_incscc
779 || HAVE_decscc)
780 /* Should be no `else' case to worry about. */
781 && if_info->b == if_info->x
782 && GET_CODE (if_info->a) == PLUS
783 && (XEXP (if_info->a, 1) == const1_rtx
784 || XEXP (if_info->a, 1) == constm1_rtx)
785 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
786 && (reversed_comparison_code (if_info->cond, if_info->jump)
787 != UNKNOWN))
788 {
789 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
790 subtract = 0, normalize = 0;
791 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
792 subtract = 1, normalize = 0;
793 else
794 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
795
796 start_sequence ();
797
798 target = noce_emit_store_flag (if_info,
799 gen_reg_rtx (GET_MODE (if_info->x)),
800 1, normalize);
801
802 if (target)
803 target = expand_simple_binop (GET_MODE (if_info->x),
804 subtract ? MINUS : PLUS,
805 if_info->x, target, if_info->x,
806 0, OPTAB_WIDEN);
807 if (target)
808 {
809 if (target != if_info->x)
810 noce_emit_move_insn (if_info->x, target);
811
812 seq = get_insns ();
813 end_sequence ();
814
815 if (seq_contains_jump (seq))
816 return FALSE;
817
818 emit_insns_before (seq, if_info->jump);
819
820 return TRUE;
821 }
822
823 end_sequence ();
824 }
825
826 return FALSE;
827 }
828
829 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
830
831 static int
832 noce_try_store_flag_mask (if_info)
833 struct noce_if_info *if_info;
834 {
835 rtx target, seq;
836 int reversep;
837
838 reversep = 0;
839 if (! no_new_pseudos
840 && (BRANCH_COST >= 2
841 || STORE_FLAG_VALUE == -1)
842 && ((if_info->a == const0_rtx
843 && rtx_equal_p (if_info->b, if_info->x))
844 || ((reversep = (reversed_comparison_code (if_info->cond,
845 if_info->jump)
846 != UNKNOWN))
847 && if_info->b == const0_rtx
848 && rtx_equal_p (if_info->a, if_info->x))))
849 {
850 start_sequence ();
851 target = noce_emit_store_flag (if_info,
852 gen_reg_rtx (GET_MODE (if_info->x)),
853 reversep, -1);
854 if (target)
855 target = expand_simple_binop (GET_MODE (if_info->x), AND,
856 if_info->x, target, if_info->x, 0,
857 OPTAB_WIDEN);
858
859 if (target)
860 {
861 if (target != if_info->x)
862 noce_emit_move_insn (if_info->x, target);
863
864 seq = get_insns ();
865 end_sequence ();
866
867 if (seq_contains_jump (seq))
868 return FALSE;
869
870 emit_insns_before (seq, if_info->jump);
871
872 return TRUE;
873 }
874
875 end_sequence ();
876 }
877
878 return FALSE;
879 }
880
881 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
882
883 static rtx
884 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
885 struct noce_if_info *if_info;
886 rtx x, cmp_a, cmp_b, vfalse, vtrue;
887 enum rtx_code code;
888 {
889 /* If earliest == jump, try to build the cmove insn directly.
890 This is helpful when combine has created some complex condition
891 (like for alpha's cmovlbs) that we can't hope to regenerate
892 through the normal interface. */
893
894 if (if_info->cond_earliest == if_info->jump)
895 {
896 rtx tmp;
897
898 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
899 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
900 tmp = gen_rtx_SET (VOIDmode, x, tmp);
901
902 start_sequence ();
903 tmp = emit_insn (tmp);
904
905 if (recog_memoized (tmp) >= 0)
906 {
907 tmp = get_insns ();
908 end_sequence ();
909 emit_insns (tmp);
910
911 return x;
912 }
913
914 end_sequence ();
915 }
916
917 /* Don't even try if the comparison operands are weird. */
918 if (! general_operand (cmp_a, GET_MODE (cmp_a))
919 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
920 return NULL_RTX;
921
922 #if HAVE_conditional_move
923 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
924 vtrue, vfalse, GET_MODE (x),
925 (code == LTU || code == GEU
926 || code == LEU || code == GTU));
927 #else
 928   /* We'll never get here, as noce_process_if_block doesn't call the
 929      functions involved.  Ifdef code, however, should be discouraged
 930      because it leads to typos in the code not selected; but in this
 931      case emit_conditional_move won't exist either.  */
932 return NULL_RTX;
933 #endif
934 }
935
936 /* Try only simple constants and registers here. More complex cases
 937    are handled in noce_try_cmove_arith after the noce_try_store_flag_*
 938    variants have had a go at it.  */
939
940 static int
941 noce_try_cmove (if_info)
942 struct noce_if_info *if_info;
943 {
944 enum rtx_code code;
945 rtx target, seq;
946
947 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
948 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
949 {
950 start_sequence ();
951
952 code = GET_CODE (if_info->cond);
953 target = noce_emit_cmove (if_info, if_info->x, code,
954 XEXP (if_info->cond, 0),
955 XEXP (if_info->cond, 1),
956 if_info->a, if_info->b);
957
958 if (target)
959 {
960 if (target != if_info->x)
961 noce_emit_move_insn (if_info->x, target);
962
963 seq = get_insns ();
964 end_sequence ();
965 emit_insns_before (seq, if_info->jump);
966 return TRUE;
967 }
968 else
969 {
970 end_sequence ();
971 return FALSE;
972 }
973 }
974
975 return FALSE;
976 }
977
978 /* Try more complex cases involving conditional_move. */
979
980 static int
981 noce_try_cmove_arith (if_info)
982 struct noce_if_info *if_info;
983 {
984 rtx a = if_info->a;
985 rtx b = if_info->b;
986 rtx x = if_info->x;
987 rtx insn_a, insn_b;
988 rtx tmp, target;
989 int is_mem = 0;
990 enum rtx_code code;
991
992 /* A conditional move from two memory sources is equivalent to a
993 conditional on their addresses followed by a load. Don't do this
994 early because it'll screw alias analysis. Note that we've
995 already checked for no side effects. */
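     /* The two MEMs are replaced by their addresses, the conditional move
        then selects between the addresses, and a single load through the
        chosen address is emitted afterward (see the IS_MEM handling
        below).  */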
996 if (! no_new_pseudos && cse_not_expected
997 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
998 && BRANCH_COST >= 5)
999 {
1000 a = XEXP (a, 0);
1001 b = XEXP (b, 0);
1002 x = gen_reg_rtx (Pmode);
1003 is_mem = 1;
1004 }
1005
1006 /* ??? We could handle this if we knew that a load from A or B could
1007 not fault. This is also true if we've already loaded
1008 from the address along the path from ENTRY. */
1009 else if (may_trap_p (a) || may_trap_p (b))
1010 return FALSE;
1011
1012 /* if (test) x = a + b; else x = c - d;
1013 => y = a + b;
1014 x = c - d;
1015 if (test)
1016 x = y;
1017 */
1018
1019 code = GET_CODE (if_info->cond);
1020 insn_a = if_info->insn_a;
1021 insn_b = if_info->insn_b;
1022
1023 /* Possibly rearrange operands to make things come out more natural. */
1024 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1025 {
1026 int reversep = 0;
1027 if (rtx_equal_p (b, x))
1028 reversep = 1;
1029 else if (general_operand (b, GET_MODE (b)))
1030 reversep = 1;
1031
1032 if (reversep)
1033 {
1034 code = reversed_comparison_code (if_info->cond, if_info->jump);
1035 tmp = a, a = b, b = tmp;
1036 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1037 }
1038 }
1039
1040 start_sequence ();
1041
1042 /* If either operand is complex, load it into a register first.
1043 The best way to do this is to copy the original insn. In this
1044 way we preserve any clobbers etc that the insn may have had.
1045 This is of course not possible in the IS_MEM case. */
1046 if (! general_operand (a, GET_MODE (a)))
1047 {
1048 rtx set;
1049
1050 if (no_new_pseudos)
1051 goto end_seq_and_fail;
1052
1053 if (is_mem)
1054 {
1055 tmp = gen_reg_rtx (GET_MODE (a));
1056 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1057 }
1058 else if (! insn_a)
1059 goto end_seq_and_fail;
1060 else
1061 {
1062 a = gen_reg_rtx (GET_MODE (a));
1063 tmp = copy_rtx (insn_a);
1064 set = single_set (tmp);
1065 SET_DEST (set) = a;
1066 tmp = emit_insn (PATTERN (tmp));
1067 }
1068 if (recog_memoized (tmp) < 0)
1069 goto end_seq_and_fail;
1070 }
1071 if (! general_operand (b, GET_MODE (b)))
1072 {
1073 rtx set;
1074
1075 if (no_new_pseudos)
1076 goto end_seq_and_fail;
1077
1078 if (is_mem)
1079 {
1080 tmp = gen_reg_rtx (GET_MODE (b));
1081 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1082 }
1083 else if (! insn_b)
1084 goto end_seq_and_fail;
1085 else
1086 {
1087 b = gen_reg_rtx (GET_MODE (b));
1088 tmp = copy_rtx (insn_b);
1089 set = single_set (tmp);
1090 SET_DEST (set) = b;
1091 tmp = emit_insn (PATTERN (tmp));
1092 }
1093 if (recog_memoized (tmp) < 0)
1094 goto end_seq_and_fail;
1095 }
1096
1097 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1098 XEXP (if_info->cond, 1), a, b);
1099
1100 if (! target)
1101 goto end_seq_and_fail;
1102
 1103   /* If we're handling the memory case from above, emit the load now.  */
1104 if (is_mem)
1105 {
1106 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1107
1108 /* Copy over flags as appropriate. */
1109 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1110 MEM_VOLATILE_P (tmp) = 1;
1111 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1112 MEM_IN_STRUCT_P (tmp) = 1;
1113 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1114 MEM_SCALAR_P (tmp) = 1;
1115 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1116 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1117 set_mem_align (tmp,
1118 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1119
1120 noce_emit_move_insn (if_info->x, tmp);
1121 }
1122 else if (target != x)
1123 noce_emit_move_insn (x, target);
1124
1125 tmp = get_insns ();
1126 end_sequence ();
1127 emit_insns_before (tmp, if_info->jump);
1128 return TRUE;
1129
1130 end_seq_and_fail:
1131 end_sequence ();
1132 return FALSE;
1133 }
1134
1135 /* For most cases, the simplified condition we found is the best
1136 choice, but this is not the case for the min/max/abs transforms.
1137 For these we wish to know that it is A or B in the condition. */
1138
1139 static rtx
1140 noce_get_alt_condition (if_info, target, earliest)
1141 struct noce_if_info *if_info;
1142 rtx target;
1143 rtx *earliest;
1144 {
1145 rtx cond, set, insn;
1146 int reverse;
1147
1148 /* If target is already mentioned in the known condition, return it. */
1149 if (reg_mentioned_p (target, if_info->cond))
1150 {
1151 *earliest = if_info->cond_earliest;
1152 return if_info->cond;
1153 }
1154
1155 set = pc_set (if_info->jump);
1156 cond = XEXP (SET_SRC (set), 0);
1157 reverse
1158 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1159 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1160
1161 /* If we're looking for a constant, try to make the conditional
1162 have that constant in it. There are two reasons why it may
1163 not have the constant we want:
1164
1165 1. GCC may have needed to put the constant in a register, because
1166 the target can't compare directly against that constant. For
1167 this case, we look for a SET immediately before the comparison
1168 that puts a constant in that register.
1169
1170 2. GCC may have canonicalized the conditional, for example
1171 replacing "if x < 4" with "if x <= 3". We can undo that (or
1172 make equivalent types of changes) to get the constants we need
1173 if they're off by one in the right direction. */
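     /* E.g. if we are looking for the constant 4 but the comparison was
        canonicalized to "x <= 3", the adjustments below rebuild it as
        "x < 4".  */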
1174
1175 if (GET_CODE (target) == CONST_INT)
1176 {
1177 enum rtx_code code = GET_CODE (if_info->cond);
1178 rtx op_a = XEXP (if_info->cond, 0);
1179 rtx op_b = XEXP (if_info->cond, 1);
1180 rtx prev_insn;
1181
1182 /* First, look to see if we put a constant in a register. */
1183 prev_insn = PREV_INSN (if_info->cond_earliest);
1184 if (prev_insn
1185 && INSN_P (prev_insn)
1186 && GET_CODE (PATTERN (prev_insn)) == SET)
1187 {
1188 rtx src = find_reg_equal_equiv_note (prev_insn);
1189 if (!src)
1190 src = SET_SRC (PATTERN (prev_insn));
1191 if (GET_CODE (src) == CONST_INT)
1192 {
1193 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1194 op_a = src;
1195 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1196 op_b = src;
1197
1198 if (GET_CODE (op_a) == CONST_INT)
1199 {
1200 rtx tmp = op_a;
1201 op_a = op_b;
1202 op_b = tmp;
1203 code = swap_condition (code);
1204 }
1205 }
1206 }
1207
1208 /* Now, look to see if we can get the right constant by
1209 adjusting the conditional. */
1210 if (GET_CODE (op_b) == CONST_INT)
1211 {
1212 HOST_WIDE_INT desired_val = INTVAL (target);
1213 HOST_WIDE_INT actual_val = INTVAL (op_b);
1214
1215 switch (code)
1216 {
1217 case LT:
1218 if (actual_val == desired_val + 1)
1219 {
1220 code = LE;
1221 op_b = GEN_INT (desired_val);
1222 }
1223 break;
1224 case LE:
1225 if (actual_val == desired_val - 1)
1226 {
1227 code = LT;
1228 op_b = GEN_INT (desired_val);
1229 }
1230 break;
1231 case GT:
1232 if (actual_val == desired_val - 1)
1233 {
1234 code = GE;
1235 op_b = GEN_INT (desired_val);
1236 }
1237 break;
1238 case GE:
1239 if (actual_val == desired_val + 1)
1240 {
1241 code = GT;
1242 op_b = GEN_INT (desired_val);
1243 }
1244 break;
1245 default:
1246 break;
1247 }
1248 }
1249
1250 /* If we made any changes, generate a new conditional that is
1251 equivalent to what we started with, but has the right
1252 constants in it. */
1253 if (code != GET_CODE (if_info->cond)
1254 || op_a != XEXP (if_info->cond, 0)
1255 || op_b != XEXP (if_info->cond, 1))
1256 {
1257 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1258 *earliest = if_info->cond_earliest;
1259 return cond;
1260 }
1261 }
1262
1263 cond = canonicalize_condition (if_info->jump, cond, reverse,
1264 earliest, target);
1265 if (! cond || ! reg_mentioned_p (target, cond))
1266 return NULL;
1267
1268 /* We almost certainly searched back to a different place.
1269 Need to re-verify correct lifetimes. */
1270
1271 /* X may not be mentioned in the range (cond_earliest, jump]. */
1272 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1273 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1274 return NULL;
1275
1276 /* A and B may not be modified in the range [cond_earliest, jump). */
1277 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1278 if (INSN_P (insn)
1279 && (modified_in_p (if_info->a, insn)
1280 || modified_in_p (if_info->b, insn)))
1281 return NULL;
1282
1283 return cond;
1284 }
1285
1286 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1287
1288 static int
1289 noce_try_minmax (if_info)
1290 struct noce_if_info *if_info;
1291 {
1292 rtx cond, earliest, target, seq;
1293 enum rtx_code code, op;
1294 int unsignedp;
1295
1296 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1297 if (no_new_pseudos)
1298 return FALSE;
1299
1300 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1301 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1302 to get the target to tell us... */
1303 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1304 || HONOR_NANS (GET_MODE (if_info->x)))
1305 return FALSE;
1306
1307 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1308 if (!cond)
1309 return FALSE;
1310
1311 /* Verify the condition is of the form we expect, and canonicalize
1312 the comparison code. */
1313 code = GET_CODE (cond);
1314 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1315 {
1316 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1317 return FALSE;
1318 }
1319 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1320 {
1321 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1322 return FALSE;
1323 code = swap_condition (code);
1324 }
1325 else
1326 return FALSE;
1327
1328 /* Determine what sort of operation this is. Note that the code is for
1329 a taken branch, so the code->operation mapping appears backwards. */
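     /* E.g. if the (branch-taken) condition is A < B, then X receives B
        exactly when B is the larger value, so the operation is SMAX.  */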
1330 switch (code)
1331 {
1332 case LT:
1333 case LE:
1334 case UNLT:
1335 case UNLE:
1336 op = SMAX;
1337 unsignedp = 0;
1338 break;
1339 case GT:
1340 case GE:
1341 case UNGT:
1342 case UNGE:
1343 op = SMIN;
1344 unsignedp = 0;
1345 break;
1346 case LTU:
1347 case LEU:
1348 op = UMAX;
1349 unsignedp = 1;
1350 break;
1351 case GTU:
1352 case GEU:
1353 op = UMIN;
1354 unsignedp = 1;
1355 break;
1356 default:
1357 return FALSE;
1358 }
1359
1360 start_sequence ();
1361
1362 target = expand_simple_binop (GET_MODE (if_info->x), op,
1363 if_info->a, if_info->b,
1364 if_info->x, unsignedp, OPTAB_WIDEN);
1365 if (! target)
1366 {
1367 end_sequence ();
1368 return FALSE;
1369 }
1370 if (target != if_info->x)
1371 noce_emit_move_insn (if_info->x, target);
1372
1373 seq = get_insns ();
1374 end_sequence ();
1375
1376 if (seq_contains_jump (seq))
1377 return FALSE;
1378
1379 emit_insns_before (seq, if_info->jump);
1380 if_info->cond = cond;
1381 if_info->cond_earliest = earliest;
1382
1383 return TRUE;
1384 }
1385
1386 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1387
1388 static int
1389 noce_try_abs (if_info)
1390 struct noce_if_info *if_info;
1391 {
1392 rtx cond, earliest, target, seq, a, b, c;
1393 int negate;
1394
1395 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1396 if (no_new_pseudos)
1397 return FALSE;
1398
1399 /* Recognize A and B as constituting an ABS or NABS. */
1400 a = if_info->a;
1401 b = if_info->b;
1402 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1403 negate = 0;
1404 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1405 {
1406 c = a; a = b; b = c;
1407 negate = 1;
1408 }
1409 else
1410 return FALSE;
1411
1412 cond = noce_get_alt_condition (if_info, b, &earliest);
1413 if (!cond)
1414 return FALSE;
1415
1416 /* Verify the condition is of the form we expect. */
1417 if (rtx_equal_p (XEXP (cond, 0), b))
1418 c = XEXP (cond, 1);
1419 else if (rtx_equal_p (XEXP (cond, 1), b))
1420 c = XEXP (cond, 0);
1421 else
1422 return FALSE;
1423
1424 /* Verify that C is zero. Search backward through the block for
1425 a REG_EQUAL note if necessary. */
1426 if (REG_P (c))
1427 {
1428 rtx insn, note = NULL;
1429 for (insn = earliest;
1430 insn != if_info->test_bb->head;
1431 insn = PREV_INSN (insn))
1432 if (INSN_P (insn)
1433 && ((note = find_reg_note (insn, REG_EQUAL, c))
1434 || (note = find_reg_note (insn, REG_EQUIV, c))))
1435 break;
1436 if (! note)
1437 return FALSE;
1438 c = XEXP (note, 0);
1439 }
1440 if (GET_CODE (c) == MEM
1441 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1442 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1443 c = get_pool_constant (XEXP (c, 0));
1444
1445 /* Work around funny ideas get_condition has wrt canonicalization.
1446 Note that these rtx constants are known to be CONST_INT, and
1447 therefore imply integer comparisons. */
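     /* I.e. a source-level "a >= 0" or "a <= 0" may come back from
        get_condition as "a > -1" or "a < 1" respectively.  */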
1448 if (c == constm1_rtx && GET_CODE (cond) == GT)
1449 ;
1450 else if (c == const1_rtx && GET_CODE (cond) == LT)
1451 ;
1452 else if (c != CONST0_RTX (GET_MODE (b)))
1453 return FALSE;
1454
1455 /* Determine what sort of operation this is. */
1456 switch (GET_CODE (cond))
1457 {
1458 case LT:
1459 case LE:
1460 case UNLT:
1461 case UNLE:
1462 negate = !negate;
1463 break;
1464 case GT:
1465 case GE:
1466 case UNGT:
1467 case UNGE:
1468 break;
1469 default:
1470 return FALSE;
1471 }
1472
1473 start_sequence ();
1474
1475 target = expand_simple_unop (GET_MODE (if_info->x), ABS, b, if_info->x, 0);
1476
 1477   /* ??? It's a quandary whether cmove would be better here, especially
1478 for integers. Perhaps combine will clean things up. */
1479 if (target && negate)
1480 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1481
1482 if (! target)
1483 {
1484 end_sequence ();
1485 return FALSE;
1486 }
1487
1488 if (target != if_info->x)
1489 noce_emit_move_insn (if_info->x, target);
1490
1491 seq = get_insns ();
1492 end_sequence ();
1493
1494 if (seq_contains_jump (seq))
1495 return FALSE;
1496
1497 emit_insns_before (seq, if_info->jump);
1498 if_info->cond = cond;
1499 if_info->cond_earliest = earliest;
1500
1501 return TRUE;
1502 }
1503
1504 /* Look for the condition for the jump first. We'd prefer to avoid
1505 get_condition if we can -- it tries to look back for the contents
1506 of an original compare. On targets that use normal integers for
1507 comparisons, e.g. alpha, this is wasteful. */
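     /* The condition returned is the one under which the branch is taken,
        and *EARLIEST is set to the earliest insn involved in computing it
        (the jump itself in the simple register case).  */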
1508
1509 static rtx
1510 noce_get_condition (jump, earliest)
1511 rtx jump;
1512 rtx *earliest;
1513 {
1514 rtx cond;
1515 rtx set;
1516
1517 /* If the condition variable is a register and is MODE_INT, accept it.
1518 Otherwise, fall back on get_condition. */
1519
1520 if (! any_condjump_p (jump))
1521 return NULL_RTX;
1522
1523 set = pc_set (jump);
1524
1525 cond = XEXP (SET_SRC (set), 0);
1526 if (GET_CODE (XEXP (cond, 0)) == REG
1527 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1528 {
1529 *earliest = jump;
1530
1531 /* If this branches to JUMP_LABEL when the condition is false,
1532 reverse the condition. */
1533 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1534 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1535 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1536 GET_MODE (cond), XEXP (cond, 0),
1537 XEXP (cond, 1));
1538 }
1539 else
1540 cond = get_condition (jump, earliest);
1541
1542 return cond;
1543 }
1544
1545 /* Return true if OP is ok for if-then-else processing. */
1546
1547 static int
1548 noce_operand_ok (op)
1549 rtx op;
1550 {
1551 /* We special-case memories, so handle any of them with
1552 no address side effects. */
1553 if (GET_CODE (op) == MEM)
1554 return ! side_effects_p (XEXP (op, 0));
1555
1556 if (side_effects_p (op))
1557 return FALSE;
1558
1559 return ! may_trap_p (op);
1560 }
1561
1562 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1563 without using conditional execution. Return TRUE if we were
 1564    successful at converting the block.  */
1565
1566 static int
1567 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1568 basic_block test_bb; /* Basic block test is in */
1569 basic_block then_bb; /* Basic block for THEN block */
1570 basic_block else_bb; /* Basic block for ELSE block */
1571 basic_block join_bb; /* Basic block the join label is in */
1572 {
1573 /* We're looking for patterns of the form
1574
1575 (1) if (...) x = a; else x = b;
1576 (2) x = b; if (...) x = a;
1577 (3) if (...) x = a; // as if with an initial x = x.
1578
 1579      The latter patterns require jumps to be more expensive.
1580
1581 ??? For future expansion, look for multiple X in such patterns. */
1582
1583 struct noce_if_info if_info;
1584 rtx insn_a, insn_b;
1585 rtx set_a, set_b;
1586 rtx orig_x, x, a, b;
1587 rtx jump, cond, insn;
1588
1589 /* If this is not a standard conditional jump, we can't parse it. */
1590 jump = test_bb->end;
1591 cond = noce_get_condition (jump, &if_info.cond_earliest);
1592 if (! cond)
1593 return FALSE;
1594
1595 /* If the conditional jump is more than just a conditional jump,
1596 then we can not do if-conversion on this block. */
1597 if (! onlyjump_p (jump))
1598 return FALSE;
1599
1600 /* We must be comparing objects whose modes imply the size. */
1601 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1602 return FALSE;
1603
1604 /* Look for one of the potential sets. */
1605 insn_a = first_active_insn (then_bb);
1606 if (! insn_a
1607 || ! last_active_insn_p (then_bb, insn_a)
1608 || (set_a = single_set (insn_a)) == NULL_RTX)
1609 return FALSE;
1610
1611 x = SET_DEST (set_a);
1612 a = SET_SRC (set_a);
1613
1614 /* Look for the other potential set. Make sure we've got equivalent
1615 destinations. */
1616 /* ??? This is overconservative. Storing to two different mems is
1617 as easy as conditionally computing the address. Storing to a
1618 single mem merely requires a scratch memory to use as one of the
1619 destination addresses; often the memory immediately below the
1620 stack pointer is available for this. */
1621 set_b = NULL_RTX;
1622 if (else_bb)
1623 {
1624 insn_b = first_active_insn (else_bb);
1625 if (! insn_b
1626 || ! last_active_insn_p (else_bb, insn_b)
1627 || (set_b = single_set (insn_b)) == NULL_RTX
1628 || ! rtx_equal_p (x, SET_DEST (set_b)))
1629 return FALSE;
1630 }
1631 else
1632 {
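     /* No ELSE block: look for "x = b" immediately before the insns
        computing the condition (pattern (2) above).  It must be a plain
        INSN with a single set of X whose source does not mention X, and X
        must not appear in the condition or in A; otherwise fall back to
        treating B as X itself (pattern (3)).  */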
1633 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1634 if (! insn_b
1635 || GET_CODE (insn_b) != INSN
1636 || (set_b = single_set (insn_b)) == NULL_RTX
1637 || ! rtx_equal_p (x, SET_DEST (set_b))
1638 || reg_mentioned_p (x, cond)
1639 || reg_mentioned_p (x, a)
1640 || reg_mentioned_p (x, SET_SRC (set_b)))
1641 insn_b = set_b = NULL_RTX;
1642 }
1643 b = (set_b ? SET_SRC (set_b) : x);
1644
1645 /* X may not be mentioned in the range (cond_earliest, jump]. */
1646 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1647 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1648 return FALSE;
1649
1650 /* A and B may not be modified in the range [cond_earliest, jump). */
1651 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1652 if (INSN_P (insn)
1653 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1654 return FALSE;
1655
1656 /* Only operate on register destinations, and even then avoid extending
1657 the lifetime of hard registers on small register class machines. */
1658 orig_x = x;
1659 if (GET_CODE (x) != REG
1660 || (SMALL_REGISTER_CLASSES
1661 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1662 {
1663 if (no_new_pseudos)
1664 return FALSE;
1665 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1666 ? XEXP (x, 0) : x));
1667 }
1668
1669 /* Don't operate on sources that may trap or are volatile. */
1670 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1671 return FALSE;
1672
1673 /* Set up the info block for our subroutines. */
1674 if_info.test_bb = test_bb;
1675 if_info.cond = cond;
1676 if_info.jump = jump;
1677 if_info.insn_a = insn_a;
1678 if_info.insn_b = insn_b;
1679 if_info.x = x;
1680 if_info.a = a;
1681 if_info.b = b;
1682
1683 /* Try optimizations in some approximation of a useful order. */
1684 /* ??? Should first look to see if X is live incoming at all. If it
1685 isn't, we don't need anything but an unconditional set. */
1686
1687 /* Look and see if A and B are really the same. Avoid creating silly
1688 cmove constructs that no one will fix up later. */
1689 if (rtx_equal_p (a, b))
1690 {
1691 /* If we have an INSN_B, we don't have to create any new rtl. Just
1692 move the instruction that we already have. If we don't have an
1693 INSN_B, that means that A == X, and we've got a noop move. In
1694 that case don't do anything and let the code below delete INSN_A. */
1695 if (insn_b && else_bb)
1696 {
1697 rtx note;
1698
1699 if (else_bb && insn_b == else_bb->end)
1700 else_bb->end = PREV_INSN (insn_b);
1701 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1702
1703 /* If there was a REG_EQUAL note, delete it since it may have been
1704 true due to this insn being after a jump. */
1705 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1706 remove_note (insn_b, note);
1707
1708 insn_b = NULL_RTX;
1709 }
1710 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1711 x must be executed twice. */
1712 else if (insn_b && side_effects_p (orig_x))
1713 return FALSE;
1714
1715 x = orig_x;
1716 goto success;
1717 }
1718
1719 if (noce_try_store_flag (&if_info))
1720 goto success;
1721 if (noce_try_minmax (&if_info))
1722 goto success;
1723 if (noce_try_abs (&if_info))
1724 goto success;
1725 if (HAVE_conditional_move
1726 && noce_try_cmove (&if_info))
1727 goto success;
1728 if (! HAVE_conditional_execution)
1729 {
1730 if (noce_try_store_flag_constants (&if_info))
1731 goto success;
1732 if (noce_try_store_flag_inc (&if_info))
1733 goto success;
1734 if (noce_try_store_flag_mask (&if_info))
1735 goto success;
1736 if (HAVE_conditional_move
1737 && noce_try_cmove_arith (&if_info))
1738 goto success;
1739 }
1740
1741 return FALSE;
1742
1743 success:
1744 /* The original sets may now be killed. */
1745 delete_insn (insn_a);
1746
1747 /* Several special cases here: First, we may have reused insn_b above,
1748 in which case insn_b is now NULL. Second, we want to delete insn_b
 1749      if it came from the ELSE block, because it follows the now-correct
1750 write that appears in the TEST block. However, if we got insn_b from
1751 the TEST block, it may in fact be loading data needed for the comparison.
1752 We'll let life_analysis remove the insn if it's really dead. */
1753 if (insn_b && else_bb)
1754 delete_insn (insn_b);
1755
1756 /* The new insns will have been inserted just before the jump. We should
1757 be able to remove the jump with impunity, but the condition itself may
1758 have been modified by gcse to be shared across basic blocks. */
1759 delete_insn (jump);
1760
1761 /* If we used a temporary, fix it up now. */
1762 if (orig_x != x)
1763 {
1764 start_sequence ();
1765 noce_emit_move_insn (copy_rtx (orig_x), x);
1766 insn_b = gen_sequence ();
1767 end_sequence ();
1768
1769 emit_insn_after (insn_b, test_bb->end);
1770 }
1771
1772 /* Merge the blocks! */
1773 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1774
1775 return TRUE;
1776 }
1777 \f
1778 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1779 straight line code. Return true if successful. */
1780
1781 static int
1782 process_if_block (test_bb, then_bb, else_bb, join_bb)
1783 basic_block test_bb; /* Basic block test is in */
1784 basic_block then_bb; /* Basic block for THEN block */
1785 basic_block else_bb; /* Basic block for ELSE block */
1786 basic_block join_bb; /* Basic block the join label is in */
1787 {
1788 if (! reload_completed
1789 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1790 return TRUE;
1791
1792 if (HAVE_conditional_execution
1793 && reload_completed
1794 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1795 return TRUE;
1796
1797 return FALSE;
1798 }
1799
1800 /* Merge the blocks and mark for local life update. */
1801
1802 static void
1803 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1804 basic_block test_bb; /* Basic block test is in */
1805 basic_block then_bb; /* Basic block for THEN block */
1806 basic_block else_bb; /* Basic block for ELSE block */
1807 basic_block join_bb; /* Basic block the join label is in */
1808 {
1809 basic_block combo_bb;
1810
1811 /* All block merging is done into the lower block numbers. */
1812
1813 combo_bb = test_bb;
1814
 1815   /* First merge the THEN block into the TEST block.  This is a no-brainer since
1816 the THEN block did not have a code label to begin with. */
1817 if (then_bb)
1818 {
1819 if (combo_bb->global_live_at_end)
1820 COPY_REG_SET (combo_bb->global_live_at_end,
1821 then_bb->global_live_at_end);
1822 merge_blocks_nomove (combo_bb, then_bb);
1823 num_removed_blocks++;
1824 }
1825
1826 /* The ELSE block, if it existed, had a label. That label count
1827 will almost always be zero, but odd things can happen when labels
1828 get their addresses taken. */
1829 if (else_bb)
1830 {
1831 merge_blocks_nomove (combo_bb, else_bb);
1832 num_removed_blocks++;
1833 }
1834
1835 /* If there was no join block reported, that means it was not adjacent
1836 to the others, and so we cannot merge them. */
1837
1838 if (! join_bb)
1839 {
1840 rtx last = combo_bb->end;
1841
1842 /* The outgoing edge for the current COMBO block should already
1843 be correct. Verify this. */
1844 if (combo_bb->succ == NULL_EDGE)
1845 {
1846 if (find_reg_note (last, REG_NORETURN, NULL))
1847 ;
1848 else if (GET_CODE (last) == INSN
1849 && GET_CODE (PATTERN (last)) == TRAP_IF
1850 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
1851 ;
1852 else
1853 abort ();
1854 }
1855
1856 /* There should still be something at the end of the THEN or ELSE
1857 blocks taking us to our final destination. */
1858 else if (GET_CODE (last) == JUMP_INSN)
1859 ;
1860 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
1861 && GET_CODE (last) == CALL_INSN
1862 && SIBLING_CALL_P (last))
1863 ;
1864 else if ((combo_bb->succ->flags & EDGE_EH)
1865 && can_throw_internal (last))
1866 ;
1867 else
1868 abort ();
1869 }
1870
1871 /* The JOIN block may have had quite a number of other predecessors too.
1872 Since we've already merged the TEST, THEN and ELSE blocks, we should
1873 have only one remaining edge from our if-then-else diamond. If there
1874 is more than one remaining edge, it must come from elsewhere. There
1875 may be zero incoming edges if the THEN block didn't actually join
1876 back up (as with a call to abort). */
1877 else if ((join_bb->pred == NULL
1878 || join_bb->pred->pred_next == NULL)
1879 && join_bb != EXIT_BLOCK_PTR)
1880 {
1881 /* We can merge the JOIN. */
1882 if (combo_bb->global_live_at_end)
1883 COPY_REG_SET (combo_bb->global_live_at_end,
1884 join_bb->global_live_at_end);
1885 merge_blocks_nomove (combo_bb, join_bb);
1886 num_removed_blocks++;
1887 }
1888 else
1889 {
1890 /* We cannot merge the JOIN. */
1891
1892 /* The outgoing edge for the current COMBO block should already
1893 be correct. Verify this. */
1894 if (combo_bb->succ->succ_next != NULL_EDGE
1895 || combo_bb->succ->dest != join_bb)
1896 abort ();
1897
1898 /* Remove the jump and cruft from the end of the COMBO block. */
1899 if (join_bb != EXIT_BLOCK_PTR)
1900 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1901 }
1902
1903 num_updated_if_blocks++;
1904 }
1905 \f
1906 /* Find a block ending in a simple IF condition. Return TRUE if
1907 we were able to transform it in some way. */
1908
1909 static int
1910 find_if_header (test_bb)
1911 basic_block test_bb;
1912 {
1913 edge then_edge;
1914 edge else_edge;
1915
1916 /* The kind of block we're looking for has exactly two successors. */
1917 if ((then_edge = test_bb->succ) == NULL_EDGE
1918 || (else_edge = then_edge->succ_next) == NULL_EDGE
1919 || else_edge->succ_next != NULL_EDGE)
1920 return FALSE;
1921
1922 /* Neither edge should be abnormal. */
1923 if ((then_edge->flags & EDGE_COMPLEX)
1924 || (else_edge->flags & EDGE_COMPLEX))
1925 return FALSE;
1926
1927 /* The THEN edge is canonically the one that falls through. */
1928 if (then_edge->flags & EDGE_FALLTHRU)
1929 ;
1930 else if (else_edge->flags & EDGE_FALLTHRU)
1931 {
1932 edge e = else_edge;
1933 else_edge = then_edge;
1934 then_edge = e;
1935 }
1936 else
1937 /* Otherwise this must be a multiway branch of some sort. */
1938 return FALSE;
1939
1940 if (find_if_block (test_bb, then_edge, else_edge))
1941 goto success;
1942 if (HAVE_trap && HAVE_conditional_trap
1943 && find_cond_trap (test_bb, then_edge, else_edge))
1944 goto success;
1945 if (post_dominators
1946 && (! HAVE_conditional_execution || reload_completed))
1947 {
1948 if (find_if_case_1 (test_bb, then_edge, else_edge))
1949 goto success;
1950 if (find_if_case_2 (test_bb, then_edge, else_edge))
1951 goto success;
1952 }
1953
1954 return FALSE;
1955
1956 success:
1957 if (rtl_dump_file)
1958 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1959 return TRUE;
1960 }
1961
1962 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1963 block. If so, we'll try to convert the insns to not require the branch.
1964 Return TRUE if we were successful at converting the block. */
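/* Schematically, the shapes accepted below are

	TEST -> THEN -> JOIN
	TEST ----------> JOIN		(IF-THEN; the other TEST edge
					 goes straight to the join block)
   and

	TEST -> THEN -> JOIN
	TEST -> ELSE -> JOIN		(a proper IF-THEN-ELSE diamond)

   plus the degenerate case where THEN has no successors at all (e.g. it
   ends in a noreturn call), provided ELSE has a single predecessor.  */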
1965
1966 static int
1967 find_if_block (test_bb, then_edge, else_edge)
1968 basic_block test_bb;
1969 edge then_edge, else_edge;
1970 {
1971 basic_block then_bb = then_edge->dest;
1972 basic_block else_bb = else_edge->dest;
1973 basic_block join_bb = NULL_BLOCK;
1974 edge then_succ = then_bb->succ;
1975 edge else_succ = else_bb->succ;
1976 basic_block next;
1977
1978 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1979 if (then_bb->pred->pred_next != NULL_EDGE)
1980 return FALSE;
1981
1982 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1983 if (then_succ != NULL_EDGE
1984 && (then_succ->succ_next != NULL_EDGE
1985 || (then_succ->flags & EDGE_COMPLEX)))
1986 return FALSE;
1987
1988 /* If the THEN block has no successors, conditional execution can still
1989 make a conditional call. Don't do this unless the ELSE block has
1990 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1991 Check for the last insn of the THEN block being an indirect jump, which
1992 is listed as not having any successors, but confuses the rest of the CE
1993 code processing. XXX we should fix this in the future. */
1994 if (then_succ == NULL)
1995 {
1996 if (else_bb->pred->pred_next == NULL_EDGE)
1997 {
1998 rtx last_insn = then_bb->end;
1999
2000 while (last_insn
2001 && GET_CODE (last_insn) == NOTE
2002 && last_insn != then_bb->head)
2003 last_insn = PREV_INSN (last_insn);
2004
2005 if (last_insn
2006 && GET_CODE (last_insn) == JUMP_INSN
2007 && ! simplejump_p (last_insn))
2008 return FALSE;
2009
2010 join_bb = else_bb;
2011 else_bb = NULL_BLOCK;
2012 }
2013 else
2014 return FALSE;
2015 }
2016
2017 /* If the THEN block's successor is the destination of the TEST block's
2018 other edge, then we have an IF-THEN combo without an ELSE. */
2019 else if (then_succ->dest == else_bb)
2020 {
2021 join_bb = else_bb;
2022 else_bb = NULL_BLOCK;
2023 }
2024
2025 /* If the THEN and ELSE blocks meet in a subsequent block, and the ELSE
2026 has exactly one predecessor and one successor, and the outgoing edge
2027 is not complex, then we have an IF-THEN-ELSE combo. */
2028 else if (else_succ != NULL_EDGE
2029 && then_succ->dest == else_succ->dest
2030 && else_bb->pred->pred_next == NULL_EDGE
2031 && else_succ->succ_next == NULL_EDGE
2032 && ! (else_succ->flags & EDGE_COMPLEX))
2033 join_bb = else_succ->dest;
2034
2035 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2036 else
2037 return FALSE;
2038
2039 num_possible_if_blocks++;
2040
2041 if (rtl_dump_file)
2042 {
2043 if (else_bb)
2044 fprintf (rtl_dump_file,
2045 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
2046 test_bb->index, then_bb->index, else_bb->index,
2047 join_bb->index);
2048 else
2049 fprintf (rtl_dump_file,
2050 "\nIF-THEN block found, start %d, then %d, join %d\n",
2051 test_bb->index, then_bb->index, join_bb->index);
2052 }
2053
2054 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we
2055 get the first condition for free, since we've already asserted that
2056 there's a fallthru edge from IF to THEN. */
2057 /* ??? As an enhancement, move the ELSE block. Have to deal with
2058 BLOCK notes, if by no other means than aborting the merge if they
2059 exist. Sticky enough I don't want to think about it now. */
2060 next = then_bb;
2061 if (else_bb && (next = next->next_bb) != else_bb)
2062 return FALSE;
2063 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2064 {
2065 if (else_bb)
2066 join_bb = NULL;
2067 else
2068 return FALSE;
2069 }
2070
2071 /* Do the real work. */
2072 return process_if_block (test_bb, then_bb, else_bb, join_bb);
2073 }
2074
2075 /* Convert a branch over a trap, or a branch to a trap,
2076 into a conditional trap. */
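/* For example (a source-level sketch; on a HAVE_trap target the trap
   typically comes from expanding __builtin_trap):

	if (x == 0)
	  __builtin_trap ();

   i.e. a branch around (or to) an unconditional trap becomes a single
   conditional trap insn on the appropriate, possibly reversed,
   condition, and the now-unreachable trap block is deleted.  */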
2077
2078 static int
2079 find_cond_trap (test_bb, then_edge, else_edge)
2080 basic_block test_bb;
2081 edge then_edge, else_edge;
2082 {
2083 basic_block then_bb, else_bb, trap_bb, other_bb;
2084 rtx trap, jump, cond, cond_earliest, seq;
2085 enum rtx_code code;
2086
2087 then_bb = then_edge->dest;
2088 else_bb = else_edge->dest;
2089
2090 /* Locate the block with the trap instruction. */
2091 /* ??? While we look for no successors, we really ought to allow
2092 EH successors. Need to fix merge_if_block for that to work. */
2093 if ((trap = block_has_only_trap (then_bb)) != NULL)
2094 trap_bb = then_bb, other_bb = else_bb;
2095 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2096 trap_bb = else_bb, other_bb = then_bb;
2097 else
2098 return FALSE;
2099
2100 if (rtl_dump_file)
2101 {
2102 fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2103 test_bb->index, trap_bb->index);
2104 }
2105
2106 /* If this is not a standard conditional jump, we can't parse it. */
2107 jump = test_bb->end;
2108 cond = noce_get_condition (jump, &cond_earliest);
2109 if (! cond)
2110 return FALSE;
2111
2112 /* If the conditional jump is more than just a conditional jump,
2113 then we can not do if-conversion on this block. */
2114 if (! onlyjump_p (jump))
2115 return FALSE;
2116
2117 /* We must be comparing objects whose modes imply the size. */
2118 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2119 return FALSE;
2120
2121 /* Reverse the comparison code, if necessary. */
2122 code = GET_CODE (cond);
2123 if (then_bb == trap_bb)
2124 {
2125 code = reversed_comparison_code (cond, jump);
2126 if (code == UNKNOWN)
2127 return FALSE;
2128 }
2129
2130 /* Attempt to generate the conditional trap. */
2131 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2132 TRAP_CODE (PATTERN (trap)));
2133 if (seq == NULL)
2134 return FALSE;
2135
2136 /* Emit the new insns before cond_earliest. */
2137 emit_insn_before (seq, cond_earliest);
2138
2139 /* Delete the trap block if possible. */
2140 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2141 if (trap_bb->pred == NULL)
2142 {
2143 flow_delete_block (trap_bb);
2144 num_removed_blocks++;
2145 }
2146
2147 /* If the non-trap block and the test are now adjacent, merge them.
2148 Otherwise we must insert a direct branch. */
2149 if (test_bb->next_bb == other_bb)
2150 {
2151 delete_insn (jump);
2152 merge_if_block (test_bb, NULL, NULL, other_bb);
2153 }
2154 else
2155 {
2156 rtx lab, newjump;
2157
2158 lab = JUMP_LABEL (jump);
2159 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2160 LABEL_NUSES (lab) += 1;
2161 JUMP_LABEL (newjump) = lab;
2162 emit_barrier_after (newjump);
2163
2164 delete_insn (jump);
2165 }
2166
2167 return TRUE;
2168 }
2169
2170 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2171 return it. */
2172
2173 static rtx
2174 block_has_only_trap (bb)
2175 basic_block bb;
2176 {
2177 rtx trap;
2178
2179 /* We're not the exit block. */
2180 if (bb == EXIT_BLOCK_PTR)
2181 return NULL_RTX;
2182
2183 /* The block must have no successors. */
2184 if (bb->succ)
2185 return NULL_RTX;
2186
2187 /* The only instruction in the THEN block must be the trap. */
2188 trap = first_active_insn (bb);
2189 if (! (trap == bb->end
2190 && GET_CODE (PATTERN (trap)) == TRAP_IF
2191 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2192 return NULL_RTX;
2193
2194 return trap;
2195 }
2196
2197 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2198 transformable, but not necessarily the other. There need be no
2199 JOIN block.
2200
2201 Return TRUE if we were successful at converting the block.
2202
2203 Cases we'd like to look at:
2204
2205 (1)
2206 if (test) goto over; // x not live
2207 x = a;
2208 goto label;
2209 over:
2210
2211 becomes
2212
2213 x = a;
2214 if (! test) goto label;
2215
2216 (2)
2217 if (test) goto E; // x not live
2218 x = big();
2219 goto L;
2220 E:
2221 x = b;
2222 goto M;
2223
2224 becomes
2225
2226 x = b;
2227 if (test) goto M;
2228 x = big();
2229 goto L;
2230
2231 (3) // This one's really only interesting for targets that can do
2232 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2233 // it results in multiple branches on a cache line, which often
2234 // does not sit well with predictors.
2235
2236 if (test1) goto E; // predicted not taken
2237 x = a;
2238 if (test2) goto F;
2239 ...
2240 E:
2241 x = b;
2242 J:
2243
2244 becomes
2245
2246 x = a;
2247 if (test1) goto E;
2248 if (test2) goto F;
2249
2250 Notes:
2251
2252 (A) Don't do (2) if the branch is predicted against the block we're
2253 eliminating. Do it anyway if we can eliminate a branch; this requires
2254 that the sole successor of the eliminated block postdominate the other
2255 side of the if.
2256
2257 (B) With CE, on (3) we can steal from both sides of the if, creating
2258
2259 if (test1) x = a;
2260 if (!test1) x = b;
2261 if (test1) goto J;
2262 if (test2) goto F;
2263 ...
2264 J:
2265
2266 Again, this is most useful if J postdominates.
2267
2268 (C) CE substitutes for helpful life information.
2269
2270 (D) These heuristics need a lot of work. */
2271
2272 /* Tests for case 1 above. */
2273
2274 static int
2275 find_if_case_1 (test_bb, then_edge, else_edge)
2276 basic_block test_bb;
2277 edge then_edge, else_edge;
2278 {
2279 basic_block then_bb = then_edge->dest;
2280 basic_block else_bb = else_edge->dest, new_bb;
2281 edge then_succ = then_bb->succ;
2282
2283 /* THEN has one successor. */
2284 if (!then_succ || then_succ->succ_next != NULL)
2285 return FALSE;
2286
2287 /* THEN does not fall through, but is not strange either. */
2288 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2289 return FALSE;
2290
2291 /* THEN has one predecessor. */
2292 if (then_bb->pred->pred_next != NULL)
2293 return FALSE;
2294
2295 /* THEN must do something. */
2296 if (forwarder_block_p (then_bb))
2297 return FALSE;
2298
2299 num_possible_if_blocks++;
2300 if (rtl_dump_file)
2301 fprintf (rtl_dump_file,
2302 "\nIF-CASE-1 found, start %d, then %d\n",
2303 test_bb->index, then_bb->index);
2304
2305 /* THEN is small. */
2306 if (count_bb_insns (then_bb) > BRANCH_COST)
2307 return FALSE;
2308
2309 /* Registers set are dead, or are predicable. */
2310 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2311 then_bb->succ->dest, 1))
2312 return FALSE;
2313
2314 /* Conversion went ok, including moving the insns and fixing up the
2315 jump. Adjust the CFG to match. */
2316
2317 bitmap_operation (test_bb->global_live_at_end,
2318 else_bb->global_live_at_start,
2319 then_bb->global_live_at_end, BITMAP_IOR);
2320
2321 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2322 /* Make the rest of the code believe that the newly created block is the
2323 THEN_BB block we are going to remove. */
2324 if (new_bb)
2325 new_bb->aux = then_bb->aux;
2326 flow_delete_block (then_bb);
2327 /* We may have created a jump to the next insn; cleanup_cfg will fix that
2328 up later. */
2329
2330 num_removed_blocks++;
2331 num_updated_if_blocks++;
2332
2333 return TRUE;
2334 }
2335
2336 /* Test for case 2 above. */
2337
2338 static int
2339 find_if_case_2 (test_bb, then_edge, else_edge)
2340 basic_block test_bb;
2341 edge then_edge, else_edge;
2342 {
2343 basic_block then_bb = then_edge->dest;
2344 basic_block else_bb = else_edge->dest;
2345 edge else_succ = else_bb->succ;
2346 rtx note;
2347
2348 /* ELSE has one successor. */
2349 if (!else_succ || else_succ->succ_next != NULL)
2350 return FALSE;
2351
2352 /* ELSE outgoing edge is not complex. */
2353 if (else_succ->flags & EDGE_COMPLEX)
2354 return FALSE;
2355
2356 /* ELSE has one predecessor. */
2357 if (else_bb->pred->pred_next != NULL)
2358 return FALSE;
2359
2360 /* THEN is not EXIT. */
2361 if (then_bb->index < 0)
2362 return FALSE;
2363
2364 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2365 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2366 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2367 ;
2368 else if (else_succ->dest->index < 0
2369 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
2370 ORIG_INDEX (else_succ->dest)))
2371 ;
2372 else
2373 return FALSE;
2374
2375 num_possible_if_blocks++;
2376 if (rtl_dump_file)
2377 fprintf (rtl_dump_file,
2378 "\nIF-CASE-2 found, start %d, else %d\n",
2379 test_bb->index, else_bb->index);
2380
2381 /* ELSE is small. */
2382 if (count_bb_insns (else_bb) > BRANCH_COST)
2383 return FALSE;
2384
2385 /* Registers set are dead, or are predicable. */
2386 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2387 return FALSE;
2388
2389 /* Conversion went ok, including moving the insns and fixing up the
2390 jump. Adjust the CFG to match. */
2391
2392 bitmap_operation (test_bb->global_live_at_end,
2393 then_bb->global_live_at_start,
2394 else_bb->global_live_at_end, BITMAP_IOR);
2395
2396 flow_delete_block (else_bb);
2397
2398 num_removed_blocks++;
2399 num_updated_if_blocks++;
2400
2401 /* ??? We may now fallthru from one of THEN's successors into a join
2402 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2403
2404 return TRUE;
2405 }
2406
2407 /* A subroutine of dead_or_predicable called through for_each_rtx.
2408 Return 1 if a memory is found. */
2409
2410 static int
2411 find_memory (px, data)
2412 rtx *px;
2413 void *data ATTRIBUTE_UNUSED;
2414 {
2415 return GET_CODE (*px) == MEM;
2416 }
2417
2418 /* Used by the code above to perform the actual rtl transformations.
2419 Return TRUE if successful.
2420
2421 TEST_BB is the block containing the conditional branch. MERGE_BB
2422 is the block containing the code to manipulate. NEW_DEST is the
2423 label TEST_BB should be branching to after the conversion.
2424 REVERSEP is true if the sense of the branch should be reversed. */
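/* For the IF-CASE-1 transformation above this is called with MERGE_BB
   being the THEN block, OTHER_BB the ELSE block, NEW_DEST the THEN
   block's successor and REVERSEP nonzero; for IF-CASE-2, MERGE_BB is
   the ELSE block, OTHER_BB the THEN block, NEW_DEST the ELSE block's
   successor and REVERSEP zero.  */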
2425
2426 static int
2427 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2428 basic_block test_bb, merge_bb, other_bb;
2429 basic_block new_dest;
2430 int reversep;
2431 {
2432 rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2433
2434 jump = test_bb->end;
2435
2436 /* Find the extent of the real code in the merge block. */
2437 head = merge_bb->head;
2438 end = merge_bb->end;
2439
2440 if (GET_CODE (head) == CODE_LABEL)
2441 head = NEXT_INSN (head);
2442 if (GET_CODE (head) == NOTE)
2443 {
2444 if (head == end)
2445 {
2446 head = end = NULL_RTX;
2447 goto no_body;
2448 }
2449 head = NEXT_INSN (head);
2450 }
2451
2452 if (GET_CODE (end) == JUMP_INSN)
2453 {
2454 if (head == end)
2455 {
2456 head = end = NULL_RTX;
2457 goto no_body;
2458 }
2459 end = PREV_INSN (end);
2460 }
2461
2462 /* Disable handling dead code by conditional execution if the machine needs
2463 to do anything funny with the tests, etc. */
2464 #ifndef IFCVT_MODIFY_TESTS
2465 if (HAVE_conditional_execution)
2466 {
2467 /* In the conditional execution case, we have things easy. We know
2468 the condition is reversible. We don't have to check life info,
2469 because we're going to conditionally execute the code anyway.
2470 All that's left is making sure the insns involved can actually
2471 be predicated. */
2472
2473 rtx cond, prob_val;
2474
2475 cond = cond_exec_get_condition (jump);
2476 if (! cond)
2477 return FALSE;
2478
2479 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2480 if (prob_val)
2481 prob_val = XEXP (prob_val, 0);
2482
2483 if (reversep)
2484 {
2485 enum rtx_code rev = reversed_comparison_code (cond, jump);
2486 if (rev == UNKNOWN)
2487 return FALSE;
2488 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2489 XEXP (cond, 1));
2490 if (prob_val)
2491 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2492 }
2493
2494 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
2495 goto cancel;
2496
2497 earliest = jump;
2498 }
2499 else
2500 #endif
2501 {
2502 /* In the non-conditional execution case, we have to verify that there
2503 are no trapping operations, no calls, no references to memory, and
2504 that any registers modified are dead at the branch site. */
2505
2506 rtx insn, cond, prev;
2507 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2508 regset merge_set, tmp, test_live, test_set;
2509 struct propagate_block_info *pbi;
2510 int i, fail = 0;
2511
2512 /* Check for no calls or trapping operations. */
2513 for (insn = head; ; insn = NEXT_INSN (insn))
2514 {
2515 if (GET_CODE (insn) == CALL_INSN)
2516 return FALSE;
2517 if (INSN_P (insn))
2518 {
2519 if (may_trap_p (PATTERN (insn)))
2520 return FALSE;
2521
2522 /* ??? Even non-trapping memories such as stack frame
2523 references must be avoided. For stores, we collect
2524 no lifetime info; for reads, we'd have to assert
2525 true_dependence false against every store in the
2526 TEST range. */
2527 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2528 return FALSE;
2529 }
2530 if (insn == end)
2531 break;
2532 }
2533
2534 if (! any_condjump_p (jump))
2535 return FALSE;
2536
2537 /* Find the extent of the conditional. */
2538 cond = noce_get_condition (jump, &earliest);
2539 if (! cond)
2540 return FALSE;
2541
2542 /* Collect:
2543 MERGE_SET = set of registers set in MERGE_BB
2544 TEST_LIVE = set of registers live at EARLIEST
2545 TEST_SET = set of registers set between EARLIEST and the
2546 end of the block. */
2547
2548 tmp = INITIALIZE_REG_SET (tmp_head);
2549 merge_set = INITIALIZE_REG_SET (merge_set_head);
2550 test_live = INITIALIZE_REG_SET (test_live_head);
2551 test_set = INITIALIZE_REG_SET (test_set_head);
2552
2553 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2554 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2555 since we've already asserted that MERGE_BB is small. */
2556 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2557
2558 /* For small register class machines, don't lengthen lifetimes of
2559 hard registers before reload. */
2560 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2561 {
2562 EXECUTE_IF_SET_IN_BITMAP
2563 (merge_set, 0, i,
2564 {
2565 if (i < FIRST_PSEUDO_REGISTER
2566 && ! fixed_regs[i]
2567 && ! global_regs[i])
2568 fail = 1;
2569 });
2570 }
2571
2572 /* For TEST, we're interested in a range of insns, not a whole block.
2573 Moreover, we start from the registers live into OTHER_BB. */
2574
2575 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2576 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2577 0);
2578
2579 for (insn = jump; ; insn = prev)
2580 {
2581 prev = propagate_one_insn (pbi, insn);
2582 if (insn == earliest)
2583 break;
2584 }
2585
2586 free_propagate_block_info (pbi);
2587
2588 /* We can perform the transformation if
2589 MERGE_SET & (TEST_SET | TEST_LIVE)
2590 and
2591 TEST_SET & merge_bb->global_live_at_start
2592 are empty. */
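      /* In other words: the merged insns may not write anything that the
	 TEST range writes or still needs, since they are being hoisted
	 above it, and the TEST range may not write anything that is live
	 into MERGE_BB, since the hoisted insns would then read it too
	 early.  */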
2593
2594 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2595 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2596 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, fail = 1);
2597
2598 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2599 BITMAP_AND);
2600 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, fail = 1);
2601
2602 FREE_REG_SET (tmp);
2603 FREE_REG_SET (merge_set);
2604 FREE_REG_SET (test_live);
2605 FREE_REG_SET (test_set);
2606
2607 if (fail)
2608 return FALSE;
2609 }
2610
2611 no_body:
2612 /* We don't want to use normal invert_jump or redirect_jump because
2613 we don't want delete_insn called. Also, we want to do our own
2614 change group management. */
2615
2616 old_dest = JUMP_LABEL (jump);
2617 if (other_bb != new_dest)
2618 {
2619 new_label = block_label (new_dest);
2620 if (reversep
2621 ? ! invert_jump_1 (jump, new_label)
2622 : ! redirect_jump_1 (jump, new_label))
2623 goto cancel;
2624 }
2625
2626 if (! apply_change_group ())
2627 return FALSE;
2628
2629 if (other_bb != new_dest)
2630 {
2631 if (old_dest)
2632 LABEL_NUSES (old_dest) -= 1;
2633 if (new_label)
2634 LABEL_NUSES (new_label) += 1;
2635 JUMP_LABEL (jump) = new_label;
2636 if (reversep)
2637 invert_br_probabilities (jump);
2638
2639 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
2640 if (reversep)
2641 {
2642 gcov_type count, probability;
2643 count = BRANCH_EDGE (test_bb)->count;
2644 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
2645 FALLTHRU_EDGE (test_bb)->count = count;
2646 probability = BRANCH_EDGE (test_bb)->probability;
2647 BRANCH_EDGE (test_bb)->probability
2648 = FALLTHRU_EDGE (test_bb)->probability;
2649 FALLTHRU_EDGE (test_bb)->probability = probability;
2650 update_br_prob_note (test_bb);
2651 }
2652 }
2653
2654 /* Move the insns out of MERGE_BB to before the branch. */
2655 if (head != NULL)
2656 {
2657 if (end == merge_bb->end)
2658 merge_bb->end = PREV_INSN (head);
2659
2660 if (squeeze_notes (&head, &end))
2661 return TRUE;
2662
2663 reorder_insns (head, end, PREV_INSN (earliest));
2664 }
2665
2666 /* Remove the jump and edge if we can. */
2667 if (other_bb == new_dest)
2668 {
2669 delete_insn (jump);
2670 remove_edge (BRANCH_EDGE (test_bb));
2671 /* ??? Can't merge blocks here, as then_bb is still in use.
2672 At minimum, the merge will get done just before bb-reorder. */
2673 }
2674
2675 return TRUE;
2676
2677 cancel:
2678 cancel_changes (0);
2679 return FALSE;
2680 }
2681 \f
2682 /* Main entry point for all if-conversion. */
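/* X_LIFE_DATA_OK is nonzero when global life information is current;
   when it is, post-dominators are computed for the IF-CASE
   transformations and life info is rebuilt for any blocks we dirty.  */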
2683
2684 void
2685 if_convert (x_life_data_ok)
2686 int x_life_data_ok;
2687 {
2688 basic_block bb;
2689
2690 num_possible_if_blocks = 0;
2691 num_updated_if_blocks = 0;
2692 num_removed_blocks = 0;
2693 life_data_ok = (x_life_data_ok != 0);
2694
2695 /* Free up basic_block_for_insn so that we don't have to keep it
2696 up to date, either here or in merge_blocks_nomove. */
2697 free_basic_block_vars (1);
2698
2699 /* Compute postdominators if we think we'll use them. */
2700 post_dominators = NULL;
2701 if (HAVE_conditional_execution || life_data_ok)
2702 {
2703 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2704 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2705 }
2706 if (life_data_ok)
2707 clear_bb_flags ();
2708
2709 /* Record initial block numbers. */
2710 FOR_EACH_BB (bb)
2711 SET_ORIG_INDEX (bb, bb->index);
2712
2713 /* Go through each of the basic blocks looking for things to convert. */
2714 FOR_EACH_BB (bb)
2715 while (find_if_header (bb))
2716 continue;
2717
2718 if (post_dominators)
2719 sbitmap_vector_free (post_dominators);
2720
2721 if (rtl_dump_file)
2722 fflush (rtl_dump_file);
2723
2724 clear_aux_for_blocks ();
2725
2726 /* Rebuild life info for basic blocks that require it. */
2727 if (num_removed_blocks && life_data_ok)
2728 {
2729 /* If we allocated new pseudos, we must resize the array for sched1. */
2730 if (max_regno < max_reg_num ())
2731 {
2732 max_regno = max_reg_num ();
2733 allocate_reg_info (max_regno, FALSE, FALSE);
2734 }
2735 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
2736 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2737 | PROP_KILL_DEAD_CODE);
2738 }
2739
2740 /* Write the final stats. */
2741 if (rtl_dump_file && num_possible_if_blocks > 0)
2742 {
2743 fprintf (rtl_dump_file,
2744 "\n%d possible IF blocks searched.\n",
2745 num_possible_if_blocks);
2746 fprintf (rtl_dump_file,
2747 "%d IF blocks converted.\n",
2748 num_updated_if_blocks);
2749 fprintf (rtl_dump_file,
2750 "%d basic blocks deleted.\n\n\n",
2751 num_removed_blocks);
2752 }
2753
2754 #ifdef ENABLE_CHECKING
2755 verify_flow_info ();
2756 #endif
2757 }