gcc/ifcvt.c
1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "toplev.h"
39 #include "tm_p.h"
40
41
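/* Provide default values of zero for instruction patterns the target does
   not define, so the HAVE_* macros below can be tested unconditionally.  */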
42 #ifndef HAVE_conditional_execution
43 #define HAVE_conditional_execution 0
44 #endif
45 #ifndef HAVE_conditional_move
46 #define HAVE_conditional_move 0
47 #endif
48 #ifndef HAVE_incscc
49 #define HAVE_incscc 0
50 #endif
51 #ifndef HAVE_decscc
52 #define HAVE_decscc 0
53 #endif
54 #ifndef HAVE_trap
55 #define HAVE_trap 0
56 #endif
57 #ifndef HAVE_conditional_trap
58 #define HAVE_conditional_trap 0
59 #endif
60
61 #ifndef MAX_CONDITIONAL_EXECUTE
62 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
63 #endif
64
65 #define NULL_EDGE ((struct edge_def *)NULL)
66 #define NULL_BLOCK ((struct basic_block_def *)NULL)
67
68 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
69 static int num_possible_if_blocks;
70
 71 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
 72 execution. */
73 static int num_updated_if_blocks;
74
75 /* # of basic blocks that were removed. */
76 static int num_removed_blocks;
77
78 /* Whether conditional execution changes were made. */
79 static int cond_exec_changed_p;
80
81 /* True if life data ok at present. */
82 static bool life_data_ok;
83
84 /* The post-dominator relation on the original block numbers. */
85 static dominance_info post_dominators;
86
87 /* Forward references. */
88 static int count_bb_insns PARAMS ((basic_block));
89 static rtx first_active_insn PARAMS ((basic_block));
90 static rtx last_active_insn PARAMS ((basic_block, int));
91 static int seq_contains_jump PARAMS ((rtx));
92 static basic_block block_fallthru PARAMS ((basic_block));
93 static int cond_exec_process_insns PARAMS ((ce_if_block_t *,
94 rtx, rtx, rtx, rtx, int));
95 static rtx cond_exec_get_condition PARAMS ((rtx));
96 static int cond_exec_process_if_block PARAMS ((ce_if_block_t *, int));
97 static rtx noce_get_condition PARAMS ((rtx, rtx *));
98 static int noce_operand_ok PARAMS ((rtx));
99 static int noce_process_if_block PARAMS ((ce_if_block_t *));
100 static int process_if_block PARAMS ((ce_if_block_t *));
101 static void merge_if_block PARAMS ((ce_if_block_t *));
102 static int find_cond_trap PARAMS ((basic_block, edge, edge));
103 static basic_block find_if_header PARAMS ((basic_block, int));
104 static int block_jumps_and_fallthru_p PARAMS ((basic_block, basic_block));
105 static int find_if_block PARAMS ((ce_if_block_t *));
106 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
107 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
108 static int find_memory PARAMS ((rtx *, void *));
109 static int dead_or_predicable PARAMS ((basic_block, basic_block,
110 basic_block, basic_block, int));
111 static void noce_emit_move_insn PARAMS ((rtx, rtx));
112 static rtx block_has_only_trap PARAMS ((basic_block));
113 \f
114 /* Count the number of non-jump active insns in BB. */
115
116 static int
117 count_bb_insns (bb)
118 basic_block bb;
119 {
120 int count = 0;
121 rtx insn = bb->head;
122
123 while (1)
124 {
125 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
126 count++;
127
128 if (insn == bb->end)
129 break;
130 insn = NEXT_INSN (insn);
131 }
132
133 return count;
134 }
135
136 /* Return the first non-jump active insn in the basic block. */
137
138 static rtx
139 first_active_insn (bb)
140 basic_block bb;
141 {
142 rtx insn = bb->head;
143
144 if (GET_CODE (insn) == CODE_LABEL)
145 {
146 if (insn == bb->end)
147 return NULL_RTX;
148 insn = NEXT_INSN (insn);
149 }
150
151 while (GET_CODE (insn) == NOTE)
152 {
153 if (insn == bb->end)
154 return NULL_RTX;
155 insn = NEXT_INSN (insn);
156 }
157
158 if (GET_CODE (insn) == JUMP_INSN)
159 return NULL_RTX;
160
161 return insn;
162 }
163
 164 /* Return the last active (non-jump) insn in the basic block. */
165
166 static rtx
167 last_active_insn (bb, skip_use_p)
168 basic_block bb;
169 int skip_use_p;
170 {
171 rtx insn = bb->end;
172 rtx head = bb->head;
173
174 while (GET_CODE (insn) == NOTE
175 || GET_CODE (insn) == JUMP_INSN
176 || (skip_use_p
177 && GET_CODE (insn) == INSN
178 && GET_CODE (PATTERN (insn)) == USE))
179 {
180 if (insn == head)
181 return NULL_RTX;
182 insn = PREV_INSN (insn);
183 }
184
185 if (GET_CODE (insn) == CODE_LABEL)
186 return NULL_RTX;
187
188 return insn;
189 }
190
 191 /* It is possible, especially when dealing with multi-word
 192 arithmetic, for the expanders to have emitted jumps. Search
 193 through the sequence and return TRUE if a jump exists so that
 194 we can abort the conversion. */
195
196 static int
197 seq_contains_jump (insn)
198 rtx insn;
199 {
200 while (insn)
201 {
202 if (GET_CODE (insn) == JUMP_INSN)
203 return 1;
204 insn = NEXT_INSN (insn);
205 }
206 return 0;
207 }
208
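/* Return the destination of the fall-through edge out of BB, or NULL_BLOCK
   if there is no such edge.  */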
209 static basic_block
210 block_fallthru (bb)
211 basic_block bb;
212 {
213 edge e;
214
215 for (e = bb->succ;
216 e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
217 e = e->succ_next)
218 ;
219
220 return (e) ? e->dest : NULL_BLOCK;
221 }
222 \f
223 /* Go through a bunch of insns, converting them to conditional
224 execution format if possible. Return TRUE if all of the non-note
225 insns were processed. */
226
227 static int
228 cond_exec_process_insns (ce_info, start, end, test, prob_val, mod_ok)
229 ce_if_block_t *ce_info ATTRIBUTE_UNUSED; /* if block information */
230 rtx start; /* first insn to look at */
231 rtx end; /* last insn to look at */
232 rtx test; /* conditional execution test */
233 rtx prob_val; /* probability of branch taken. */
 234 int mod_ok; /* true if modifications of the test are ok in the last insn. */
235 {
236 int must_be_last = FALSE;
237 rtx insn;
238 rtx xtest;
239 rtx pattern;
240
241 if (!start || !end)
242 return FALSE;
243
244 for (insn = start; ; insn = NEXT_INSN (insn))
245 {
246 if (GET_CODE (insn) == NOTE)
247 goto insn_done;
248
249 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
250 abort ();
251
252 /* Remove USE insns that get in the way. */
253 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
254 {
255 /* ??? Ug. Actually unlinking the thing is problematic,
256 given what we'd have to coordinate with our callers. */
257 PUT_CODE (insn, NOTE);
258 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
259 NOTE_SOURCE_FILE (insn) = 0;
260 goto insn_done;
261 }
262
263 /* Last insn wasn't last? */
264 if (must_be_last)
265 return FALSE;
266
267 if (modified_in_p (test, insn))
268 {
269 if (!mod_ok)
270 return FALSE;
271 must_be_last = TRUE;
272 }
273
274 /* Now build the conditional form of the instruction. */
275 pattern = PATTERN (insn);
276 xtest = copy_rtx (test);
277
278 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
279 two conditions. */
280 if (GET_CODE (pattern) == COND_EXEC)
281 {
282 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
283 return FALSE;
284
285 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
286 COND_EXEC_TEST (pattern));
287 pattern = COND_EXEC_CODE (pattern);
288 }
289
290 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
291
292 /* If the machine needs to modify the insn being conditionally executed,
293 say for example to force a constant integer operand into a temp
294 register, do so here. */
295 #ifdef IFCVT_MODIFY_INSN
296 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
297 if (! pattern)
298 return FALSE;
299 #endif
300
301 validate_change (insn, &PATTERN (insn), pattern, 1);
302
303 if (GET_CODE (insn) == CALL_INSN && prob_val)
304 validate_change (insn, &REG_NOTES (insn),
305 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
306 REG_NOTES (insn)), 1);
307
308 insn_done:
309 if (insn == end)
310 break;
311 }
312
313 return TRUE;
314 }
315
316 /* Return the condition for a jump. Do not do any special processing. */
317
318 static rtx
319 cond_exec_get_condition (jump)
320 rtx jump;
321 {
322 rtx test_if, cond;
323
324 if (any_condjump_p (jump))
325 test_if = SET_SRC (pc_set (jump));
326 else
327 return NULL_RTX;
328 cond = XEXP (test_if, 0);
329
330 /* If this branches to JUMP_LABEL when the condition is false,
331 reverse the condition. */
332 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
333 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
334 {
335 enum rtx_code rev = reversed_comparison_code (cond, jump);
336 if (rev == UNKNOWN)
337 return NULL_RTX;
338
339 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
340 XEXP (cond, 1));
341 }
342
343 return cond;
344 }
345
346 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
347 to conditional execution. Return TRUE if we were successful at
348 converting the block. */
349
350 static int
351 cond_exec_process_if_block (ce_info, do_multiple_p)
352 ce_if_block_t * ce_info; /* if block information */
353 int do_multiple_p; /* != 0 if we should handle && and || blocks */
354 {
355 basic_block test_bb = ce_info->test_bb; /* last test block */
356 basic_block then_bb = ce_info->then_bb; /* THEN */
357 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
358 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
359 rtx then_start; /* first insn in THEN block */
360 rtx then_end; /* last insn + 1 in THEN block */
361 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
362 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
363 int max; /* max # of insns to convert. */
364 int then_mod_ok; /* whether conditional mods are ok in THEN */
365 rtx true_expr; /* test for else block insns */
366 rtx false_expr; /* test for then block insns */
367 rtx true_prob_val; /* probability of else block */
368 rtx false_prob_val; /* probability of then block */
369 int n_insns;
370 enum rtx_code false_code;
371
372 /* If test is comprised of && or || elements, and we've failed at handling
373 all of them together, just use the last test if it is the special case of
374 && elements without an ELSE block. */
375 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
376 {
377 if (else_bb || ! ce_info->and_and_p)
378 return FALSE;
379
380 ce_info->test_bb = test_bb = ce_info->last_test_bb;
381 ce_info->num_multiple_test_blocks = 0;
382 ce_info->num_and_and_blocks = 0;
383 ce_info->num_or_or_blocks = 0;
384 }
385
386 /* Find the conditional jump to the ELSE or JOIN part, and isolate
387 the test. */
388 test_expr = cond_exec_get_condition (test_bb->end);
389 if (! test_expr)
390 return FALSE;
391
392 /* If the conditional jump is more than just a conditional jump,
393 then we can not do conditional execution conversion on this block. */
394 if (! onlyjump_p (test_bb->end))
395 return FALSE;
396
397 /* Collect the bounds of where we're to search, skipping any labels, jumps
398 and notes at the beginning and end of the block. Then count the total
399 number of insns and see if it is small enough to convert. */
400 then_start = first_active_insn (then_bb);
401 then_end = last_active_insn (then_bb, TRUE);
402 n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
403 max = MAX_CONDITIONAL_EXECUTE;
404
405 if (else_bb)
406 {
407 max *= 2;
408 else_start = first_active_insn (else_bb);
409 else_end = last_active_insn (else_bb, TRUE);
410 n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
411 }
412
413 if (n_insns > max)
414 return FALSE;
415
416 /* Map test_expr/test_jump into the appropriate MD tests to use on
417 the conditionally executed code. */
418
419 true_expr = test_expr;
420
421 false_code = reversed_comparison_code (true_expr, test_bb->end);
422 if (false_code != UNKNOWN)
423 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
424 XEXP (true_expr, 0), XEXP (true_expr, 1));
425 else
426 false_expr = NULL_RTX;
427
428 #ifdef IFCVT_MODIFY_TESTS
429 /* If the machine description needs to modify the tests, such as setting a
430 conditional execution register from a comparison, it can do so here. */
431 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
432
433 /* See if the conversion failed */
434 if (!true_expr || !false_expr)
435 goto fail;
436 #endif
437
438 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
439 if (true_prob_val)
440 {
441 true_prob_val = XEXP (true_prob_val, 0);
442 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
443 }
444 else
445 false_prob_val = NULL_RTX;
446
447 /* If we have && or || tests, do them here. These tests are in the adjacent
448 blocks after the first block containing the test. */
449 if (ce_info->num_multiple_test_blocks > 0)
450 {
451 basic_block bb = test_bb;
452 basic_block last_test_bb = ce_info->last_test_bb;
453
454 if (! false_expr)
455 goto fail;
456
457 do
458 {
459 rtx start, end;
460 rtx t, f;
461
462 bb = block_fallthru (bb);
463 start = first_active_insn (bb);
464 end = last_active_insn (bb, TRUE);
465 if (start
466 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
467 false_prob_val, FALSE))
468 goto fail;
469
470 /* If the conditional jump is more than just a conditional jump, then
471 we can not do conditional execution conversion on this block. */
472 if (! onlyjump_p (bb->end))
473 goto fail;
474
475 /* Find the conditional jump and isolate the test. */
476 t = cond_exec_get_condition (bb->end);
477 if (! t)
478 goto fail;
479
480 f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
481 GET_MODE (t),
482 XEXP (t, 0),
483 XEXP (t, 1));
484
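          /* Accumulate this block's test into the running tests: for &&
             chains the true test is the AND of the conditions and the false
             test is the IOR of the reversed conditions; for || chains it is
             the other way around.  */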
485 if (ce_info->and_and_p)
486 {
487 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
488 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
489 }
490 else
491 {
492 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
493 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
494 }
495
496 /* If the machine description needs to modify the tests, such as
497 setting a conditional execution register from a comparison, it can
498 do so here. */
499 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
500 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
501
502 /* See if the conversion failed */
503 if (!t || !f)
504 goto fail;
505 #endif
506
507 true_expr = t;
508 false_expr = f;
509 }
510 while (bb != last_test_bb);
511 }
512
513 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
 514 in the THEN block. */
515 then_mod_ok = (else_bb == NULL_BLOCK);
516
517 /* Go through the THEN and ELSE blocks converting the insns if possible
518 to conditional execution. */
519
520 if (then_end
521 && (! false_expr
522 || ! cond_exec_process_insns (ce_info, then_start, then_end,
523 false_expr, false_prob_val,
524 then_mod_ok)))
525 goto fail;
526
527 if (else_bb && else_end
528 && ! cond_exec_process_insns (ce_info, else_start, else_end,
529 true_expr, true_prob_val, TRUE))
530 goto fail;
531
532 /* If we cannot apply the changes, fail. Do not go through the normal fail
533 processing, since apply_change_group will call cancel_changes. */
534 if (! apply_change_group ())
535 {
536 #ifdef IFCVT_MODIFY_CANCEL
537 /* Cancel any machine dependent changes. */
538 IFCVT_MODIFY_CANCEL (ce_info);
539 #endif
540 return FALSE;
541 }
542
543 #ifdef IFCVT_MODIFY_FINAL
544 /* Do any machine dependent final modifications */
545 IFCVT_MODIFY_FINAL (ce_info);
546 #endif
547
548 /* Conversion succeeded. */
549 if (rtl_dump_file)
550 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
551 n_insns, (n_insns == 1) ? " was" : "s were");
552
553 /* Merge the blocks! */
554 merge_if_block (ce_info);
555 cond_exec_changed_p = TRUE;
556 return TRUE;
557
558 fail:
559 #ifdef IFCVT_MODIFY_CANCEL
560 /* Cancel any machine dependent changes. */
561 IFCVT_MODIFY_CANCEL (ce_info);
562 #endif
563
564 cancel_changes (0);
565 return FALSE;
566 }
567 \f
568 /* Used by noce_process_if_block to communicate with its subroutines.
569
570 The subroutines know that A and B may be evaluated freely. They
571 know that X is a register. They should insert new instructions
572 before cond_earliest. */
573
574 struct noce_if_info
575 {
576 basic_block test_bb;
577 rtx insn_a, insn_b;
578 rtx x, a, b;
579 rtx jump, cond, cond_earliest;
580 };
581
582 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
583 rtx, int, int));
584 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
585 static int noce_try_addcc PARAMS ((struct noce_if_info *));
586 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
587 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
588 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
589 rtx, enum rtx_code, rtx,
590 rtx, rtx, rtx));
591 static int noce_try_cmove PARAMS ((struct noce_if_info *));
592 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
593 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
594 rtx, rtx *));
595 static int noce_try_minmax PARAMS ((struct noce_if_info *));
596 static int noce_try_abs PARAMS ((struct noce_if_info *));
597
598 /* Helper function for noce_try_store_flag*. */
599
600 static rtx
601 noce_emit_store_flag (if_info, x, reversep, normalize)
602 struct noce_if_info *if_info;
603 rtx x;
604 int reversep, normalize;
605 {
606 rtx cond = if_info->cond;
607 int cond_complex;
608 enum rtx_code code;
609
610 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
611 || ! general_operand (XEXP (cond, 1), VOIDmode));
612
613 /* If earliest == jump, or when the condition is complex, try to
614 build the store_flag insn directly. */
615
616 if (cond_complex)
617 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
618
619 if (reversep)
620 code = reversed_comparison_code (cond, if_info->jump);
621 else
622 code = GET_CODE (cond);
623
624 if ((if_info->cond_earliest == if_info->jump || cond_complex)
625 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
626 {
627 rtx tmp;
628
629 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
630 XEXP (cond, 1));
631 tmp = gen_rtx_SET (VOIDmode, x, tmp);
632
633 start_sequence ();
634 tmp = emit_insn (tmp);
635
636 if (recog_memoized (tmp) >= 0)
637 {
638 tmp = get_insns ();
639 end_sequence ();
640 emit_insn (tmp);
641
642 if_info->cond_earliest = if_info->jump;
643
644 return x;
645 }
646
647 end_sequence ();
648 }
649
650 /* Don't even try if the comparison operands or the mode of X are weird. */
651 if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
652 return NULL_RTX;
653
654 return emit_store_flag (x, code, XEXP (cond, 0),
655 XEXP (cond, 1), VOIDmode,
656 (code == LTU || code == LEU
657 || code == GEU || code == GTU), normalize);
658 }
659
 660 /* Emit an insn to copy Y into X, where X may be a STRICT_LOW_PART. */
661 static void
662 noce_emit_move_insn (x, y)
663 rtx x, y;
664 {
665 enum machine_mode outmode, inmode;
666 rtx outer, inner;
667 int bitpos;
668
669 if (GET_CODE (x) != STRICT_LOW_PART)
670 {
671 emit_move_insn (x, y);
672 return;
673 }
674
675 outer = XEXP (x, 0);
676 inner = XEXP (outer, 0);
677 outmode = GET_MODE (outer);
678 inmode = GET_MODE (inner);
679 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
680 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
681 GET_MODE_BITSIZE (inmode));
682 }
683
684 /* Convert "if (test) x = 1; else x = 0".
685
686 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
687 tried in noce_try_store_flag_constants after noce_try_cmove has had
688 a go at the conversion. */
689
690 static int
691 noce_try_store_flag (if_info)
692 struct noce_if_info *if_info;
693 {
694 int reversep;
695 rtx target, seq;
696
697 if (GET_CODE (if_info->b) == CONST_INT
698 && INTVAL (if_info->b) == STORE_FLAG_VALUE
699 && if_info->a == const0_rtx)
700 reversep = 0;
701 else if (if_info->b == const0_rtx
702 && GET_CODE (if_info->a) == CONST_INT
703 && INTVAL (if_info->a) == STORE_FLAG_VALUE
704 && (reversed_comparison_code (if_info->cond, if_info->jump)
705 != UNKNOWN))
706 reversep = 1;
707 else
708 return FALSE;
709
710 start_sequence ();
711
712 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
713 if (target)
714 {
715 if (target != if_info->x)
716 noce_emit_move_insn (if_info->x, target);
717
718 seq = get_insns ();
719 end_sequence ();
720 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
721
722 return TRUE;
723 }
724 else
725 {
726 end_sequence ();
727 return FALSE;
728 }
729 }
730
731 /* Convert "if (test) x = a; else x = b", for A and B constant. */
732
733 static int
734 noce_try_store_flag_constants (if_info)
735 struct noce_if_info *if_info;
736 {
737 rtx target, seq;
738 int reversep;
739 HOST_WIDE_INT itrue, ifalse, diff, tmp;
740 int normalize, can_reverse;
741 enum machine_mode mode;
742
743 if (! no_new_pseudos
744 && GET_CODE (if_info->a) == CONST_INT
745 && GET_CODE (if_info->b) == CONST_INT)
746 {
747 mode = GET_MODE (if_info->x);
748 ifalse = INTVAL (if_info->a);
749 itrue = INTVAL (if_info->b);
750
751 /* Make sure we can represent the difference between the two values. */
752 if ((itrue - ifalse > 0)
753 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
754 return FALSE;
755
756 diff = trunc_int_for_mode (itrue - ifalse, mode);
757
758 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
759 != UNKNOWN);
760
761 reversep = 0;
762 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
763 normalize = 0;
764 else if (ifalse == 0 && exact_log2 (itrue) >= 0
765 && (STORE_FLAG_VALUE == 1
766 || BRANCH_COST >= 2))
767 normalize = 1;
768 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
769 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
770 normalize = 1, reversep = 1;
771 else if (itrue == -1
772 && (STORE_FLAG_VALUE == -1
773 || BRANCH_COST >= 2))
774 normalize = -1;
775 else if (ifalse == -1 && can_reverse
776 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
777 normalize = -1, reversep = 1;
778 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
779 || BRANCH_COST >= 3)
780 normalize = -1;
781 else
782 return FALSE;
783
784 if (reversep)
785 {
786 tmp = itrue; itrue = ifalse; ifalse = tmp;
787 diff = trunc_int_for_mode (-diff, mode);
788 }
789
790 start_sequence ();
791 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
792 if (! target)
793 {
794 end_sequence ();
795 return FALSE;
796 }
797
798 /* if (test) x = 3; else x = 4;
799 => x = 3 + (test == 0); */
800 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
801 {
802 target = expand_simple_binop (mode,
803 (diff == STORE_FLAG_VALUE
804 ? PLUS : MINUS),
805 GEN_INT (ifalse), target, if_info->x, 0,
806 OPTAB_WIDEN);
807 }
808
809 /* if (test) x = 8; else x = 0;
810 => x = (test != 0) << 3; */
811 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
812 {
813 target = expand_simple_binop (mode, ASHIFT,
814 target, GEN_INT (tmp), if_info->x, 0,
815 OPTAB_WIDEN);
816 }
817
818 /* if (test) x = -1; else x = b;
819 => x = -(test != 0) | b; */
820 else if (itrue == -1)
821 {
822 target = expand_simple_binop (mode, IOR,
823 target, GEN_INT (ifalse), if_info->x, 0,
824 OPTAB_WIDEN);
825 }
826
827 /* if (test) x = a; else x = b;
828 => x = (-(test != 0) & (b - a)) + a; */
829 else
830 {
831 target = expand_simple_binop (mode, AND,
832 target, GEN_INT (diff), if_info->x, 0,
833 OPTAB_WIDEN);
834 if (target)
835 target = expand_simple_binop (mode, PLUS,
836 target, GEN_INT (ifalse),
837 if_info->x, 0, OPTAB_WIDEN);
838 }
839
840 if (! target)
841 {
842 end_sequence ();
843 return FALSE;
844 }
845
846 if (target != if_info->x)
847 noce_emit_move_insn (if_info->x, target);
848
849 seq = get_insns ();
850 end_sequence ();
851
852 if (seq_contains_jump (seq))
853 return FALSE;
854
855 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
856
857 return TRUE;
858 }
859
860 return FALSE;
861 }
862
863 /* Convert "if (test) foo++" into "foo += (test != 0)", and
864 similarly for "foo--". */
865
866 static int
867 noce_try_addcc (if_info)
868 struct noce_if_info *if_info;
869 {
870 rtx target, seq;
871 int subtract, normalize;
872
873 if (! no_new_pseudos
874 /* Should be no `else' case to worry about. */
875 && if_info->b == if_info->x
876 && GET_CODE (if_info->a) == PLUS
877 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
878 && (reversed_comparison_code (if_info->cond, if_info->jump)
879 != UNKNOWN))
880 {
881 rtx cond = if_info->cond;
882 enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
883
884 /* First try to use addcc pattern. */
885 if (general_operand (XEXP (cond, 0), VOIDmode)
886 && general_operand (XEXP (cond, 1), VOIDmode))
887 {
888 start_sequence ();
889 target = emit_conditional_add (if_info->x, code,
890 XEXP (cond, 0), XEXP (cond, 1),
891 VOIDmode,
892 if_info->b, XEXP (if_info->a, 1),
893 GET_MODE (if_info->x),
894 (code == LTU || code == GEU
895 || code == LEU || code == GTU));
896 if (target)
897 {
898 if (target != if_info->x)
899 noce_emit_move_insn (if_info->x, target);
900
901 seq = get_insns ();
902 end_sequence ();
903 emit_insn_before_scope (seq, if_info->jump,
904 INSN_SCOPE (if_info->insn_a));
905 return TRUE;
906 }
907 end_sequence ();
908 }
909
910 /* If that fails, construct conditional increment or decrement using
911 setcc. */
912 if (BRANCH_COST >= 2
913 && (XEXP (if_info->a, 1) == const1_rtx
914 || XEXP (if_info->a, 1) == constm1_rtx))
915 {
916 start_sequence ();
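          /* Decide whether to add or subtract the store-flag result, and
             whether it must be normalized to match the +/-1 increment.  */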
917 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
918 subtract = 0, normalize = 0;
919 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
920 subtract = 1, normalize = 0;
921 else
922 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
923
924
925 target = noce_emit_store_flag (if_info,
926 gen_reg_rtx (GET_MODE (if_info->x)),
927 1, normalize);
928
929 if (target)
930 target = expand_simple_binop (GET_MODE (if_info->x),
931 subtract ? MINUS : PLUS,
932 if_info->x, target, if_info->x,
933 0, OPTAB_WIDEN);
934 if (target)
935 {
936 if (target != if_info->x)
937 noce_emit_move_insn (if_info->x, target);
938
939 seq = get_insns ();
940 end_sequence ();
941
942 if (seq_contains_jump (seq))
943 return FALSE;
944
945 emit_insn_before_scope (seq, if_info->jump,
946 INSN_SCOPE (if_info->insn_a));
947
948 return TRUE;
949 }
950 end_sequence ();
951 }
952 }
953
954 return FALSE;
955 }
956
957 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
958
959 static int
960 noce_try_store_flag_mask (if_info)
961 struct noce_if_info *if_info;
962 {
963 rtx target, seq;
964 int reversep;
965
966 reversep = 0;
967 if (! no_new_pseudos
968 && (BRANCH_COST >= 2
969 || STORE_FLAG_VALUE == -1)
970 && ((if_info->a == const0_rtx
971 && rtx_equal_p (if_info->b, if_info->x))
972 || ((reversep = (reversed_comparison_code (if_info->cond,
973 if_info->jump)
974 != UNKNOWN))
975 && if_info->b == const0_rtx
976 && rtx_equal_p (if_info->a, if_info->x))))
977 {
978 start_sequence ();
979 target = noce_emit_store_flag (if_info,
980 gen_reg_rtx (GET_MODE (if_info->x)),
981 reversep, -1);
982 if (target)
983 target = expand_simple_binop (GET_MODE (if_info->x), AND,
984 if_info->x, target, if_info->x, 0,
985 OPTAB_WIDEN);
986
987 if (target)
988 {
989 if (target != if_info->x)
990 noce_emit_move_insn (if_info->x, target);
991
992 seq = get_insns ();
993 end_sequence ();
994
995 if (seq_contains_jump (seq))
996 return FALSE;
997
998 emit_insn_before_scope (seq, if_info->jump,
999 INSN_SCOPE (if_info->insn_a));
1000
1001 return TRUE;
1002 }
1003
1004 end_sequence ();
1005 }
1006
1007 return FALSE;
1008 }
1009
1010 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
1011
1012 static rtx
1013 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
1014 struct noce_if_info *if_info;
1015 rtx x, cmp_a, cmp_b, vfalse, vtrue;
1016 enum rtx_code code;
1017 {
1018 /* If earliest == jump, try to build the cmove insn directly.
1019 This is helpful when combine has created some complex condition
1020 (like for alpha's cmovlbs) that we can't hope to regenerate
1021 through the normal interface. */
1022
1023 if (if_info->cond_earliest == if_info->jump)
1024 {
1025 rtx tmp;
1026
1027 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1028 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1029 tmp = gen_rtx_SET (VOIDmode, x, tmp);
1030
1031 start_sequence ();
1032 tmp = emit_insn (tmp);
1033
1034 if (recog_memoized (tmp) >= 0)
1035 {
1036 tmp = get_insns ();
1037 end_sequence ();
1038 emit_insn (tmp);
1039
1040 return x;
1041 }
1042
1043 end_sequence ();
1044 }
1045
1046 /* Don't even try if the comparison operands are weird. */
1047 if (! general_operand (cmp_a, GET_MODE (cmp_a))
1048 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1049 return NULL_RTX;
1050
1051 #if HAVE_conditional_move
1052 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1053 vtrue, vfalse, GET_MODE (x),
1054 (code == LTU || code == GEU
1055 || code == LEU || code == GTU));
1056 #else
1057 /* We'll never get here, as noce_process_if_block doesn't call the
1058 functions involved. Ifdef code, however, should be discouraged
 1059 because it leads to typos in the code not selected. In this case,
 1060 however, emit_conditional_move won't exist either. */
1061 return NULL_RTX;
1062 #endif
1063 }
1064
1065 /* Try only simple constants and registers here. More complex cases
 1066 are handled in noce_try_cmove_arith after the noce_try_store_flag*
 1067 transforms have had a go at the conversion. */
1068
1069 static int
1070 noce_try_cmove (if_info)
1071 struct noce_if_info *if_info;
1072 {
1073 enum rtx_code code;
1074 rtx target, seq;
1075
1076 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1077 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1078 {
1079 start_sequence ();
1080
1081 code = GET_CODE (if_info->cond);
1082 target = noce_emit_cmove (if_info, if_info->x, code,
1083 XEXP (if_info->cond, 0),
1084 XEXP (if_info->cond, 1),
1085 if_info->a, if_info->b);
1086
1087 if (target)
1088 {
1089 if (target != if_info->x)
1090 noce_emit_move_insn (if_info->x, target);
1091
1092 seq = get_insns ();
1093 end_sequence ();
1094 emit_insn_before_scope (seq, if_info->jump,
1095 INSN_SCOPE (if_info->insn_a));
1096 return TRUE;
1097 }
1098 else
1099 {
1100 end_sequence ();
1101 return FALSE;
1102 }
1103 }
1104
1105 return FALSE;
1106 }
1107
1108 /* Try more complex cases involving conditional_move. */
1109
1110 static int
1111 noce_try_cmove_arith (if_info)
1112 struct noce_if_info *if_info;
1113 {
1114 rtx a = if_info->a;
1115 rtx b = if_info->b;
1116 rtx x = if_info->x;
1117 rtx insn_a, insn_b;
1118 rtx tmp, target;
1119 int is_mem = 0;
1120 enum rtx_code code;
1121
1122 /* A conditional move from two memory sources is equivalent to a
1123 conditional on their addresses followed by a load. Don't do this
1124 early because it'll screw alias analysis. Note that we've
1125 already checked for no side effects. */
1126 if (! no_new_pseudos && cse_not_expected
1127 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
1128 && BRANCH_COST >= 5)
1129 {
1130 a = XEXP (a, 0);
1131 b = XEXP (b, 0);
1132 x = gen_reg_rtx (Pmode);
1133 is_mem = 1;
1134 }
1135
1136 /* ??? We could handle this if we knew that a load from A or B could
1137 not fault. This is also true if we've already loaded
1138 from the address along the path from ENTRY. */
1139 else if (may_trap_p (a) || may_trap_p (b))
1140 return FALSE;
1141
1142 /* if (test) x = a + b; else x = c - d;
1143 => y = a + b;
1144 x = c - d;
1145 if (test)
1146 x = y;
1147 */
1148
1149 code = GET_CODE (if_info->cond);
1150 insn_a = if_info->insn_a;
1151 insn_b = if_info->insn_b;
1152
1153 /* Possibly rearrange operands to make things come out more natural. */
1154 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1155 {
1156 int reversep = 0;
1157 if (rtx_equal_p (b, x))
1158 reversep = 1;
1159 else if (general_operand (b, GET_MODE (b)))
1160 reversep = 1;
1161
1162 if (reversep)
1163 {
1164 code = reversed_comparison_code (if_info->cond, if_info->jump);
1165 tmp = a, a = b, b = tmp;
1166 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1167 }
1168 }
1169
1170 start_sequence ();
1171
1172 /* If either operand is complex, load it into a register first.
1173 The best way to do this is to copy the original insn. In this
1174 way we preserve any clobbers etc that the insn may have had.
1175 This is of course not possible in the IS_MEM case. */
1176 if (! general_operand (a, GET_MODE (a)))
1177 {
1178 rtx set;
1179
1180 if (no_new_pseudos)
1181 goto end_seq_and_fail;
1182
1183 if (is_mem)
1184 {
1185 tmp = gen_reg_rtx (GET_MODE (a));
1186 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1187 }
1188 else if (! insn_a)
1189 goto end_seq_and_fail;
1190 else
1191 {
1192 a = gen_reg_rtx (GET_MODE (a));
1193 tmp = copy_rtx (insn_a);
1194 set = single_set (tmp);
1195 SET_DEST (set) = a;
1196 tmp = emit_insn (PATTERN (tmp));
1197 }
1198 if (recog_memoized (tmp) < 0)
1199 goto end_seq_and_fail;
1200 }
1201 if (! general_operand (b, GET_MODE (b)))
1202 {
1203 rtx set;
1204
1205 if (no_new_pseudos)
1206 goto end_seq_and_fail;
1207
1208 if (is_mem)
1209 {
1210 tmp = gen_reg_rtx (GET_MODE (b));
1211 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1212 }
1213 else if (! insn_b)
1214 goto end_seq_and_fail;
1215 else
1216 {
1217 b = gen_reg_rtx (GET_MODE (b));
1218 tmp = copy_rtx (insn_b);
1219 set = single_set (tmp);
1220 SET_DEST (set) = b;
1221 tmp = emit_insn (PATTERN (tmp));
1222 }
1223 if (recog_memoized (tmp) < 0)
1224 goto end_seq_and_fail;
1225 }
1226
1227 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1228 XEXP (if_info->cond, 1), a, b);
1229
1230 if (! target)
1231 goto end_seq_and_fail;
1232
 1233 /* If we're handling a memory (see above), emit the load now. */
1234 if (is_mem)
1235 {
1236 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1237
1238 /* Copy over flags as appropriate. */
1239 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1240 MEM_VOLATILE_P (tmp) = 1;
1241 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1242 MEM_IN_STRUCT_P (tmp) = 1;
1243 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1244 MEM_SCALAR_P (tmp) = 1;
1245 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1246 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1247 set_mem_align (tmp,
1248 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1249
1250 noce_emit_move_insn (if_info->x, tmp);
1251 }
1252 else if (target != x)
1253 noce_emit_move_insn (x, target);
1254
1255 tmp = get_insns ();
1256 end_sequence ();
1257 emit_insn_before_scope (tmp, if_info->jump, INSN_SCOPE (if_info->insn_a));
1258 return TRUE;
1259
1260 end_seq_and_fail:
1261 end_sequence ();
1262 return FALSE;
1263 }
1264
1265 /* For most cases, the simplified condition we found is the best
1266 choice, but this is not the case for the min/max/abs transforms.
1267 For these we wish to know that it is A or B in the condition. */
1268
1269 static rtx
1270 noce_get_alt_condition (if_info, target, earliest)
1271 struct noce_if_info *if_info;
1272 rtx target;
1273 rtx *earliest;
1274 {
1275 rtx cond, set, insn;
1276 int reverse;
1277
1278 /* If target is already mentioned in the known condition, return it. */
1279 if (reg_mentioned_p (target, if_info->cond))
1280 {
1281 *earliest = if_info->cond_earliest;
1282 return if_info->cond;
1283 }
1284
1285 set = pc_set (if_info->jump);
1286 cond = XEXP (SET_SRC (set), 0);
1287 reverse
1288 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1289 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1290
1291 /* If we're looking for a constant, try to make the conditional
1292 have that constant in it. There are two reasons why it may
1293 not have the constant we want:
1294
1295 1. GCC may have needed to put the constant in a register, because
1296 the target can't compare directly against that constant. For
1297 this case, we look for a SET immediately before the comparison
1298 that puts a constant in that register.
1299
1300 2. GCC may have canonicalized the conditional, for example
1301 replacing "if x < 4" with "if x <= 3". We can undo that (or
1302 make equivalent types of changes) to get the constants we need
1303 if they're off by one in the right direction. */
1304
1305 if (GET_CODE (target) == CONST_INT)
1306 {
1307 enum rtx_code code = GET_CODE (if_info->cond);
1308 rtx op_a = XEXP (if_info->cond, 0);
1309 rtx op_b = XEXP (if_info->cond, 1);
1310 rtx prev_insn;
1311
1312 /* First, look to see if we put a constant in a register. */
1313 prev_insn = PREV_INSN (if_info->cond_earliest);
1314 if (prev_insn
1315 && INSN_P (prev_insn)
1316 && GET_CODE (PATTERN (prev_insn)) == SET)
1317 {
1318 rtx src = find_reg_equal_equiv_note (prev_insn);
1319 if (!src)
1320 src = SET_SRC (PATTERN (prev_insn));
1321 if (GET_CODE (src) == CONST_INT)
1322 {
1323 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1324 op_a = src;
1325 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1326 op_b = src;
1327
1328 if (GET_CODE (op_a) == CONST_INT)
1329 {
1330 rtx tmp = op_a;
1331 op_a = op_b;
1332 op_b = tmp;
1333 code = swap_condition (code);
1334 }
1335 }
1336 }
1337
1338 /* Now, look to see if we can get the right constant by
1339 adjusting the conditional. */
1340 if (GET_CODE (op_b) == CONST_INT)
1341 {
1342 HOST_WIDE_INT desired_val = INTVAL (target);
1343 HOST_WIDE_INT actual_val = INTVAL (op_b);
1344
1345 switch (code)
1346 {
1347 case LT:
1348 if (actual_val == desired_val + 1)
1349 {
1350 code = LE;
1351 op_b = GEN_INT (desired_val);
1352 }
1353 break;
1354 case LE:
1355 if (actual_val == desired_val - 1)
1356 {
1357 code = LT;
1358 op_b = GEN_INT (desired_val);
1359 }
1360 break;
1361 case GT:
1362 if (actual_val == desired_val - 1)
1363 {
1364 code = GE;
1365 op_b = GEN_INT (desired_val);
1366 }
1367 break;
1368 case GE:
1369 if (actual_val == desired_val + 1)
1370 {
1371 code = GT;
1372 op_b = GEN_INT (desired_val);
1373 }
1374 break;
1375 default:
1376 break;
1377 }
1378 }
1379
1380 /* If we made any changes, generate a new conditional that is
1381 equivalent to what we started with, but has the right
1382 constants in it. */
1383 if (code != GET_CODE (if_info->cond)
1384 || op_a != XEXP (if_info->cond, 0)
1385 || op_b != XEXP (if_info->cond, 1))
1386 {
1387 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1388 *earliest = if_info->cond_earliest;
1389 return cond;
1390 }
1391 }
1392
1393 cond = canonicalize_condition (if_info->jump, cond, reverse,
1394 earliest, target);
1395 if (! cond || ! reg_mentioned_p (target, cond))
1396 return NULL;
1397
1398 /* We almost certainly searched back to a different place.
1399 Need to re-verify correct lifetimes. */
1400
1401 /* X may not be mentioned in the range (cond_earliest, jump]. */
1402 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1403 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1404 return NULL;
1405
1406 /* A and B may not be modified in the range [cond_earliest, jump). */
1407 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1408 if (INSN_P (insn)
1409 && (modified_in_p (if_info->a, insn)
1410 || modified_in_p (if_info->b, insn)))
1411 return NULL;
1412
1413 return cond;
1414 }
1415
1416 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1417
1418 static int
1419 noce_try_minmax (if_info)
1420 struct noce_if_info *if_info;
1421 {
1422 rtx cond, earliest, target, seq;
1423 enum rtx_code code, op;
1424 int unsignedp;
1425
1426 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1427 if (no_new_pseudos)
1428 return FALSE;
1429
1430 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1431 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1432 to get the target to tell us... */
1433 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1434 || HONOR_NANS (GET_MODE (if_info->x)))
1435 return FALSE;
1436
1437 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1438 if (!cond)
1439 return FALSE;
1440
1441 /* Verify the condition is of the form we expect, and canonicalize
1442 the comparison code. */
1443 code = GET_CODE (cond);
1444 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1445 {
1446 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1447 return FALSE;
1448 }
1449 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1450 {
1451 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1452 return FALSE;
1453 code = swap_condition (code);
1454 }
1455 else
1456 return FALSE;
1457
1458 /* Determine what sort of operation this is. Note that the code is for
1459 a taken branch, so the code->operation mapping appears backwards. */
1460 switch (code)
1461 {
1462 case LT:
1463 case LE:
1464 case UNLT:
1465 case UNLE:
1466 op = SMAX;
1467 unsignedp = 0;
1468 break;
1469 case GT:
1470 case GE:
1471 case UNGT:
1472 case UNGE:
1473 op = SMIN;
1474 unsignedp = 0;
1475 break;
1476 case LTU:
1477 case LEU:
1478 op = UMAX;
1479 unsignedp = 1;
1480 break;
1481 case GTU:
1482 case GEU:
1483 op = UMIN;
1484 unsignedp = 1;
1485 break;
1486 default:
1487 return FALSE;
1488 }
1489
1490 start_sequence ();
1491
1492 target = expand_simple_binop (GET_MODE (if_info->x), op,
1493 if_info->a, if_info->b,
1494 if_info->x, unsignedp, OPTAB_WIDEN);
1495 if (! target)
1496 {
1497 end_sequence ();
1498 return FALSE;
1499 }
1500 if (target != if_info->x)
1501 noce_emit_move_insn (if_info->x, target);
1502
1503 seq = get_insns ();
1504 end_sequence ();
1505
1506 if (seq_contains_jump (seq))
1507 return FALSE;
1508
1509 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1510 if_info->cond = cond;
1511 if_info->cond_earliest = earliest;
1512
1513 return TRUE;
1514 }
1515
1516 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1517
1518 static int
1519 noce_try_abs (if_info)
1520 struct noce_if_info *if_info;
1521 {
1522 rtx cond, earliest, target, seq, a, b, c;
1523 int negate;
1524
1525 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1526 if (no_new_pseudos)
1527 return FALSE;
1528
1529 /* Recognize A and B as constituting an ABS or NABS. */
1530 a = if_info->a;
1531 b = if_info->b;
1532 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1533 negate = 0;
1534 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1535 {
1536 c = a; a = b; b = c;
1537 negate = 1;
1538 }
1539 else
1540 return FALSE;
1541
1542 cond = noce_get_alt_condition (if_info, b, &earliest);
1543 if (!cond)
1544 return FALSE;
1545
1546 /* Verify the condition is of the form we expect. */
1547 if (rtx_equal_p (XEXP (cond, 0), b))
1548 c = XEXP (cond, 1);
1549 else if (rtx_equal_p (XEXP (cond, 1), b))
1550 c = XEXP (cond, 0);
1551 else
1552 return FALSE;
1553
1554 /* Verify that C is zero. Search backward through the block for
1555 a REG_EQUAL note if necessary. */
1556 if (REG_P (c))
1557 {
1558 rtx insn, note = NULL;
1559 for (insn = earliest;
1560 insn != if_info->test_bb->head;
1561 insn = PREV_INSN (insn))
1562 if (INSN_P (insn)
1563 && ((note = find_reg_note (insn, REG_EQUAL, c))
1564 || (note = find_reg_note (insn, REG_EQUIV, c))))
1565 break;
1566 if (! note)
1567 return FALSE;
1568 c = XEXP (note, 0);
1569 }
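  /* If C is a reference to a constant-pool entry, use the constant itself.  */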
1570 if (GET_CODE (c) == MEM
1571 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1572 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1573 c = get_pool_constant (XEXP (c, 0));
1574
1575 /* Work around funny ideas get_condition has wrt canonicalization.
1576 Note that these rtx constants are known to be CONST_INT, and
1577 therefore imply integer comparisons. */
1578 if (c == constm1_rtx && GET_CODE (cond) == GT)
1579 ;
1580 else if (c == const1_rtx && GET_CODE (cond) == LT)
1581 ;
1582 else if (c != CONST0_RTX (GET_MODE (b)))
1583 return FALSE;
1584
1585 /* Determine what sort of operation this is. */
1586 switch (GET_CODE (cond))
1587 {
1588 case LT:
1589 case LE:
1590 case UNLT:
1591 case UNLE:
1592 negate = !negate;
1593 break;
1594 case GT:
1595 case GE:
1596 case UNGT:
1597 case UNGE:
1598 break;
1599 default:
1600 return FALSE;
1601 }
1602
1603 start_sequence ();
1604
1605 target = expand_simple_unop (GET_MODE (if_info->x), ABS, b, if_info->x, 0);
1606
 1607 /* ??? It's a quandary whether cmove would be better here, especially
1608 for integers. Perhaps combine will clean things up. */
1609 if (target && negate)
1610 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1611
1612 if (! target)
1613 {
1614 end_sequence ();
1615 return FALSE;
1616 }
1617
1618 if (target != if_info->x)
1619 noce_emit_move_insn (if_info->x, target);
1620
1621 seq = get_insns ();
1622 end_sequence ();
1623
1624 if (seq_contains_jump (seq))
1625 return FALSE;
1626
1627 emit_insn_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1628 if_info->cond = cond;
1629 if_info->cond_earliest = earliest;
1630
1631 return TRUE;
1632 }
1633
1634 /* Similar to get_condition, only the resulting condition must be
1635 valid at JUMP, instead of at EARLIEST. */
1636
1637 static rtx
1638 noce_get_condition (jump, earliest)
1639 rtx jump;
1640 rtx *earliest;
1641 {
1642 rtx cond, set, tmp, insn;
1643 bool reverse;
1644
1645 if (! any_condjump_p (jump))
1646 return NULL_RTX;
1647
1648 set = pc_set (jump);
1649
1650 /* If this branches to JUMP_LABEL when the condition is false,
1651 reverse the condition. */
1652 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1653 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
1654
1655 /* If the condition variable is a register and is MODE_INT, accept it. */
1656
1657 cond = XEXP (SET_SRC (set), 0);
1658 tmp = XEXP (cond, 0);
1659 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
1660 {
1661 *earliest = jump;
1662
1663 if (reverse)
1664 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1665 GET_MODE (cond), tmp, XEXP (cond, 1));
1666 return cond;
1667 }
1668
1669 /* Otherwise, fall back on canonicalize_condition to do the dirty
1670 work of manipulating MODE_CC values and COMPARE rtx codes. */
1671
1672 tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX);
1673 if (!tmp)
1674 return NULL_RTX;
1675
1676 /* We are going to insert code before JUMP, not before EARLIEST.
1677 We must therefore be certain that the given condition is valid
1678 at JUMP by virtue of not having been modified since. */
1679 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1680 if (INSN_P (insn) && modified_in_p (tmp, insn))
1681 break;
1682 if (insn == jump)
1683 return tmp;
1684
1685 /* The condition was modified. See if we can get a partial result
1686 that doesn't follow all the reversals. Perhaps combine can fold
1687 them together later. */
1688 tmp = XEXP (tmp, 0);
1689 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
1690 return NULL_RTX;
1691 tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp);
1692 if (!tmp)
1693 return NULL_RTX;
1694
1695 /* For sanity's sake, re-validate the new result. */
1696 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1697 if (INSN_P (insn) && modified_in_p (tmp, insn))
1698 return NULL_RTX;
1699
1700 return tmp;
1701 }
1702
1703 /* Return true if OP is ok for if-then-else processing. */
1704
1705 static int
1706 noce_operand_ok (op)
1707 rtx op;
1708 {
1709 /* We special-case memories, so handle any of them with
1710 no address side effects. */
1711 if (GET_CODE (op) == MEM)
1712 return ! side_effects_p (XEXP (op, 0));
1713
1714 if (side_effects_p (op))
1715 return FALSE;
1716
1717 return ! may_trap_p (op);
1718 }
1719
1720 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1721 without using conditional execution. Return TRUE if we were
1722 successful at converting the block. */
1723
1724 static int
1725 noce_process_if_block (ce_info)
1726 struct ce_if_block * ce_info;
1727 {
1728 basic_block test_bb = ce_info->test_bb; /* test block */
1729 basic_block then_bb = ce_info->then_bb; /* THEN */
1730 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1731 struct noce_if_info if_info;
1732 rtx insn_a, insn_b;
1733 rtx set_a, set_b;
1734 rtx orig_x, x, a, b;
1735 rtx jump, cond;
1736
1737 /* We're looking for patterns of the form
1738
1739 (1) if (...) x = a; else x = b;
1740 (2) x = b; if (...) x = a;
1741 (3) if (...) x = a; // as if with an initial x = x.
1742
1743 The later patterns require jumps to be more expensive.
1744
1745 ??? For future expansion, look for multiple X in such patterns. */
1746
1747 /* If test is comprised of && or || elements, don't handle it unless it is
1748 the special case of && elements without an ELSE block. */
1749 if (ce_info->num_multiple_test_blocks)
1750 {
1751 if (else_bb || ! ce_info->and_and_p)
1752 return FALSE;
1753
1754 ce_info->test_bb = test_bb = ce_info->last_test_bb;
1755 ce_info->num_multiple_test_blocks = 0;
1756 ce_info->num_and_and_blocks = 0;
1757 ce_info->num_or_or_blocks = 0;
1758 }
1759
1760 /* If this is not a standard conditional jump, we can't parse it. */
1761 jump = test_bb->end;
1762 cond = noce_get_condition (jump, &if_info.cond_earliest);
1763 if (! cond)
1764 return FALSE;
1765
1766 /* If the conditional jump is more than just a conditional
1767 jump, then we can not do if-conversion on this block. */
1768 if (! onlyjump_p (jump))
1769 return FALSE;
1770
1771 /* We must be comparing objects whose modes imply the size. */
1772 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1773 return FALSE;
1774
1775 /* Look for one of the potential sets. */
1776 insn_a = first_active_insn (then_bb);
1777 if (! insn_a
1778 || insn_a != last_active_insn (then_bb, FALSE)
1779 || (set_a = single_set (insn_a)) == NULL_RTX)
1780 return FALSE;
1781
1782 x = SET_DEST (set_a);
1783 a = SET_SRC (set_a);
1784
1785 /* Look for the other potential set. Make sure we've got equivalent
1786 destinations. */
1787 /* ??? This is overconservative. Storing to two different mems is
1788 as easy as conditionally computing the address. Storing to a
1789 single mem merely requires a scratch memory to use as one of the
1790 destination addresses; often the memory immediately below the
1791 stack pointer is available for this. */
1792 set_b = NULL_RTX;
1793 if (else_bb)
1794 {
1795 insn_b = first_active_insn (else_bb);
1796 if (! insn_b
1797 || insn_b != last_active_insn (else_bb, FALSE)
1798 || (set_b = single_set (insn_b)) == NULL_RTX
1799 || ! rtx_equal_p (x, SET_DEST (set_b)))
1800 return FALSE;
1801 }
1802 else
1803 {
1804 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1805 /* We're going to be moving the evaluation of B down from above
1806 COND_EARLIEST to JUMP. Make sure the relevant data is still
1807 intact. */
1808 if (! insn_b
1809 || GET_CODE (insn_b) != INSN
1810 || (set_b = single_set (insn_b)) == NULL_RTX
1811 || ! rtx_equal_p (x, SET_DEST (set_b))
1812 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
1813 || modified_between_p (SET_SRC (set_b),
1814 PREV_INSN (if_info.cond_earliest), jump)
1815 /* Likewise with X. In particular this can happen when
1816 noce_get_condition looks farther back in the instruction
1817 stream than one might expect. */
1818 || reg_overlap_mentioned_p (x, cond)
1819 || reg_overlap_mentioned_p (x, a)
1820 || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
1821 insn_b = set_b = NULL_RTX;
1822 }
1823 b = (set_b ? SET_SRC (set_b) : x);
1824
1825 /* Only operate on register destinations, and even then avoid extending
1826 the lifetime of hard registers on small register class machines. */
1827 orig_x = x;
1828 if (GET_CODE (x) != REG
1829 || (SMALL_REGISTER_CLASSES
1830 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1831 {
1832 if (no_new_pseudos)
1833 return FALSE;
1834 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1835 ? XEXP (x, 0) : x));
1836 }
1837
1838 /* Don't operate on sources that may trap or are volatile. */
1839 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1840 return FALSE;
1841
1842 /* Set up the info block for our subroutines. */
1843 if_info.test_bb = test_bb;
1844 if_info.cond = cond;
1845 if_info.jump = jump;
1846 if_info.insn_a = insn_a;
1847 if_info.insn_b = insn_b;
1848 if_info.x = x;
1849 if_info.a = a;
1850 if_info.b = b;
1851
1852 /* Try optimizations in some approximation of a useful order. */
1853 /* ??? Should first look to see if X is live incoming at all. If it
1854 isn't, we don't need anything but an unconditional set. */
1855
1856 /* Look and see if A and B are really the same. Avoid creating silly
1857 cmove constructs that no one will fix up later. */
1858 if (rtx_equal_p (a, b))
1859 {
1860 /* If we have an INSN_B, we don't have to create any new rtl. Just
1861 move the instruction that we already have. If we don't have an
1862 INSN_B, that means that A == X, and we've got a noop move. In
1863 that case don't do anything and let the code below delete INSN_A. */
1864 if (insn_b && else_bb)
1865 {
1866 rtx note;
1867
1868 if (else_bb && insn_b == else_bb->end)
1869 else_bb->end = PREV_INSN (insn_b);
1870 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
1871
1872 /* If there was a REG_EQUAL note, delete it since it may have been
1873 true due to this insn being after a jump. */
1874 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1875 remove_note (insn_b, note);
1876
1877 insn_b = NULL_RTX;
1878 }
1879 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1880 x must be executed twice. */
1881 else if (insn_b && side_effects_p (orig_x))
1882 return FALSE;
1883
1884 x = orig_x;
1885 goto success;
1886 }
1887
1888 if (noce_try_store_flag (&if_info))
1889 goto success;
1890 if (noce_try_minmax (&if_info))
1891 goto success;
1892 if (noce_try_abs (&if_info))
1893 goto success;
1894 if (HAVE_conditional_move
1895 && noce_try_cmove (&if_info))
1896 goto success;
1897 if (! HAVE_conditional_execution)
1898 {
1899 if (noce_try_store_flag_constants (&if_info))
1900 goto success;
1901 if (noce_try_addcc (&if_info))
1902 goto success;
1903 if (noce_try_store_flag_mask (&if_info))
1904 goto success;
1905 if (HAVE_conditional_move
1906 && noce_try_cmove_arith (&if_info))
1907 goto success;
1908 }
1909
1910 return FALSE;
1911
1912 success:
1913 /* The original sets may now be killed. */
1914 delete_insn (insn_a);
1915
1916 /* Several special cases here: First, we may have reused insn_b above,
1917 in which case insn_b is now NULL. Second, we want to delete insn_b
 1918 if it came from the ELSE block, because it follows the now correct
1919 write that appears in the TEST block. However, if we got insn_b from
1920 the TEST block, it may in fact be loading data needed for the comparison.
1921 We'll let life_analysis remove the insn if it's really dead. */
1922 if (insn_b && else_bb)
1923 delete_insn (insn_b);
1924
1925 /* The new insns will have been inserted immediately before the jump. We
1926 should be able to remove the jump with impunity, but the condition itself
1927 may have been modified by gcse to be shared across basic blocks. */
1928 delete_insn (jump);
1929
1930 /* If we used a temporary, fix it up now. */
1931 if (orig_x != x)
1932 {
1933 start_sequence ();
1934 noce_emit_move_insn (copy_rtx (orig_x), x);
1935 insn_b = get_insns ();
1936 end_sequence ();
1937
1938 emit_insn_after_scope (insn_b, test_bb->end, INSN_SCOPE (insn_a));
1939 }
1940
1941 /* Merge the blocks! */
1942 merge_if_block (ce_info);
1943
1944 return TRUE;
1945 }
1946 \f
1947 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1948 straight line code. Return true if successful. */
1949
1950 static int
1951 process_if_block (ce_info)
1952 struct ce_if_block * ce_info;
1953 {
1954 if (! reload_completed
1955 && noce_process_if_block (ce_info))
1956 return TRUE;
1957
1958 if (HAVE_conditional_execution && reload_completed)
1959 {
1960 /* If we have && and || tests, first try to combine the && and || tests
1961 into the conditional code; if that fails, go back and handle the block
1962 without the && and ||, which at present handles the && case if there
1963 was no ELSE block. */
1964 if (cond_exec_process_if_block (ce_info, TRUE))
1965 return TRUE;
1966
1967 if (ce_info->num_multiple_test_blocks)
1968 {
1969 cancel_changes (0);
1970
1971 if (cond_exec_process_if_block (ce_info, FALSE))
1972 return TRUE;
1973 }
1974 }
1975
1976 return FALSE;
1977 }
1978
1979 /* Merge the blocks and mark for local life update. */
1980
1981 static void
1982 merge_if_block (ce_info)
1983 struct ce_if_block * ce_info;
1984 {
1985 basic_block test_bb = ce_info->test_bb; /* last test block */
1986 basic_block then_bb = ce_info->then_bb; /* THEN */
1987 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1988 basic_block join_bb = ce_info->join_bb; /* join block */
1989 basic_block combo_bb;
1990
1991 /* All block merging is done into the lower block numbers. */
1992
1993 combo_bb = test_bb;
1994
1995 /* Merge any basic blocks to handle && and || subtests. Each of
1996 the blocks are on the fallthru path from the predecessor block. */
1997 if (ce_info->num_multiple_test_blocks > 0)
1998 {
1999 basic_block bb = test_bb;
2000 basic_block last_test_bb = ce_info->last_test_bb;
2001 basic_block fallthru = block_fallthru (bb);
2002
2003 do
2004 {
2005 bb = fallthru;
2006 fallthru = block_fallthru (bb);
2007 if (post_dominators)
2008 delete_from_dominance_info (post_dominators, bb);
2009 merge_blocks_nomove (combo_bb, bb);
2010 num_removed_blocks++;
2011 }
2012 while (bb != last_test_bb);
2013 }
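
  /* Rough example of the loop above (illustrative sketch only): for
     "if (a && b && c) x = y;" the blocks testing A, B and C form a
     fall-through chain, and the B and C test blocks are folded one by one
     into the combo block that began with the A test.  */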
2014
2015 /* Merge TEST block into THEN block. Normally the THEN block won't have a
2016 label, but it might if there were || tests. That label's count should be
2017 zero, and it normally should be removed. */
2018
2019 if (then_bb)
2020 {
2021 if (combo_bb->global_live_at_end)
2022 COPY_REG_SET (combo_bb->global_live_at_end,
2023 then_bb->global_live_at_end);
2024 if (post_dominators)
2025 delete_from_dominance_info (post_dominators, then_bb);
2026 merge_blocks_nomove (combo_bb, then_bb);
2027 num_removed_blocks++;
2028 }
2029
2030 /* The ELSE block, if it existed, had a label. That label's count
2031 will almost always be zero, but odd things can happen when labels
2032 get their addresses taken. */
2033 if (else_bb)
2034 {
2035 if (post_dominators)
2036 delete_from_dominance_info (post_dominators, else_bb);
2037 merge_blocks_nomove (combo_bb, else_bb);
2038 num_removed_blocks++;
2039 }
2040
2041 /* If there was no join block reported, that means it was not adjacent
2042 to the others, and so we cannot merge them. */
2043
2044 if (! join_bb)
2045 {
2046 rtx last = combo_bb->end;
2047
2048 /* The outgoing edge for the current COMBO block should already
2049 be correct. Verify this. */
2050 if (combo_bb->succ == NULL_EDGE)
2051 {
2052 if (find_reg_note (last, REG_NORETURN, NULL))
2053 ;
2054 else if (GET_CODE (last) == INSN
2055 && GET_CODE (PATTERN (last)) == TRAP_IF
2056 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
2057 ;
2058 else
2059 abort ();
2060 }
2061
2062 /* There should still be something at the end of the THEN or ELSE
2063 blocks taking us to our final destination. */
2064 else if (GET_CODE (last) == JUMP_INSN)
2065 ;
2066 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
2067 && GET_CODE (last) == CALL_INSN
2068 && SIBLING_CALL_P (last))
2069 ;
2070 else if ((combo_bb->succ->flags & EDGE_EH)
2071 && can_throw_internal (last))
2072 ;
2073 else
2074 abort ();
2075 }
2076
2077 /* The JOIN block may have had quite a number of other predecessors too.
2078 Since we've already merged the TEST, THEN and ELSE blocks, we should
2079 have only one remaining edge from our if-then-else diamond. If there
2080 is more than one remaining edge, it must come from elsewhere. There
2081 may be zero incoming edges if the THEN block didn't actually join
2082 back up (as with a call to abort). */
2083 else if ((join_bb->pred == NULL
2084 || join_bb->pred->pred_next == NULL)
2085 && join_bb != EXIT_BLOCK_PTR)
2086 {
2087 /* We can merge the JOIN. */
2088 if (combo_bb->global_live_at_end)
2089 COPY_REG_SET (combo_bb->global_live_at_end,
2090 join_bb->global_live_at_end);
2091
2092 if (post_dominators)
2093 delete_from_dominance_info (post_dominators, join_bb);
2094 merge_blocks_nomove (combo_bb, join_bb);
2095 num_removed_blocks++;
2096 }
2097 else
2098 {
2099 /* We cannot merge the JOIN. */
2100
2101 /* The outgoing edge for the current COMBO block should already
2102 be correct. Verify this. */
2103 if (combo_bb->succ->succ_next != NULL_EDGE
2104 || combo_bb->succ->dest != join_bb)
2105 abort ();
2106
2107 /* Remove the jump and cruft from the end of the COMBO block. */
2108 if (join_bb != EXIT_BLOCK_PTR)
2109 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
2110 }
2111
2112 num_updated_if_blocks++;
2113 }
2114 \f
2115 /* Find a block ending in a simple IF condition and try to transform it
2116 in some way. When converting a multi-block condition, put the new code
2117 in the first such block and delete the rest. Return a pointer to this
2118 first block if some transformation was done. Return NULL otherwise. */
2119
2120 static basic_block
2121 find_if_header (test_bb, pass)
2122 basic_block test_bb;
2123 int pass;
2124 {
2125 ce_if_block_t ce_info;
2126 edge then_edge;
2127 edge else_edge;
2128
2129 /* The kind of block we're looking for has exactly two successors. */
2130 if ((then_edge = test_bb->succ) == NULL_EDGE
2131 || (else_edge = then_edge->succ_next) == NULL_EDGE
2132 || else_edge->succ_next != NULL_EDGE)
2133 return NULL;
2134
2135 /* Neither edge should be abnormal. */
2136 if ((then_edge->flags & EDGE_COMPLEX)
2137 || (else_edge->flags & EDGE_COMPLEX))
2138 return NULL;
2139
2140 /* The THEN edge is canonically the one that falls through. */
2141 if (then_edge->flags & EDGE_FALLTHRU)
2142 ;
2143 else if (else_edge->flags & EDGE_FALLTHRU)
2144 {
2145 edge e = else_edge;
2146 else_edge = then_edge;
2147 then_edge = e;
2148 }
2149 else
2150 /* Otherwise this must be a multiway branch of some sort. */
2151 return NULL;
2152
2153 memset ((PTR) &ce_info, '\0', sizeof (ce_info));
2154 ce_info.test_bb = test_bb;
2155 ce_info.then_bb = then_edge->dest;
2156 ce_info.else_bb = else_edge->dest;
2157 ce_info.pass = pass;
2158
2159 #ifdef IFCVT_INIT_EXTRA_FIELDS
2160 IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2161 #endif
2162
2163 if (find_if_block (&ce_info))
2164 goto success;
2165
2166 if (HAVE_trap && HAVE_conditional_trap
2167 && find_cond_trap (test_bb, then_edge, else_edge))
2168 goto success;
2169
2170 if (post_dominators
2171 && (! HAVE_conditional_execution || reload_completed))
2172 {
2173 if (find_if_case_1 (test_bb, then_edge, else_edge))
2174 goto success;
2175 if (find_if_case_2 (test_bb, then_edge, else_edge))
2176 goto success;
2177 }
2178
2179 return NULL;
2180
2181 success:
2182 if (rtl_dump_file)
2183 fprintf (rtl_dump_file, "Conversion succeeded on pass %d.\n", pass);
2184 return ce_info.test_bb;
2185 }
2186
2187 /* Check whether a block has two edges, one of which falls through to the
2188 next block and the other of which jumps to TARGET_BB, so that we can tell
2189 whether the block is part of an && test or an || test. Return -1 if it
2190 does not, otherwise the number of non-note, non-jump, non-USE/CLOBBER insns in the block. */
2191
2192 static int
2193 block_jumps_and_fallthru_p (cur_bb, target_bb)
2194 basic_block cur_bb;
2195 basic_block target_bb;
2196 {
2197 edge cur_edge;
2198 int fallthru_p = FALSE;
2199 int jump_p = FALSE;
2200 rtx insn;
2201 rtx end;
2202 int n_insns = 0;
2203
2204 if (!cur_bb || !target_bb)
2205 return -1;
2206
2207 /* If no edges, it neither jumps nor falls through. */
2208 if (cur_bb->succ == NULL_EDGE)
2209 return -1;
2210
2211 for (cur_edge = cur_bb->succ;
2212 cur_edge != NULL_EDGE;
2213 cur_edge = cur_edge->succ_next)
2214 {
2215 if (cur_edge->flags & EDGE_COMPLEX)
2216 /* Anything complex isn't what we want. */
2217 return -1;
2218
2219 else if (cur_edge->flags & EDGE_FALLTHRU)
2220 fallthru_p = TRUE;
2221
2222 else if (cur_edge->dest == target_bb)
2223 jump_p = TRUE;
2224
2225 else
2226 return -1;
2227 }
2228
2229 if ((jump_p & fallthru_p) == 0)
2230 return -1;
2231
2232 /* Don't allow calls in the block, since this is used to group && and ||
2233 together for conditional execution support. ??? We should support
2234 conditional execution across calls for IA-64 some day, but for now
2235 this keeps the code simpler. */
2236 end = cur_bb->end;
2237 insn = cur_bb->head;
2238
2239 while (insn != NULL_RTX)
2240 {
2241 if (GET_CODE (insn) == CALL_INSN)
2242 return -1;
2243
2244 if (INSN_P (insn)
2245 && GET_CODE (insn) != JUMP_INSN
2246 && GET_CODE (PATTERN (insn)) != USE
2247 && GET_CODE (PATTERN (insn)) != CLOBBER)
2248 n_insns++;
2249
2250 if (insn == end)
2251 break;
2252
2253 insn = NEXT_INSN (insn);
2254 }
2255
2256 return n_insns;
2257 }
2258
2259 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2260 block. If so, we'll try to convert the insns to not require the branch.
2261 Return TRUE if we were successful at converting the block. */
2262
2263 static int
2264 find_if_block (ce_info)
2265 struct ce_if_block * ce_info;
2266 {
2267 basic_block test_bb = ce_info->test_bb;
2268 basic_block then_bb = ce_info->then_bb;
2269 basic_block else_bb = ce_info->else_bb;
2270 basic_block join_bb = NULL_BLOCK;
2271 edge then_succ = then_bb->succ;
2272 edge else_succ = else_bb->succ;
2273 int then_predecessors;
2274 int else_predecessors;
2275 edge cur_edge;
2276 basic_block next;
2277
2278 ce_info->last_test_bb = test_bb;
2279
2280 /* Discover if any fall through predecessors of the current test basic block
2281 were && tests (which jump to the else block) or || tests (which jump to
2282 the then block). */
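  /* Informally (a sketch for illustration): for "if (a && b)" the block
     testing A falls through to this block (the test of B) and jumps around
     it to the ELSE destination when A is false; for "if (a || b)" the A
     test instead jumps straight to the THEN destination when A is true.  */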
2283 if (HAVE_conditional_execution && reload_completed
2284 && test_bb->pred != NULL_EDGE
2285 && test_bb->pred->pred_next == NULL_EDGE
2286 && test_bb->pred->flags == EDGE_FALLTHRU)
2287 {
2288 basic_block bb = test_bb->pred->src;
2289 basic_block target_bb;
2290 int max_insns = MAX_CONDITIONAL_EXECUTE;
2291 int n_insns;
2292
2293 /* Determine if the preceding block is an && or || block. */
2294 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
2295 {
2296 ce_info->and_and_p = TRUE;
2297 target_bb = else_bb;
2298 }
2299 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
2300 {
2301 ce_info->and_and_p = FALSE;
2302 target_bb = then_bb;
2303 }
2304 else
2305 target_bb = NULL_BLOCK;
2306
2307 if (target_bb && n_insns <= max_insns)
2308 {
2309 int total_insns = 0;
2310 int blocks = 0;
2311
2312 ce_info->last_test_bb = test_bb;
2313
2314 /* Found at least one && or || block, look for more. */
2315 do
2316 {
2317 ce_info->test_bb = test_bb = bb;
2318 total_insns += n_insns;
2319 blocks++;
2320
2321 if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
2322 break;
2323
2324 bb = bb->pred->src;
2325 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
2326 }
2327 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
2328
2329 ce_info->num_multiple_test_blocks = blocks;
2330 ce_info->num_multiple_test_insns = total_insns;
2331
2332 if (ce_info->and_and_p)
2333 ce_info->num_and_and_blocks = blocks;
2334 else
2335 ce_info->num_or_or_blocks = blocks;
2336 }
2337 }
2338
2339 /* Count the number of edges the THEN and ELSE blocks have. */
2340 then_predecessors = 0;
2341 for (cur_edge = then_bb->pred;
2342 cur_edge != NULL_EDGE;
2343 cur_edge = cur_edge->pred_next)
2344 {
2345 then_predecessors++;
2346 if (cur_edge->flags & EDGE_COMPLEX)
2347 return FALSE;
2348 }
2349
2350 else_predecessors = 0;
2351 for (cur_edge = else_bb->pred;
2352 cur_edge != NULL_EDGE;
2353 cur_edge = cur_edge->pred_next)
2354 {
2355 else_predecessors++;
2356 if (cur_edge->flags & EDGE_COMPLEX)
2357 return FALSE;
2358 }
2359
2360 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
2361 other than any || blocks which jump to the THEN block. */
2362 if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
2363 return FALSE;
2364
2365 /* The THEN block of an IF-THEN combo must have zero or one successors. */
2366 if (then_succ != NULL_EDGE
2367 && (then_succ->succ_next != NULL_EDGE
2368 || (then_succ->flags & EDGE_COMPLEX)))
2369 return FALSE;
2370
2371 /* If the THEN block has no successors, conditional execution can still
2372 make a conditional call. Don't do this unless the ELSE block has
2373 only one incoming edge -- the CFG manipulation is too ugly otherwise.
2374 Also check whether the last insn of the THEN block is an indirect jump,
2375 which is listed as not having any successors but confuses the rest of
2376 the CE code processing. ??? We should fix this in the future. */
2377 if (then_succ == NULL)
2378 {
2379 if (else_bb->pred->pred_next == NULL_EDGE)
2380 {
2381 rtx last_insn = then_bb->end;
2382
2383 while (last_insn
2384 && GET_CODE (last_insn) == NOTE
2385 && last_insn != then_bb->head)
2386 last_insn = PREV_INSN (last_insn);
2387
2388 if (last_insn
2389 && GET_CODE (last_insn) == JUMP_INSN
2390 && ! simplejump_p (last_insn))
2391 return FALSE;
2392
2393 join_bb = else_bb;
2394 else_bb = NULL_BLOCK;
2395 }
2396 else
2397 return FALSE;
2398 }
2399
2400 /* If the THEN block's successor is the destination of the other edge out
2401 of the TEST block, then we have an IF-THEN combo without an ELSE. */
2402 else if (then_succ->dest == else_bb)
2403 {
2404 join_bb = else_bb;
2405 else_bb = NULL_BLOCK;
2406 }
2407
2408 /* If the THEN and ELSE blocks meet in a subsequent block, and the ELSE
2409 has exactly one predecessor and one successor, and the outgoing edge
2410 is not complex, then we have an IF-THEN-ELSE combo. */
2411 else if (else_succ != NULL_EDGE
2412 && then_succ->dest == else_succ->dest
2413 && else_bb->pred->pred_next == NULL_EDGE
2414 && else_succ->succ_next == NULL_EDGE
2415 && ! (else_succ->flags & EDGE_COMPLEX))
2416 join_bb = else_succ->dest;
2417
2418 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2419 else
2420 return FALSE;
2421
2422 num_possible_if_blocks++;
2423
2424 if (rtl_dump_file)
2425 {
2426 fprintf (rtl_dump_file, "\nIF-THEN%s block found, pass %d, start block %d [insn %d], then %d [%d]",
2427 (else_bb) ? "-ELSE" : "",
2428 ce_info->pass,
2429 test_bb->index, (test_bb->head) ? (int)INSN_UID (test_bb->head) : -1,
2430 then_bb->index, (then_bb->head) ? (int)INSN_UID (then_bb->head) : -1);
2431
2432 if (else_bb)
2433 fprintf (rtl_dump_file, ", else %d [%d]",
2434 else_bb->index, (else_bb->head) ? (int)INSN_UID (else_bb->head) : -1);
2435
2436 fprintf (rtl_dump_file, ", join %d [%d]",
2437 join_bb->index, (join_bb->head) ? (int)INSN_UID (join_bb->head) : -1);
2438
2439 if (ce_info->num_multiple_test_blocks > 0)
2440 fprintf (rtl_dump_file, ", %d %s block%s last test %d [%d]",
2441 ce_info->num_multiple_test_blocks,
2442 (ce_info->and_and_p) ? "&&" : "||",
2443 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
2444 ce_info->last_test_bb->index,
2445 ((ce_info->last_test_bb->head)
2446 ? (int)INSN_UID (ce_info->last_test_bb->head)
2447 : -1));
2448
2449 fputc ('\n', rtl_dump_file);
2450 }
2451
2452 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
2453 first condition for free, since we've already asserted that there's a
2454 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
2455 we checked the FALLTHRU flag, those are already adjacent to the last IF
2456 block. */
2457 /* ??? As an enhancement, move the ELSE block. Have to deal with
2458 BLOCK notes, if by no other means than aborting the merge if they
2459 exist. Sticky enough I don't want to think about it now. */
2460 next = then_bb;
2461 if (else_bb && (next = next->next_bb) != else_bb)
2462 return FALSE;
2463 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2464 {
2465 if (else_bb)
2466 join_bb = NULL;
2467 else
2468 return FALSE;
2469 }
2470
2471 /* Do the real work. */
2472 ce_info->else_bb = else_bb;
2473 ce_info->join_bb = join_bb;
2474
2475 return process_if_block (ce_info);
2476 }
2477
2478 /* Convert a branch over a trap, or a branch
2479 to a trap, into a conditional trap. */
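
/* For illustration (a rough sketch): source such as

       if (p == 0)
         __builtin_trap ();

   yields a conditional branch around a block that contains only a trap
   insn; on a target with a conditional trap pattern (HAVE_conditional_trap)
   the branch and the trap can be replaced by a single trap that is
   conditional on p == 0.  */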
2480
2481 static int
2482 find_cond_trap (test_bb, then_edge, else_edge)
2483 basic_block test_bb;
2484 edge then_edge, else_edge;
2485 {
2486 basic_block then_bb = then_edge->dest;
2487 basic_block else_bb = else_edge->dest;
2488 basic_block other_bb, trap_bb;
2489 rtx trap, jump, cond, cond_earliest, seq;
2490 enum rtx_code code;
2491
2492 /* Locate the block with the trap instruction. */
2493 /* ??? While we look for no successors, we really ought to allow
2494 EH successors. Need to fix merge_if_block for that to work. */
2495 if ((trap = block_has_only_trap (then_bb)) != NULL)
2496 trap_bb = then_bb, other_bb = else_bb;
2497 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2498 trap_bb = else_bb, other_bb = then_bb;
2499 else
2500 return FALSE;
2501
2502 if (rtl_dump_file)
2503 {
2504 fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2505 test_bb->index, trap_bb->index);
2506 }
2507
2508 /* If this is not a standard conditional jump, we can't parse it. */
2509 jump = test_bb->end;
2510 cond = noce_get_condition (jump, &cond_earliest);
2511 if (! cond)
2512 return FALSE;
2513
2514 /* If the conditional jump is more than just a conditional jump, then
2515 we cannot do if-conversion on this block. */
2516 if (! onlyjump_p (jump))
2517 return FALSE;
2518
2519 /* We must be comparing objects whose modes imply the size. */
2520 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2521 return FALSE;
2522
2523 /* Reverse the comparison code, if necessary. */
2524 code = GET_CODE (cond);
2525 if (then_bb == trap_bb)
2526 {
2527 code = reversed_comparison_code (cond, jump);
2528 if (code == UNKNOWN)
2529 return FALSE;
2530 }
2531
2532 /* Attempt to generate the conditional trap. */
2533 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2534 TRAP_CODE (PATTERN (trap)));
2535 if (seq == NULL)
2536 return FALSE;
2537
2538 /* Emit the new insns before cond_earliest. */
2539 emit_insn_before_scope (seq, cond_earliest, INSN_SCOPE (trap));
2540
2541 /* Delete the trap block if possible. */
2542 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2543 if (trap_bb->pred == NULL)
2544 {
2545 if (post_dominators)
2546 delete_from_dominance_info (post_dominators, trap_bb);
2547 flow_delete_block (trap_bb);
2548 num_removed_blocks++;
2549 }
2550
2551 /* If the non-trap block and the test are now adjacent, merge them.
2552 Otherwise we must insert a direct branch. */
2553 if (test_bb->next_bb == other_bb)
2554 {
2555 struct ce_if_block new_ce_info;
2556 delete_insn (jump);
2557 memset ((PTR) &new_ce_info, '\0', sizeof (new_ce_info));
2558 new_ce_info.test_bb = test_bb;
2559 new_ce_info.then_bb = NULL;
2560 new_ce_info.else_bb = NULL;
2561 new_ce_info.join_bb = other_bb;
2562 merge_if_block (&new_ce_info);
2563 }
2564 else
2565 {
2566 rtx lab, newjump;
2567
2568 lab = JUMP_LABEL (jump);
2569 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2570 LABEL_NUSES (lab) += 1;
2571 JUMP_LABEL (newjump) = lab;
2572 emit_barrier_after (newjump);
2573
2574 delete_insn (jump);
2575 }
2576
2577 return TRUE;
2578 }
2579
2580 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2581 return it. */
2582
2583 static rtx
2584 block_has_only_trap (bb)
2585 basic_block bb;
2586 {
2587 rtx trap;
2588
2589 /* We're not the exit block. */
2590 if (bb == EXIT_BLOCK_PTR)
2591 return NULL_RTX;
2592
2593 /* The block must have no successors. */
2594 if (bb->succ)
2595 return NULL_RTX;
2596
2597 /* The only instruction in the block must be the trap. */
2598 trap = first_active_insn (bb);
2599 if (! (trap == bb->end
2600 && GET_CODE (PATTERN (trap)) == TRAP_IF
2601 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2602 return NULL_RTX;
2603
2604 return trap;
2605 }
2606
2607 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2608 transformable, but not necessarily the other. There need be no
2609 JOIN block.
2610
2611 Return TRUE if we were successful at converting the block.
2612
2613 Cases we'd like to look at:
2614
2615 (1)
2616 if (test) goto over; // x not live
2617 x = a;
2618 goto label;
2619 over:
2620
2621 becomes
2622
2623 x = a;
2624 if (! test) goto label;
2625
2626 (2)
2627 if (test) goto E; // x not live
2628 x = big();
2629 goto L;
2630 E:
2631 x = b;
2632 goto M;
2633
2634 becomes
2635
2636 x = b;
2637 if (test) goto M;
2638 x = big();
2639 goto L;
2640
2641 (3) // This one's really only interesting for targets that can do
2642 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2643 // it results in multiple branches on a cache line, which often
2644 // does not sit well with predictors.
2645
2646 if (test1) goto E; // predicted not taken
2647 x = a;
2648 if (test2) goto F;
2649 ...
2650 E:
2651 x = b;
2652 J:
2653
2654 becomes
2655
2656 x = a;
2657 if (test1) goto E;
2658 if (test2) goto F;
2659
2660 Notes:
2661
2662 (A) Don't do (2) if the branch is predicted against the block we're
2663 eliminating. Do it anyway if we can eliminate a branch; this requires
2664 that the sole successor of the eliminated block postdominate the other
2665 side of the if.
2666
2667 (B) With CE, on (3) we can steal from both sides of the if, creating
2668
2669 if (test1) x = a;
2670 if (!test1) x = b;
2671 if (test1) goto J;
2672 if (test2) goto F;
2673 ...
2674 J:
2675
2676 Again, this is most useful if J postdominates.
2677
2678 (C) CE substitutes for helpful life information.
2679
2680 (D) These heuristics need a lot of work. */
2681
2682 /* Tests for case 1 above. */
2683
2684 static int
2685 find_if_case_1 (test_bb, then_edge, else_edge)
2686 basic_block test_bb;
2687 edge then_edge, else_edge;
2688 {
2689 basic_block then_bb = then_edge->dest;
2690 basic_block else_bb = else_edge->dest, new_bb;
2691 edge then_succ = then_bb->succ;
2692 int then_bb_index;
2693
2694 /* THEN has one successor. */
2695 if (!then_succ || then_succ->succ_next != NULL)
2696 return FALSE;
2697
2698 /* THEN does not fall through, but is not strange either. */
2699 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2700 return FALSE;
2701
2702 /* THEN has one predecessor. */
2703 if (then_bb->pred->pred_next != NULL)
2704 return FALSE;
2705
2706 /* THEN must do something. */
2707 if (forwarder_block_p (then_bb))
2708 return FALSE;
2709
2710 num_possible_if_blocks++;
2711 if (rtl_dump_file)
2712 fprintf (rtl_dump_file,
2713 "\nIF-CASE-1 found, start %d, then %d\n",
2714 test_bb->index, then_bb->index);
2715
2716 /* THEN is small. */
2717 if (count_bb_insns (then_bb) > BRANCH_COST)
2718 return FALSE;
2719
2720 /* Registers set are dead, or are predicable. */
2721 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2722 then_bb->succ->dest, 1))
2723 return FALSE;
2724
2725 /* Conversion went ok, including moving the insns and fixing up the
2726 jump. Adjust the CFG to match. */
2727
2728 bitmap_operation (test_bb->global_live_at_end,
2729 else_bb->global_live_at_start,
2730 then_bb->global_live_at_end, BITMAP_IOR);
2731
2732 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2733 then_bb_index = then_bb->index;
2734 if (post_dominators)
2735 delete_from_dominance_info (post_dominators, then_bb);
2736 flow_delete_block (then_bb);
2737
2738 /* Make rest of code believe that the newly created block is the THEN_BB
2739 block we removed. */
2740 if (new_bb)
2741 {
2742 new_bb->index = then_bb_index;
2743 BASIC_BLOCK (then_bb_index) = new_bb;
2744 }
2745 /* We've possibly created a jump to the next insn; cleanup_cfg will solve
2746 that later. */
2747
2748 num_removed_blocks++;
2749 num_updated_if_blocks++;
2750
2751 return TRUE;
2752 }
2753
2754 /* Test for case 2 above. */
2755
2756 static int
2757 find_if_case_2 (test_bb, then_edge, else_edge)
2758 basic_block test_bb;
2759 edge then_edge, else_edge;
2760 {
2761 basic_block then_bb = then_edge->dest;
2762 basic_block else_bb = else_edge->dest;
2763 edge else_succ = else_bb->succ;
2764 rtx note;
2765
2766 /* ELSE has one successor. */
2767 if (!else_succ || else_succ->succ_next != NULL)
2768 return FALSE;
2769
2770 /* ELSE outgoing edge is not complex. */
2771 if (else_succ->flags & EDGE_COMPLEX)
2772 return FALSE;
2773
2774 /* ELSE has one predecessor. */
2775 if (else_bb->pred->pred_next != NULL)
2776 return FALSE;
2777
2778 /* THEN is not EXIT. */
2779 if (then_bb->index < 0)
2780 return FALSE;
2781
2782 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2783 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2784 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2785 ;
2786 else if (else_succ->dest->index < 0
2787 || dominated_by_p (post_dominators, then_bb,
2788 else_succ->dest))
2789 ;
2790 else
2791 return FALSE;
2792
2793 num_possible_if_blocks++;
2794 if (rtl_dump_file)
2795 fprintf (rtl_dump_file,
2796 "\nIF-CASE-2 found, start %d, else %d\n",
2797 test_bb->index, else_bb->index);
2798
2799 /* ELSE is small. */
2800 if (count_bb_insns (else_bb) > BRANCH_COST)
2801 return FALSE;
2802
2803 /* Registers set are dead, or are predicable. */
2804 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2805 return FALSE;
2806
2807 /* Conversion went ok, including moving the insns and fixing up the
2808 jump. Adjust the CFG to match. */
2809
2810 bitmap_operation (test_bb->global_live_at_end,
2811 then_bb->global_live_at_start,
2812 else_bb->global_live_at_end, BITMAP_IOR);
2813
2814 if (post_dominators)
2815 delete_from_dominance_info (post_dominators, else_bb);
2816 flow_delete_block (else_bb);
2817
2818 num_removed_blocks++;
2819 num_updated_if_blocks++;
2820
2821 /* ??? We may now fallthru from one of THEN's successors into a join
2822 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2823
2824 return TRUE;
2825 }
2826
2827 /* A subroutine of dead_or_predicable called through for_each_rtx.
2828 Return 1 if a memory reference is found. */
2829
2830 static int
2831 find_memory (px, data)
2832 rtx *px;
2833 void *data ATTRIBUTE_UNUSED;
2834 {
2835 return GET_CODE (*px) == MEM;
2836 }
2837
2838 /* Used by the code above to perform the actual rtl transformations.
2839 Return TRUE if successful.
2840
2841 TEST_BB is the block containing the conditional branch. MERGE_BB is the
2842 block containing the code to manipulate. OTHER_BB is the successor of
2843 TEST_BB that is not MERGE_BB. NEW_DEST is the label TEST_BB should be
2844 branching to after the conversion. REVERSEP is true if the sense of the branch should be reversed. */
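
/* For instance (a sketch): in case (1) of the large comment above
   find_if_case_1, MERGE_BB is the block containing "x = a;", OTHER_BB is
   the block at "over:", NEW_DEST is "label", and REVERSEP is nonzero
   because the branch must be rewritten as "if (! test) goto label;".  */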
2845
2846 static int
2847 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2848 basic_block test_bb, merge_bb, other_bb;
2849 basic_block new_dest;
2850 int reversep;
2851 {
2852 rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2853
2854 jump = test_bb->end;
2855
2856 /* Find the extent of the real code in the merge block. */
2857 head = merge_bb->head;
2858 end = merge_bb->end;
2859
2860 if (GET_CODE (head) == CODE_LABEL)
2861 head = NEXT_INSN (head);
2862 if (GET_CODE (head) == NOTE)
2863 {
2864 if (head == end)
2865 {
2866 head = end = NULL_RTX;
2867 goto no_body;
2868 }
2869 head = NEXT_INSN (head);
2870 }
2871
2872 if (GET_CODE (end) == JUMP_INSN)
2873 {
2874 if (head == end)
2875 {
2876 head = end = NULL_RTX;
2877 goto no_body;
2878 }
2879 end = PREV_INSN (end);
2880 }
2881
2882 /* Disable handling dead code by conditional execution if the machine needs
2883 to do anything funny with the tests, etc. */
2884 #ifndef IFCVT_MODIFY_TESTS
2885 if (HAVE_conditional_execution)
2886 {
2887 /* In the conditional execution case, we have things easy. We know
2888 the condition is reversible. We don't have to check life info,
2889 because we're going to conditionally execute the code anyway.
2890 All that's left is making sure the insns involved can actually
2891 be predicated. */
2892
2893 rtx cond, prob_val;
2894
2895 cond = cond_exec_get_condition (jump);
2896 if (! cond)
2897 return FALSE;
2898
2899 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2900 if (prob_val)
2901 prob_val = XEXP (prob_val, 0);
2902
2903 if (reversep)
2904 {
2905 enum rtx_code rev = reversed_comparison_code (cond, jump);
2906 if (rev == UNKNOWN)
2907 return FALSE;
2908 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2909 XEXP (cond, 1));
2910 if (prob_val)
2911 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2912 }
2913
2914 if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
2915 prob_val, 0))
2916 goto cancel;
2917
2918 earliest = jump;
2919 }
2920 else
2921 #endif
2922 {
2923 /* In the non-conditional execution case, we have to verify that there
2924 are no trapping operations, no calls, no references to memory, and
2925 that any registers modified are dead at the branch site. */
2926
2927 rtx insn, cond, prev;
2928 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2929 regset merge_set, tmp, test_live, test_set;
2930 struct propagate_block_info *pbi;
2931 int i, fail = 0;
2932
2933 /* Check for no calls or trapping operations. */
2934 for (insn = head; ; insn = NEXT_INSN (insn))
2935 {
2936 if (GET_CODE (insn) == CALL_INSN)
2937 return FALSE;
2938 if (INSN_P (insn))
2939 {
2940 if (may_trap_p (PATTERN (insn)))
2941 return FALSE;
2942
2943 /* ??? Even non-trapping memories such as stack frame
2944 references must be avoided. For stores, we collect
2945 no lifetime info; for reads, we'd have to assert
2946 true_dependence false against every store in the
2947 TEST range. */
2948 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2949 return FALSE;
2950 }
2951 if (insn == end)
2952 break;
2953 }
2954
2955 if (! any_condjump_p (jump))
2956 return FALSE;
2957
2958 /* Find the extent of the conditional. */
2959 cond = noce_get_condition (jump, &earliest);
2960 if (! cond)
2961 return FALSE;
2962
2963 /* Collect:
2964 MERGE_SET = set of registers set in MERGE_BB
2965 TEST_LIVE = set of registers live at EARLIEST
2966 TEST_SET = set of registers set between EARLIEST and the
2967 end of the block. */
2968
2969 tmp = INITIALIZE_REG_SET (tmp_head);
2970 merge_set = INITIALIZE_REG_SET (merge_set_head);
2971 test_live = INITIALIZE_REG_SET (test_live_head);
2972 test_set = INITIALIZE_REG_SET (test_set_head);
2973
2974 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2975 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2976 since we've already asserted that MERGE_BB is small. */
2977 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2978
2979 /* For small register class machines, don't lengthen lifetimes of
2980 hard registers before reload. */
2981 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2982 {
2983 EXECUTE_IF_SET_IN_BITMAP
2984 (merge_set, 0, i,
2985 {
2986 if (i < FIRST_PSEUDO_REGISTER
2987 && ! fixed_regs[i]
2988 && ! global_regs[i])
2989 fail = 1;
2990 });
2991 }
2992
2993 /* For TEST, we're interested in a range of insns, not a whole block.
2994 Moreover, we're interested in the registers live from OTHER_BB. */
2995
2996 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2997 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2998 0);
2999
3000 for (insn = jump; ; insn = prev)
3001 {
3002 prev = propagate_one_insn (pbi, insn);
3003 if (insn == earliest)
3004 break;
3005 }
3006
3007 free_propagate_block_info (pbi);
3008
3009 /* We can perform the transformation if
3010 MERGE_SET & (TEST_SET | TEST_LIVE)
3011 and
3012 TEST_SET & merge_bb->global_live_at_start
3013 are empty. */
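
  /* Informally (a rough reading of the two conditions): an insn from
     MERGE_BB may not set a register that the TEST range sets or that is
     still live there (e.g. needed by the comparison or along the path to
     OTHER_BB), since it will now execute unconditionally before the branch;
     and the TEST range may not set a register that is live on entry to
     MERGE_BB, since MERGE_BB's insns are being hoisted above that range.  */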
3014
3015 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
3016 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
3017 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3018
3019 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
3020 BITMAP_AND);
3021 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3022
3023 FREE_REG_SET (tmp);
3024 FREE_REG_SET (merge_set);
3025 FREE_REG_SET (test_live);
3026 FREE_REG_SET (test_set);
3027
3028 if (fail)
3029 return FALSE;
3030 }
3031
3032 no_body:
3033 /* We don't want to use normal invert_jump or redirect_jump because
3034 we don't want delete_insn called. Also, we want to do our own
3035 change group management. */
3036
3037 old_dest = JUMP_LABEL (jump);
3038 if (other_bb != new_dest)
3039 {
3040 new_label = block_label (new_dest);
3041 if (reversep
3042 ? ! invert_jump_1 (jump, new_label)
3043 : ! redirect_jump_1 (jump, new_label))
3044 goto cancel;
3045 }
3046
3047 if (! apply_change_group ())
3048 return FALSE;
3049
3050 if (other_bb != new_dest)
3051 {
3052 if (old_dest)
3053 LABEL_NUSES (old_dest) -= 1;
3054 if (new_label)
3055 LABEL_NUSES (new_label) += 1;
3056 JUMP_LABEL (jump) = new_label;
3057 if (reversep)
3058 invert_br_probabilities (jump);
3059
3060 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
3061 if (reversep)
3062 {
3063 gcov_type count, probability;
3064 count = BRANCH_EDGE (test_bb)->count;
3065 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
3066 FALLTHRU_EDGE (test_bb)->count = count;
3067 probability = BRANCH_EDGE (test_bb)->probability;
3068 BRANCH_EDGE (test_bb)->probability
3069 = FALLTHRU_EDGE (test_bb)->probability;
3070 FALLTHRU_EDGE (test_bb)->probability = probability;
3071 update_br_prob_note (test_bb);
3072 }
3073 }
3074
3075 /* Move the insns out of MERGE_BB to before the branch. */
3076 if (head != NULL)
3077 {
3078 if (end == merge_bb->end)
3079 merge_bb->end = PREV_INSN (head);
3080
3081 if (squeeze_notes (&head, &end))
3082 return TRUE;
3083
3084 reorder_insns (head, end, PREV_INSN (earliest));
3085 }
3086
3087 /* Remove the jump and edge if we can. */
3088 if (other_bb == new_dest)
3089 {
3090 delete_insn (jump);
3091 remove_edge (BRANCH_EDGE (test_bb));
3092 /* ??? Can't merge blocks here, as then_bb is still in use.
3093 At minimum, the merge will get done just before bb-reorder. */
3094 }
3095
3096 return TRUE;
3097
3098 cancel:
3099 cancel_changes (0);
3100 return FALSE;
3101 }
3102 \f
3103 /* Main entry point for all if-conversion. */
3104
3105 void
3106 if_convert (x_life_data_ok)
3107 int x_life_data_ok;
3108 {
3109 basic_block bb;
3110 int pass;
3111
3112 num_possible_if_blocks = 0;
3113 num_updated_if_blocks = 0;
3114 num_removed_blocks = 0;
3115 life_data_ok = (x_life_data_ok != 0);
3116
3117 /* Free up basic_block_for_insn so that we don't have to keep it
3118 up to date, either here or in merge_blocks_nomove. */
3119 free_basic_block_vars (1);
3120
3121 /* Compute postdominators if we think we'll use them. */
3122 post_dominators = NULL;
3123 if (HAVE_conditional_execution || life_data_ok)
3124 {
3125 post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
3126 }
3127 if (life_data_ok)
3128 clear_bb_flags ();
3129
3130 /* Go through each of the basic blocks looking for things to convert. If we
3131 have conditional execution, we make multiple passes to allow us to handle
3132 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
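  /* Rough example of why multiple passes help (illustrative sketch only): in

         if (a)
           {
             if (b)
               x = 1;
           }

     the inner IF can be converted on one pass; once its blocks have been
     merged away, the outer IF becomes a simple IF-THEN that a later pass
     can convert as well.  */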
3133 pass = 0;
3134 do
3135 {
3136 cond_exec_changed_p = FALSE;
3137 pass++;
3138
3139 #ifdef IFCVT_MULTIPLE_DUMPS
3140 if (rtl_dump_file && pass > 1)
3141 fprintf (rtl_dump_file, "\n\n========== Pass %d ==========\n", pass);
3142 #endif
3143
3144 FOR_EACH_BB (bb)
3145 {
3146 basic_block new_bb;
3147 while ((new_bb = find_if_header (bb, pass)))
3148 bb = new_bb;
3149 }
3150
3151 #ifdef IFCVT_MULTIPLE_DUMPS
3152 if (rtl_dump_file && cond_exec_changed_p)
3153 print_rtl_with_bb (rtl_dump_file, get_insns ());
3154 #endif
3155 }
3156 while (cond_exec_changed_p);
3157
3158 #ifdef IFCVT_MULTIPLE_DUMPS
3159 if (rtl_dump_file)
3160 fprintf (rtl_dump_file, "\n\n========== no more changes\n");
3161 #endif
3162
3163 if (post_dominators)
3164 free_dominance_info (post_dominators);
3165
3166 if (rtl_dump_file)
3167 fflush (rtl_dump_file);
3168
3169 clear_aux_for_blocks ();
3170
3171 /* Rebuild life info for basic blocks that require it. */
3172 if (num_removed_blocks && life_data_ok)
3173 {
3174 /* If we allocated new pseudos, we must resize the array for sched1. */
3175 if (max_regno < max_reg_num ())
3176 {
3177 max_regno = max_reg_num ();
3178 allocate_reg_info (max_regno, FALSE, FALSE);
3179 }
3180 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3181 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3182 | PROP_KILL_DEAD_CODE);
3183 }
3184
3185 /* Write the final stats. */
3186 if (rtl_dump_file && num_possible_if_blocks > 0)
3187 {
3188 fprintf (rtl_dump_file,
3189 "\n%d possible IF blocks searched.\n",
3190 num_possible_if_blocks);
3191 fprintf (rtl_dump_file,
3192 "%d IF blocks converted.\n",
3193 num_updated_if_blocks);
3194 fprintf (rtl_dump_file,
3195 "%d basic blocks deleted.\n\n\n",
3196 num_removed_blocks);
3197 }
3198
3199 #ifdef ENABLE_CHECKING
3200 verify_flow_info ();
3201 #endif
3202 }