[gcc.git] / gcc / ifcvt.c
1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25
26 #include "rtl.h"
27 #include "regs.h"
28 #include "function.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "except.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "expr.h"
36 #include "real.h"
37 #include "output.h"
38 #include "optabs.h"
39 #include "toplev.h"
40 #include "tm_p.h"
41 #include "cfgloop.h"
42 #include "target.h"
43
44
45 #ifndef HAVE_conditional_execution
46 #define HAVE_conditional_execution 0
47 #endif
48 #ifndef HAVE_conditional_move
49 #define HAVE_conditional_move 0
50 #endif
51 #ifndef HAVE_incscc
52 #define HAVE_incscc 0
53 #endif
54 #ifndef HAVE_decscc
55 #define HAVE_decscc 0
56 #endif
57 #ifndef HAVE_trap
58 #define HAVE_trap 0
59 #endif
60 #ifndef HAVE_conditional_trap
61 #define HAVE_conditional_trap 0
62 #endif
63
64 #ifndef MAX_CONDITIONAL_EXECUTE
65 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
66 #endif
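/* Purely illustrative: with a BRANCH_COST of 2, the default budget is 3
   insns; cond_exec_process_if_block doubles this for IF-THEN-ELSE blocks,
   so up to 6 insns total across the two arms may be converted.  */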
67
68 #define NULL_EDGE ((struct edge_def *)NULL)
69 #define NULL_BLOCK ((struct basic_block_def *)NULL)
70
71 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
72 static int num_possible_if_blocks;
73
74 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
75 execution. */
76 static int num_updated_if_blocks;
77
78 /* # of changes made which require life information to be updated. */
79 static int num_true_changes;
80
81 /* Whether conditional execution changes were made. */
82 static int cond_exec_changed_p;
83
84 /* True if life data is ok at present. */
85 static bool life_data_ok;
86
87 /* The post-dominator relation on the original block numbers. */
88 static dominance_info post_dominators;
89
90 /* Forward references. */
91 static int count_bb_insns (basic_block);
92 static rtx first_active_insn (basic_block);
93 static rtx last_active_insn (basic_block, int);
94 static int seq_contains_jump (rtx);
95 static basic_block block_fallthru (basic_block);
96 static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
97 static rtx cond_exec_get_condition (rtx);
98 static int cond_exec_process_if_block (ce_if_block_t *, int);
99 static rtx noce_get_condition (rtx, rtx *);
100 static int noce_operand_ok (rtx);
101 static int noce_process_if_block (ce_if_block_t *);
102 static int process_if_block (ce_if_block_t *);
103 static void merge_if_block (ce_if_block_t *);
104 static int find_cond_trap (basic_block, edge, edge);
105 static basic_block find_if_header (basic_block, int);
106 static int block_jumps_and_fallthru_p (basic_block, basic_block);
107 static int find_if_block (ce_if_block_t *);
108 static int find_if_case_1 (basic_block, edge, edge);
109 static int find_if_case_2 (basic_block, edge, edge);
110 static int find_memory (rtx *, void *);
111 static int dead_or_predicable (basic_block, basic_block, basic_block,
112 basic_block, int);
113 static void noce_emit_move_insn (rtx, rtx);
114 static rtx block_has_only_trap (basic_block);
115 static void mark_loop_exit_edges (void);
116 \f
117 /* Sets EDGE_LOOP_EXIT flag for all loop exits. */
118 static void
119 mark_loop_exit_edges (void)
120 {
121 struct loops loops;
122 basic_block bb;
123 edge e;
124
125 flow_loops_find (&loops, LOOP_TREE);
126
127 if (loops.num > 1)
128 {
129 FOR_EACH_BB (bb)
130 {
131 for (e = bb->succ; e; e = e->succ_next)
132 {
133 if (find_common_loop (bb->loop_father, e->dest->loop_father)
134 != bb->loop_father)
135 e->flags |= EDGE_LOOP_EXIT;
136 else
137 e->flags &= ~EDGE_LOOP_EXIT;
138 }
139 }
140 }
141
142 flow_loops_free (&loops);
143 }
144
145 /* Count the number of non-jump active insns in BB. */
146
147 static int
148 count_bb_insns (basic_block bb)
149 {
150 int count = 0;
151 rtx insn = BB_HEAD (bb);
152
153 while (1)
154 {
155 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
156 count++;
157
158 if (insn == BB_END (bb))
159 break;
160 insn = NEXT_INSN (insn);
161 }
162
163 return count;
164 }
165
166 /* Return the first non-jump active insn in the basic block. */
167
168 static rtx
169 first_active_insn (basic_block bb)
170 {
171 rtx insn = BB_HEAD (bb);
172
173 if (GET_CODE (insn) == CODE_LABEL)
174 {
175 if (insn == BB_END (bb))
176 return NULL_RTX;
177 insn = NEXT_INSN (insn);
178 }
179
180 while (GET_CODE (insn) == NOTE)
181 {
182 if (insn == BB_END (bb))
183 return NULL_RTX;
184 insn = NEXT_INSN (insn);
185 }
186
187 if (GET_CODE (insn) == JUMP_INSN)
188 return NULL_RTX;
189
190 return insn;
191 }
192
193 /* Return the last active (non-jump) insn in the basic block. */
194
195 static rtx
196 last_active_insn (basic_block bb, int skip_use_p)
197 {
198 rtx insn = BB_END (bb);
199 rtx head = BB_HEAD (bb);
200
201 while (GET_CODE (insn) == NOTE
202 || GET_CODE (insn) == JUMP_INSN
203 || (skip_use_p
204 && GET_CODE (insn) == INSN
205 && GET_CODE (PATTERN (insn)) == USE))
206 {
207 if (insn == head)
208 return NULL_RTX;
209 insn = PREV_INSN (insn);
210 }
211
212 if (GET_CODE (insn) == CODE_LABEL)
213 return NULL_RTX;
214
215 return insn;
216 }
217
218 /* It is possible, especially when expanding multi-word arithmetic,
219 for the expanders to have emitted jumps. Search through the
220 sequence and return TRUE if a jump exists so that we can abort
221 the conversion. */
222
223 static int
224 seq_contains_jump (rtx insn)
225 {
226 while (insn)
227 {
228 if (GET_CODE (insn) == JUMP_INSN)
229 return 1;
230 insn = NEXT_INSN (insn);
231 }
232 return 0;
233 }
234
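/* Return the destination of the fall-through edge leaving BB, or
   NULL_BLOCK if BB has no fall-through successor.  */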
235 static basic_block
236 block_fallthru (basic_block bb)
237 {
238 edge e;
239
240 for (e = bb->succ;
241 e != NULL_EDGE && (e->flags & EDGE_FALLTHRU) == 0;
242 e = e->succ_next)
243 ;
244
245 return (e) ? e->dest : NULL_BLOCK;
246 }
247 \f
248 /* Go through a bunch of insns, converting them to conditional
249 execution format if possible. Return TRUE if all of the non-note
250 insns were processed. */
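/* A sketch of the effect (not target-specific): each ordinary insn
   pattern P in the range is rewritten as (cond_exec TEST P); if P is
   already (cond_exec T2 P2), the result is (cond_exec (and TEST T2) P2),
   so nested predication collapses into a single conjoined test.  */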
251
252 static int
253 cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
254 /* if block information */rtx start,
255 /* first insn to look at */rtx end,
256 /* last insn to look at */rtx test,
257 /* conditional execution test */rtx prob_val,
258 /* probability of branch taken. */int mod_ok)
259 {
260 int must_be_last = FALSE;
261 rtx insn;
262 rtx xtest;
263 rtx pattern;
264
265 if (!start || !end)
266 return FALSE;
267
268 for (insn = start; ; insn = NEXT_INSN (insn))
269 {
270 if (GET_CODE (insn) == NOTE)
271 goto insn_done;
272
273 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
274 abort ();
275
276 /* Remove USE insns that get in the way. */
277 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
278 {
279 /* ??? Ug. Actually unlinking the thing is problematic,
280 given what we'd have to coordinate with our callers. */
281 PUT_CODE (insn, NOTE);
282 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
283 NOTE_SOURCE_FILE (insn) = 0;
284 goto insn_done;
285 }
286
287 /* Last insn wasn't last? */
288 if (must_be_last)
289 return FALSE;
290
291 if (modified_in_p (test, insn))
292 {
293 if (!mod_ok)
294 return FALSE;
295 must_be_last = TRUE;
296 }
297
298 /* Now build the conditional form of the instruction. */
299 pattern = PATTERN (insn);
300 xtest = copy_rtx (test);
301
302 /* If this is already a COND_EXEC, rewrite the test to be an AND of the
303 two conditions. */
304 if (GET_CODE (pattern) == COND_EXEC)
305 {
306 if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
307 return FALSE;
308
309 xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
310 COND_EXEC_TEST (pattern));
311 pattern = COND_EXEC_CODE (pattern);
312 }
313
314 pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
315
316 /* If the machine needs to modify the insn being conditionally executed,
317 say for example to force a constant integer operand into a temp
318 register, do so here. */
319 #ifdef IFCVT_MODIFY_INSN
320 IFCVT_MODIFY_INSN (ce_info, pattern, insn);
321 if (! pattern)
322 return FALSE;
323 #endif
324
325 validate_change (insn, &PATTERN (insn), pattern, 1);
326
327 if (GET_CODE (insn) == CALL_INSN && prob_val)
328 validate_change (insn, &REG_NOTES (insn),
329 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
330 REG_NOTES (insn)), 1);
331
332 insn_done:
333 if (insn == end)
334 break;
335 }
336
337 return TRUE;
338 }
339
340 /* Return the condition for a jump. Do not do any special processing. */
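/* For reference, a simplified sketch of the expected shape: the pc_set of
   a conditional jump looks like
       (set (pc) (if_then_else COND (label_ref L) (pc)))
   when the branch is taken on COND; if the label instead sits in the
   "else" arm, the reversed comparison is returned below.  */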
341
342 static rtx
343 cond_exec_get_condition (rtx jump)
344 {
345 rtx test_if, cond;
346
347 if (any_condjump_p (jump))
348 test_if = SET_SRC (pc_set (jump));
349 else
350 return NULL_RTX;
351 cond = XEXP (test_if, 0);
352
353 /* If this branches to JUMP_LABEL when the condition is false,
354 reverse the condition. */
355 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
356 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
357 {
358 enum rtx_code rev = reversed_comparison_code (cond, jump);
359 if (rev == UNKNOWN)
360 return NULL_RTX;
361
362 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
363 XEXP (cond, 1));
364 }
365
366 return cond;
367 }
368
369 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
370 to conditional execution. Return TRUE if we were successful at
371 converting the block. */
372
373 static int
374 cond_exec_process_if_block (ce_if_block_t * ce_info,
375 /* if block information */int do_multiple_p)
376 {
377 basic_block test_bb = ce_info->test_bb; /* last test block */
378 basic_block then_bb = ce_info->then_bb; /* THEN */
379 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
380 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
381 rtx then_start; /* first insn in THEN block */
382 rtx then_end; /* last insn + 1 in THEN block */
383 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
384 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
385 int max; /* max # of insns to convert. */
386 int then_mod_ok; /* whether conditional mods are ok in THEN */
387 rtx true_expr; /* test for else block insns */
388 rtx false_expr; /* test for then block insns */
389 rtx true_prob_val; /* probability of else block */
390 rtx false_prob_val; /* probability of then block */
391 int n_insns;
392 enum rtx_code false_code;
393
394 /* If test is comprised of && or || elements, and we've failed at handling
395 all of them together, just use the last test if it is the special case of
396 && elements without an ELSE block. */
397 if (!do_multiple_p && ce_info->num_multiple_test_blocks)
398 {
399 if (else_bb || ! ce_info->and_and_p)
400 return FALSE;
401
402 ce_info->test_bb = test_bb = ce_info->last_test_bb;
403 ce_info->num_multiple_test_blocks = 0;
404 ce_info->num_and_and_blocks = 0;
405 ce_info->num_or_or_blocks = 0;
406 }
407
408 /* Find the conditional jump to the ELSE or JOIN part, and isolate
409 the test. */
410 test_expr = cond_exec_get_condition (BB_END (test_bb));
411 if (! test_expr)
412 return FALSE;
413
414 /* If the conditional jump is more than just a conditional jump,
415 then we can not do conditional execution conversion on this block. */
416 if (! onlyjump_p (BB_END (test_bb)))
417 return FALSE;
418
419 /* Collect the bounds of where we're to search, skipping any labels, jumps
420 and notes at the beginning and end of the block. Then count the total
421 number of insns and see if it is small enough to convert. */
422 then_start = first_active_insn (then_bb);
423 then_end = last_active_insn (then_bb, TRUE);
424 n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
425 max = MAX_CONDITIONAL_EXECUTE;
426
427 if (else_bb)
428 {
429 max *= 2;
430 else_start = first_active_insn (else_bb);
431 else_end = last_active_insn (else_bb, TRUE);
432 n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
433 }
434
435 if (n_insns > max)
436 return FALSE;
437
438 /* Map test_expr/test_jump into the appropriate MD tests to use on
439 the conditionally executed code. */
440
441 true_expr = test_expr;
442
443 false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
444 if (false_code != UNKNOWN)
445 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
446 XEXP (true_expr, 0), XEXP (true_expr, 1));
447 else
448 false_expr = NULL_RTX;
449
450 #ifdef IFCVT_MODIFY_TESTS
451 /* If the machine description needs to modify the tests, such as setting a
452 conditional execution register from a comparison, it can do so here. */
453 IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
454
455 /* See if the conversion failed */
456 if (!true_expr || !false_expr)
457 goto fail;
458 #endif
459
460 true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
461 if (true_prob_val)
462 {
463 true_prob_val = XEXP (true_prob_val, 0);
464 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
465 }
466 else
467 false_prob_val = NULL_RTX;
468
469 /* If we have && or || tests, do them here. These tests are in the adjacent
470 blocks after the first block containing the test. */
471 if (ce_info->num_multiple_test_blocks > 0)
472 {
473 basic_block bb = test_bb;
474 basic_block last_test_bb = ce_info->last_test_bb;
475
476 if (! false_expr)
477 goto fail;
478
479 do
480 {
481 rtx start, end;
482 rtx t, f;
483
484 bb = block_fallthru (bb);
485 start = first_active_insn (bb);
486 end = last_active_insn (bb, TRUE);
487 if (start
488 && ! cond_exec_process_insns (ce_info, start, end, false_expr,
489 false_prob_val, FALSE))
490 goto fail;
491
492 /* If the conditional jump is more than just a conditional jump, then
493 we can not do conditional execution conversion on this block. */
494 if (! onlyjump_p (BB_END (bb)))
495 goto fail;
496
497 /* Find the conditional jump and isolate the test. */
498 t = cond_exec_get_condition (BB_END (bb));
499 if (! t)
500 goto fail;
501
502 f = gen_rtx_fmt_ee (reverse_condition (GET_CODE (t)),
503 GET_MODE (t),
504 XEXP (t, 0),
505 XEXP (t, 1));
506
507 if (ce_info->and_and_p)
508 {
509 t = gen_rtx_AND (GET_MODE (t), true_expr, t);
510 f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
511 }
512 else
513 {
514 t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
515 f = gen_rtx_AND (GET_MODE (t), false_expr, f);
516 }
517
518 /* If the machine description needs to modify the tests, such as
519 setting a conditional execution register from a comparison, it can
520 do so here. */
521 #ifdef IFCVT_MODIFY_MULTIPLE_TESTS
522 IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
523
524 /* See if the conversion failed */
525 if (!t || !f)
526 goto fail;
527 #endif
528
529 true_expr = t;
530 false_expr = f;
531 }
532 while (bb != last_test_bb);
533 }
534
535 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
536 on the THEN block. */
537 then_mod_ok = (else_bb == NULL_BLOCK);
538
539 /* Go through the THEN and ELSE blocks converting the insns if possible
540 to conditional execution. */
541
542 if (then_end
543 && (! false_expr
544 || ! cond_exec_process_insns (ce_info, then_start, then_end,
545 false_expr, false_prob_val,
546 then_mod_ok)))
547 goto fail;
548
549 if (else_bb && else_end
550 && ! cond_exec_process_insns (ce_info, else_start, else_end,
551 true_expr, true_prob_val, TRUE))
552 goto fail;
553
554 /* If we cannot apply the changes, fail. Do not go through the normal fail
555 processing, since apply_change_group will call cancel_changes. */
556 if (! apply_change_group ())
557 {
558 #ifdef IFCVT_MODIFY_CANCEL
559 /* Cancel any machine dependent changes. */
560 IFCVT_MODIFY_CANCEL (ce_info);
561 #endif
562 return FALSE;
563 }
564
565 #ifdef IFCVT_MODIFY_FINAL
566 /* Do any machine dependent final modifications */
567 IFCVT_MODIFY_FINAL (ce_info);
568 #endif
569
570 /* Conversion succeeded. */
571 if (rtl_dump_file)
572 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
573 n_insns, (n_insns == 1) ? " was" : "s were");
574
575 /* Merge the blocks! */
576 merge_if_block (ce_info);
577 cond_exec_changed_p = TRUE;
578 return TRUE;
579
580 fail:
581 #ifdef IFCVT_MODIFY_CANCEL
582 /* Cancel any machine dependent changes. */
583 IFCVT_MODIFY_CANCEL (ce_info);
584 #endif
585
586 cancel_changes (0);
587 return FALSE;
588 }
589 \f
590 /* Used by noce_process_if_block to communicate with its subroutines.
591
592 The subroutines know that A and B may be evaluated freely. They
593 know that X is a register. They should insert new instructions
594 before cond_earliest. */
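/* Illustrative mapping (an assumed example, not taken from this file):
   for the source-level pattern "if (c) x = a; else x = b;", TEST_BB is
   the block ending in JUMP, COND is the comparison controlling JUMP,
   INSN_A and INSN_B are the two sets of X, and A and B are their source
   operands.  */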
595
596 struct noce_if_info
597 {
598 basic_block test_bb;
599 rtx insn_a, insn_b;
600 rtx x, a, b;
601 rtx jump, cond, cond_earliest;
602 };
603
604 static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
605 static int noce_try_move (struct noce_if_info *);
606 static int noce_try_store_flag (struct noce_if_info *);
607 static int noce_try_addcc (struct noce_if_info *);
608 static int noce_try_store_flag_constants (struct noce_if_info *);
609 static int noce_try_store_flag_mask (struct noce_if_info *);
610 static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
611 rtx, rtx, rtx);
612 static int noce_try_cmove (struct noce_if_info *);
613 static int noce_try_cmove_arith (struct noce_if_info *);
614 static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
615 static int noce_try_minmax (struct noce_if_info *);
616 static int noce_try_abs (struct noce_if_info *);
617
618 /* Helper function for noce_try_store_flag*. */
619
620 static rtx
621 noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
622 int normalize)
623 {
624 rtx cond = if_info->cond;
625 int cond_complex;
626 enum rtx_code code;
627
628 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
629 || ! general_operand (XEXP (cond, 1), VOIDmode));
630
631 /* If earliest == jump, or when the condition is complex, try to
632 build the store_flag insn directly. */
633
634 if (cond_complex)
635 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
636
637 if (reversep)
638 code = reversed_comparison_code (cond, if_info->jump);
639 else
640 code = GET_CODE (cond);
641
642 if ((if_info->cond_earliest == if_info->jump || cond_complex)
643 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
644 {
645 rtx tmp;
646
647 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
648 XEXP (cond, 1));
649 tmp = gen_rtx_SET (VOIDmode, x, tmp);
650
651 start_sequence ();
652 tmp = emit_insn (tmp);
653
654 if (recog_memoized (tmp) >= 0)
655 {
656 tmp = get_insns ();
657 end_sequence ();
658 emit_insn (tmp);
659
660 if_info->cond_earliest = if_info->jump;
661
662 return x;
663 }
664
665 end_sequence ();
666 }
667
668 /* Don't even try if the comparison operands or the mode of X are weird. */
669 if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
670 return NULL_RTX;
671
672 return emit_store_flag (x, code, XEXP (cond, 0),
673 XEXP (cond, 1), VOIDmode,
674 (code == LTU || code == LEU
675 || code == GEU || code == GTU), normalize);
676 }
677
678 /* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
679 X is the destination/target and Y is the value to copy. */
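/* Example with hypothetical operands: when X is
       (strict_low_part (subreg:HI (reg:SI r) 0))
   only the HImode portion of r selected by the subreg may be written, so
   the value is stored with store_bit_field rather than a plain move.  */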
680
681 static void
682 noce_emit_move_insn (rtx x, rtx y)
683 {
684 enum machine_mode outmode, inmode;
685 rtx outer, inner;
686 int bitpos;
687
688 if (GET_CODE (x) != STRICT_LOW_PART)
689 {
690 emit_move_insn (x, y);
691 return;
692 }
693
694 outer = XEXP (x, 0);
695 inner = XEXP (outer, 0);
696 outmode = GET_MODE (outer);
697 inmode = GET_MODE (inner);
698 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
699 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
700 GET_MODE_BITSIZE (inmode));
701 }
702
703 /* Unshare sequence SEQ produced by if conversion. Take care to mark
704 all arguments that may be shared with the outer instruction stream. */
705 static void
706 unshare_ifcvt_sequence (struct noce_if_info *if_info, rtx seq)
707 {
708 set_used_flags (if_info->x);
709 set_used_flags (if_info->cond);
710 unshare_all_rtl_in_chain (seq);
711 }
712
713 /* Convert "if (a != b) x = a; else x = b" into "x = a" and
714 "if (a == b) x = a; else x = b" into "x = b". */
715
716 static int
717 noce_try_move (struct noce_if_info *if_info)
718 {
719 rtx cond = if_info->cond;
720 enum rtx_code code = GET_CODE (cond);
721 rtx y, seq;
722
723 if (code != NE && code != EQ)
724 return FALSE;
725
726 /* This optimization isn't valid if either A or B could be a NaN
727 or a signed zero. */
728 if (HONOR_NANS (GET_MODE (if_info->x))
729 || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
730 return FALSE;
731
732 /* Check whether the operands of the comparison are A and B in
733 either order. */
734 if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
735 && rtx_equal_p (if_info->b, XEXP (cond, 1)))
736 || (rtx_equal_p (if_info->a, XEXP (cond, 1))
737 && rtx_equal_p (if_info->b, XEXP (cond, 0))))
738 {
739 y = (code == EQ) ? if_info->a : if_info->b;
740
741 /* Avoid generating the move if the source is the destination. */
742 if (! rtx_equal_p (if_info->x, y))
743 {
744 start_sequence ();
745 noce_emit_move_insn (if_info->x, y);
746 seq = get_insns ();
747 unshare_ifcvt_sequence (if_info, seq);
748 end_sequence ();
749 emit_insn_before_setloc (seq, if_info->jump,
750 INSN_LOCATOR (if_info->insn_a));
751 }
752 return TRUE;
753 }
754 return FALSE;
755 }
756
757 /* Convert "if (test) x = 1; else x = 0".
758
759 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
760 tried in noce_try_store_flag_constants after noce_try_cmove has had
761 a go at the conversion. */
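/* Sketch of the net effect (assuming a target where STORE_FLAG_VALUE is
   1): the source-level "if (test) x = 1; else x = 0" becomes
   "x = (test != 0)", emitted as a single store-flag sequence when the
   target provides one.  */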
762
763 static int
764 noce_try_store_flag (struct noce_if_info *if_info)
765 {
766 int reversep;
767 rtx target, seq;
768
769 if (GET_CODE (if_info->b) == CONST_INT
770 && INTVAL (if_info->b) == STORE_FLAG_VALUE
771 && if_info->a == const0_rtx)
772 reversep = 0;
773 else if (if_info->b == const0_rtx
774 && GET_CODE (if_info->a) == CONST_INT
775 && INTVAL (if_info->a) == STORE_FLAG_VALUE
776 && (reversed_comparison_code (if_info->cond, if_info->jump)
777 != UNKNOWN))
778 reversep = 1;
779 else
780 return FALSE;
781
782 start_sequence ();
783
784 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
785 if (target)
786 {
787 if (target != if_info->x)
788 noce_emit_move_insn (if_info->x, target);
789
790 seq = get_insns ();
791 unshare_ifcvt_sequence (if_info, seq);
792 end_sequence ();
793 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
794
795 return TRUE;
796 }
797 else
798 {
799 end_sequence ();
800 return FALSE;
801 }
802 }
803
804 /* Convert "if (test) x = a; else x = b", for A and B constant. */
805
806 static int
807 noce_try_store_flag_constants (struct noce_if_info *if_info)
808 {
809 rtx target, seq;
810 int reversep;
811 HOST_WIDE_INT itrue, ifalse, diff, tmp;
812 int normalize, can_reverse;
813 enum machine_mode mode;
814
815 if (! no_new_pseudos
816 && GET_CODE (if_info->a) == CONST_INT
817 && GET_CODE (if_info->b) == CONST_INT)
818 {
819 mode = GET_MODE (if_info->x);
820 ifalse = INTVAL (if_info->a);
821 itrue = INTVAL (if_info->b);
822
823 /* Make sure we can represent the difference between the two values. */
824 if ((itrue - ifalse > 0)
825 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
826 return FALSE;
827
828 diff = trunc_int_for_mode (itrue - ifalse, mode);
829
830 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
831 != UNKNOWN);
832
833 reversep = 0;
834 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
835 normalize = 0;
836 else if (ifalse == 0 && exact_log2 (itrue) >= 0
837 && (STORE_FLAG_VALUE == 1
838 || BRANCH_COST >= 2))
839 normalize = 1;
840 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
841 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
842 normalize = 1, reversep = 1;
843 else if (itrue == -1
844 && (STORE_FLAG_VALUE == -1
845 || BRANCH_COST >= 2))
846 normalize = -1;
847 else if (ifalse == -1 && can_reverse
848 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
849 normalize = -1, reversep = 1;
850 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
851 || BRANCH_COST >= 3)
852 normalize = -1;
853 else
854 return FALSE;
855
856 if (reversep)
857 {
858 tmp = itrue; itrue = ifalse; ifalse = tmp;
859 diff = trunc_int_for_mode (-diff, mode);
860 }
861
862 start_sequence ();
863 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
864 if (! target)
865 {
866 end_sequence ();
867 return FALSE;
868 }
869
870 /* if (test) x = 3; else x = 4;
871 => x = 3 + (test == 0); */
872 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
873 {
874 target = expand_simple_binop (mode,
875 (diff == STORE_FLAG_VALUE
876 ? PLUS : MINUS),
877 GEN_INT (ifalse), target, if_info->x, 0,
878 OPTAB_WIDEN);
879 }
880
881 /* if (test) x = 8; else x = 0;
882 => x = (test != 0) << 3; */
883 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
884 {
885 target = expand_simple_binop (mode, ASHIFT,
886 target, GEN_INT (tmp), if_info->x, 0,
887 OPTAB_WIDEN);
888 }
889
890 /* if (test) x = -1; else x = b;
891 => x = -(test != 0) | b; */
892 else if (itrue == -1)
893 {
894 target = expand_simple_binop (mode, IOR,
895 target, GEN_INT (ifalse), if_info->x, 0,
896 OPTAB_WIDEN);
897 }
898
899 /* if (test) x = a; else x = b;
900 => x = (-(test != 0) & (b - a)) + a; */
901 else
902 {
903 target = expand_simple_binop (mode, AND,
904 target, GEN_INT (diff), if_info->x, 0,
905 OPTAB_WIDEN);
906 if (target)
907 target = expand_simple_binop (mode, PLUS,
908 target, GEN_INT (ifalse),
909 if_info->x, 0, OPTAB_WIDEN);
910 }
911
912 if (! target)
913 {
914 end_sequence ();
915 return FALSE;
916 }
917
918 if (target != if_info->x)
919 noce_emit_move_insn (if_info->x, target);
920
921 seq = get_insns ();
922 unshare_ifcvt_sequence (if_info, seq);
923 end_sequence ();
924
925 if (seq_contains_jump (seq))
926 return FALSE;
927
928 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
929
930 return TRUE;
931 }
932
933 return FALSE;
934 }
935
936 /* Convert "if (test) foo++" into "foo += (test != 0)", and
937 similarly for "foo--". */
938
939 static int
940 noce_try_addcc (struct noce_if_info *if_info)
941 {
942 rtx target, seq;
943 int subtract, normalize;
944
945 if (! no_new_pseudos
946 && GET_CODE (if_info->a) == PLUS
947 && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
948 && (reversed_comparison_code (if_info->cond, if_info->jump)
949 != UNKNOWN))
950 {
951 rtx cond = if_info->cond;
952 enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
953
954 /* First try to use addcc pattern. */
955 if (general_operand (XEXP (cond, 0), VOIDmode)
956 && general_operand (XEXP (cond, 1), VOIDmode))
957 {
958 start_sequence ();
959 target = emit_conditional_add (if_info->x, code,
960 XEXP (cond, 0),
961 XEXP (cond, 1),
962 VOIDmode,
963 if_info->b,
964 XEXP (if_info->a, 1),
965 GET_MODE (if_info->x),
966 (code == LTU || code == GEU
967 || code == LEU || code == GTU));
968 if (target)
969 {
970 if (target != if_info->x)
971 noce_emit_move_insn (if_info->x, target);
972
973 seq = get_insns ();
974 unshare_ifcvt_sequence (if_info, seq);
975 end_sequence ();
976 emit_insn_before_setloc (seq, if_info->jump,
977 INSN_LOCATOR (if_info->insn_a));
978 return TRUE;
979 }
980 end_sequence ();
981 }
982
983 /* If that fails, construct conditional increment or decrement using
984 setcc. */
985 if (BRANCH_COST >= 2
986 && (XEXP (if_info->a, 1) == const1_rtx
987 || XEXP (if_info->a, 1) == constm1_rtx))
988 {
989 start_sequence ();
990 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
991 subtract = 0, normalize = 0;
992 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
993 subtract = 1, normalize = 0;
994 else
995 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
996
997
998 target = noce_emit_store_flag (if_info,
999 gen_reg_rtx (GET_MODE (if_info->x)),
1000 1, normalize);
1001
1002 if (target)
1003 target = expand_simple_binop (GET_MODE (if_info->x),
1004 subtract ? MINUS : PLUS,
1005 if_info->b, target, if_info->x,
1006 0, OPTAB_WIDEN);
1007 if (target)
1008 {
1009 if (target != if_info->x)
1010 noce_emit_move_insn (if_info->x, target);
1011
1012 seq = get_insns ();
1013 unshare_ifcvt_sequence (if_info, seq);
1014 end_sequence ();
1015
1016 if (seq_contains_jump (seq))
1017 return FALSE;
1018
1019 emit_insn_before_setloc (seq, if_info->jump,
1020 INSN_LOCATOR (if_info->insn_a));
1021
1022 return TRUE;
1023 }
1024 end_sequence ();
1025 }
1026 }
1027
1028 return FALSE;
1029 }
1030
1031 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
1032
1033 static int
1034 noce_try_store_flag_mask (struct noce_if_info *if_info)
1035 {
1036 rtx target, seq;
1037 int reversep;
1038
1039 reversep = 0;
1040 if (! no_new_pseudos
1041 && (BRANCH_COST >= 2
1042 || STORE_FLAG_VALUE == -1)
1043 && ((if_info->a == const0_rtx
1044 && rtx_equal_p (if_info->b, if_info->x))
1045 || ((reversep = (reversed_comparison_code (if_info->cond,
1046 if_info->jump)
1047 != UNKNOWN))
1048 && if_info->b == const0_rtx
1049 && rtx_equal_p (if_info->a, if_info->x))))
1050 {
1051 start_sequence ();
1052 target = noce_emit_store_flag (if_info,
1053 gen_reg_rtx (GET_MODE (if_info->x)),
1054 reversep, -1);
1055 if (target)
1056 target = expand_simple_binop (GET_MODE (if_info->x), AND,
1057 if_info->x,
1058 target, if_info->x, 0,
1059 OPTAB_WIDEN);
1060
1061 if (target)
1062 {
1063 if (target != if_info->x)
1064 noce_emit_move_insn (if_info->x, target);
1065
1066 seq = get_insns ();
1067 unshare_ifcvt_sequence (if_info, seq);
1068 end_sequence ();
1069
1070 if (seq_contains_jump (seq))
1071 return FALSE;
1072
1073 emit_insn_before_setloc (seq, if_info->jump,
1074 INSN_LOCATOR (if_info->insn_a));
1075
1076 return TRUE;
1077 }
1078
1079 end_sequence ();
1080 }
1081
1082 return FALSE;
1083 }
1084
1085 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
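/* A sketch of what gets emitted (not target-specific): when possible this
   produces a single insn of the form
       (set X (if_then_else (CODE CMP_A CMP_B) VTRUE VFALSE))
   either built directly when the condition sits at the jump, or through
   emit_conditional_move.  */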
1086
1087 static rtx
1088 noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
1089 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
1090 {
1091 /* If earliest == jump, try to build the cmove insn directly.
1092 This is helpful when combine has created some complex condition
1093 (like for alpha's cmovlbs) that we can't hope to regenerate
1094 through the normal interface. */
1095
1096 if (if_info->cond_earliest == if_info->jump)
1097 {
1098 rtx tmp;
1099
1100 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1101 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1102 tmp = gen_rtx_SET (VOIDmode, x, tmp);
1103
1104 start_sequence ();
1105 tmp = emit_insn (tmp);
1106
1107 if (recog_memoized (tmp) >= 0)
1108 {
1109 tmp = get_insns ();
1110 end_sequence ();
1111 emit_insn (tmp);
1112
1113 return x;
1114 }
1115
1116 end_sequence ();
1117 }
1118
1119 /* Don't even try if the comparison operands are weird. */
1120 if (! general_operand (cmp_a, GET_MODE (cmp_a))
1121 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1122 return NULL_RTX;
1123
1124 #if HAVE_conditional_move
1125 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1126 vtrue, vfalse, GET_MODE (x),
1127 (code == LTU || code == GEU
1128 || code == LEU || code == GTU));
1129 #else
1130 /* We'll never get here, as noce_process_if_block doesn't call the
1131 functions involved. Ifdef code, however, should be discouraged
1132 because it leads to typos in the code not selected. However,
1133 emit_conditional_move won't exist either. */
1134 return NULL_RTX;
1135 #endif
1136 }
1137
1138 /* Try only simple constants and registers here. More complex cases
1139 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1140 has had a go at it. */
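/* Illustrative example (operand names assumed): for the source-level
   "if (test) x = a; else x = b;" with A and B constants or registers,
   the two arms collapse into one conditional move, i.e. x = test ? a : b.  */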
1141
1142 static int
1143 noce_try_cmove (struct noce_if_info *if_info)
1144 {
1145 enum rtx_code code;
1146 rtx target, seq;
1147
1148 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1149 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1150 {
1151 start_sequence ();
1152
1153 code = GET_CODE (if_info->cond);
1154 target = noce_emit_cmove (if_info, if_info->x, code,
1155 XEXP (if_info->cond, 0),
1156 XEXP (if_info->cond, 1),
1157 if_info->a, if_info->b);
1158
1159 if (target)
1160 {
1161 if (target != if_info->x)
1162 noce_emit_move_insn (if_info->x, target);
1163
1164 seq = get_insns ();
1165 unshare_ifcvt_sequence (if_info, seq);
1166 end_sequence ();
1167 emit_insn_before_setloc (seq, if_info->jump,
1168 INSN_LOCATOR (if_info->insn_a));
1169 return TRUE;
1170 }
1171 else
1172 {
1173 end_sequence ();
1174 return FALSE;
1175 }
1176 }
1177
1178 return FALSE;
1179 }
1180
1181 /* Try more complex cases involving conditional_move. */
1182
1183 static int
1184 noce_try_cmove_arith (struct noce_if_info *if_info)
1185 {
1186 rtx a = if_info->a;
1187 rtx b = if_info->b;
1188 rtx x = if_info->x;
1189 rtx insn_a, insn_b;
1190 rtx tmp, target;
1191 int is_mem = 0;
1192 enum rtx_code code;
1193
1194 /* A conditional move from two memory sources is equivalent to a
1195 conditional on their addresses followed by a load. Don't do this
1196 early because it'll screw alias analysis. Note that we've
1197 already checked for no side effects. */
1198 if (! no_new_pseudos && cse_not_expected
1199 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
1200 && BRANCH_COST >= 5)
1201 {
1202 a = XEXP (a, 0);
1203 b = XEXP (b, 0);
1204 x = gen_reg_rtx (Pmode);
1205 is_mem = 1;
1206 }
1207
1208 /* ??? We could handle this if we knew that a load from A or B could
1209 not fault. This is also true if we've already loaded
1210 from the address along the path from ENTRY. */
1211 else if (may_trap_p (a) || may_trap_p (b))
1212 return FALSE;
1213
1214 /* if (test) x = a + b; else x = c - d;
1215 => y = a + b;
1216 x = c - d;
1217 if (test)
1218 x = y;
1219 */
1220
1221 code = GET_CODE (if_info->cond);
1222 insn_a = if_info->insn_a;
1223 insn_b = if_info->insn_b;
1224
1225 /* Possibly rearrange operands to make things come out more natural. */
1226 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1227 {
1228 int reversep = 0;
1229 if (rtx_equal_p (b, x))
1230 reversep = 1;
1231 else if (general_operand (b, GET_MODE (b)))
1232 reversep = 1;
1233
1234 if (reversep)
1235 {
1236 code = reversed_comparison_code (if_info->cond, if_info->jump);
1237 tmp = a, a = b, b = tmp;
1238 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1239 }
1240 }
1241
1242 start_sequence ();
1243
1244 /* If either operand is complex, load it into a register first.
1245 The best way to do this is to copy the original insn. In this
1246 way we preserve any clobbers etc that the insn may have had.
1247 This is of course not possible in the IS_MEM case. */
1248 if (! general_operand (a, GET_MODE (a)))
1249 {
1250 rtx set;
1251
1252 if (no_new_pseudos)
1253 goto end_seq_and_fail;
1254
1255 if (is_mem)
1256 {
1257 tmp = gen_reg_rtx (GET_MODE (a));
1258 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1259 }
1260 else if (! insn_a)
1261 goto end_seq_and_fail;
1262 else
1263 {
1264 a = gen_reg_rtx (GET_MODE (a));
1265 tmp = copy_rtx (insn_a);
1266 set = single_set (tmp);
1267 SET_DEST (set) = a;
1268 tmp = emit_insn (PATTERN (tmp));
1269 }
1270 if (recog_memoized (tmp) < 0)
1271 goto end_seq_and_fail;
1272 }
1273 if (! general_operand (b, GET_MODE (b)))
1274 {
1275 rtx set;
1276
1277 if (no_new_pseudos)
1278 goto end_seq_and_fail;
1279
1280 if (is_mem)
1281 {
1282 tmp = gen_reg_rtx (GET_MODE (b));
1283 tmp = emit_insn (gen_rtx_SET (VOIDmode,
1284 tmp,
1285 b));
1286 }
1287 else if (! insn_b)
1288 goto end_seq_and_fail;
1289 else
1290 {
1291 b = gen_reg_rtx (GET_MODE (b));
1292 tmp = copy_rtx (insn_b);
1293 set = single_set (tmp);
1294 SET_DEST (set) = b;
1295 tmp = emit_insn (PATTERN (tmp));
1296 }
1297 if (recog_memoized (tmp) < 0)
1298 goto end_seq_and_fail;
1299 }
1300
1301 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1302 XEXP (if_info->cond, 1), a, b);
1303
1304 if (! target)
1305 goto end_seq_and_fail;
1306
1307 /* If we're handling memories from above, emit the load now. */
1308 if (is_mem)
1309 {
1310 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1311
1312 /* Copy over flags as appropriate. */
1313 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1314 MEM_VOLATILE_P (tmp) = 1;
1315 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1316 MEM_IN_STRUCT_P (tmp) = 1;
1317 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1318 MEM_SCALAR_P (tmp) = 1;
1319 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1320 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1321 set_mem_align (tmp,
1322 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1323
1324 noce_emit_move_insn (if_info->x, tmp);
1325 }
1326 else if (target != x)
1327 noce_emit_move_insn (x, target);
1328
1329 tmp = get_insns ();
1330 unshare_ifcvt_sequence (if_info, tmp);
1331 end_sequence ();
1332 emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1333 return TRUE;
1334
1335 end_seq_and_fail:
1336 end_sequence ();
1337 return FALSE;
1338 }
1339
1340 /* For most cases, the simplified condition we found is the best
1341 choice, but this is not the case for the min/max/abs transforms.
1342 For these we wish to know that it is A or B in the condition. */
1343
1344 static rtx
1345 noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
1346 rtx *earliest)
1347 {
1348 rtx cond, set, insn;
1349 int reverse;
1350
1351 /* If target is already mentioned in the known condition, return it. */
1352 if (reg_mentioned_p (target, if_info->cond))
1353 {
1354 *earliest = if_info->cond_earliest;
1355 return if_info->cond;
1356 }
1357
1358 set = pc_set (if_info->jump);
1359 cond = XEXP (SET_SRC (set), 0);
1360 reverse
1361 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1362 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1363
1364 /* If we're looking for a constant, try to make the conditional
1365 have that constant in it. There are two reasons why it may
1366 not have the constant we want:
1367
1368 1. GCC may have needed to put the constant in a register, because
1369 the target can't compare directly against that constant. For
1370 this case, we look for a SET immediately before the comparison
1371 that puts a constant in that register.
1372
1373 2. GCC may have canonicalized the conditional, for example
1374 replacing "if x < 4" with "if x <= 3". We can undo that (or
1375 make equivalent types of changes) to get the constants we need
1376 if they're off by one in the right direction. */
1377
1378 if (GET_CODE (target) == CONST_INT)
1379 {
1380 enum rtx_code code = GET_CODE (if_info->cond);
1381 rtx op_a = XEXP (if_info->cond, 0);
1382 rtx op_b = XEXP (if_info->cond, 1);
1383 rtx prev_insn;
1384
1385 /* First, look to see if we put a constant in a register. */
1386 prev_insn = PREV_INSN (if_info->cond_earliest);
1387 if (prev_insn
1388 && INSN_P (prev_insn)
1389 && GET_CODE (PATTERN (prev_insn)) == SET)
1390 {
1391 rtx src = find_reg_equal_equiv_note (prev_insn);
1392 if (!src)
1393 src = SET_SRC (PATTERN (prev_insn));
1394 if (GET_CODE (src) == CONST_INT)
1395 {
1396 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1397 op_a = src;
1398 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1399 op_b = src;
1400
1401 if (GET_CODE (op_a) == CONST_INT)
1402 {
1403 rtx tmp = op_a;
1404 op_a = op_b;
1405 op_b = tmp;
1406 code = swap_condition (code);
1407 }
1408 }
1409 }
1410
1411 /* Now, look to see if we can get the right constant by
1412 adjusting the conditional. */
1413 if (GET_CODE (op_b) == CONST_INT)
1414 {
1415 HOST_WIDE_INT desired_val = INTVAL (target);
1416 HOST_WIDE_INT actual_val = INTVAL (op_b);
1417
1418 switch (code)
1419 {
1420 case LT:
1421 if (actual_val == desired_val + 1)
1422 {
1423 code = LE;
1424 op_b = GEN_INT (desired_val);
1425 }
1426 break;
1427 case LE:
1428 if (actual_val == desired_val - 1)
1429 {
1430 code = LT;
1431 op_b = GEN_INT (desired_val);
1432 }
1433 break;
1434 case GT:
1435 if (actual_val == desired_val - 1)
1436 {
1437 code = GE;
1438 op_b = GEN_INT (desired_val);
1439 }
1440 break;
1441 case GE:
1442 if (actual_val == desired_val + 1)
1443 {
1444 code = GT;
1445 op_b = GEN_INT (desired_val);
1446 }
1447 break;
1448 default:
1449 break;
1450 }
1451 }
1452
1453 /* If we made any changes, generate a new conditional that is
1454 equivalent to what we started with, but has the right
1455 constants in it. */
1456 if (code != GET_CODE (if_info->cond)
1457 || op_a != XEXP (if_info->cond, 0)
1458 || op_b != XEXP (if_info->cond, 1))
1459 {
1460 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1461 *earliest = if_info->cond_earliest;
1462 return cond;
1463 }
1464 }
1465
1466 cond = canonicalize_condition (if_info->jump, cond, reverse,
1467 earliest, target, false);
1468 if (! cond || ! reg_mentioned_p (target, cond))
1469 return NULL;
1470
1471 /* We almost certainly searched back to a different place.
1472 Need to re-verify correct lifetimes. */
1473
1474 /* X may not be mentioned in the range (cond_earliest, jump]. */
1475 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1476 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1477 return NULL;
1478
1479 /* A and B may not be modified in the range [cond_earliest, jump). */
1480 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1481 if (INSN_P (insn)
1482 && (modified_in_p (if_info->a, insn)
1483 || modified_in_p (if_info->b, insn)))
1484 return NULL;
1485
1486 return cond;
1487 }
1488
1489 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1490
1491 static int
1492 noce_try_minmax (struct noce_if_info *if_info)
1493 {
1494 rtx cond, earliest, target, seq;
1495 enum rtx_code code, op;
1496 int unsignedp;
1497
1498 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1499 if (no_new_pseudos)
1500 return FALSE;
1501
1502 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1503 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1504 to get the target to tell us... */
1505 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1506 || HONOR_NANS (GET_MODE (if_info->x)))
1507 return FALSE;
1508
1509 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1510 if (!cond)
1511 return FALSE;
1512
1513 /* Verify the condition is of the form we expect, and canonicalize
1514 the comparison code. */
1515 code = GET_CODE (cond);
1516 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1517 {
1518 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1519 return FALSE;
1520 }
1521 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1522 {
1523 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1524 return FALSE;
1525 code = swap_condition (code);
1526 }
1527 else
1528 return FALSE;
1529
1530 /* Determine what sort of operation this is. Note that the code is for
1531 a taken branch, so the code->operation mapping appears backwards. */
1532 switch (code)
1533 {
1534 case LT:
1535 case LE:
1536 case UNLT:
1537 case UNLE:
1538 op = SMAX;
1539 unsignedp = 0;
1540 break;
1541 case GT:
1542 case GE:
1543 case UNGT:
1544 case UNGE:
1545 op = SMIN;
1546 unsignedp = 0;
1547 break;
1548 case LTU:
1549 case LEU:
1550 op = UMAX;
1551 unsignedp = 1;
1552 break;
1553 case GTU:
1554 case GEU:
1555 op = UMIN;
1556 unsignedp = 1;
1557 break;
1558 default:
1559 return FALSE;
1560 }
1561
1562 start_sequence ();
1563
1564 target = expand_simple_binop (GET_MODE (if_info->x), op,
1565 if_info->a, if_info->b,
1566 if_info->x, unsignedp, OPTAB_WIDEN);
1567 if (! target)
1568 {
1569 end_sequence ();
1570 return FALSE;
1571 }
1572 if (target != if_info->x)
1573 noce_emit_move_insn (if_info->x, target);
1574
1575 seq = get_insns ();
1576 unshare_ifcvt_sequence (if_info, seq);
1577 end_sequence ();
1578
1579 if (seq_contains_jump (seq))
1580 return FALSE;
1581
1582 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1583 if_info->cond = cond;
1584 if_info->cond_earliest = earliest;
1585
1586 return TRUE;
1587 }
1588
1589 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1590
1591 static int
1592 noce_try_abs (struct noce_if_info *if_info)
1593 {
1594 rtx cond, earliest, target, seq, a, b, c;
1595 int negate;
1596
1597 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1598 if (no_new_pseudos)
1599 return FALSE;
1600
1601 /* Recognize A and B as constituting an ABS or NABS. */
1602 a = if_info->a;
1603 b = if_info->b;
1604 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1605 negate = 0;
1606 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1607 {
1608 c = a; a = b; b = c;
1609 negate = 1;
1610 }
1611 else
1612 return FALSE;
1613
1614 cond = noce_get_alt_condition (if_info, b, &earliest);
1615 if (!cond)
1616 return FALSE;
1617
1618 /* Verify the condition is of the form we expect. */
1619 if (rtx_equal_p (XEXP (cond, 0), b))
1620 c = XEXP (cond, 1);
1621 else if (rtx_equal_p (XEXP (cond, 1), b))
1622 c = XEXP (cond, 0);
1623 else
1624 return FALSE;
1625
1626 /* Verify that C is zero. Search backward through the block for
1627 a REG_EQUAL note if necessary. */
1628 if (REG_P (c))
1629 {
1630 rtx insn, note = NULL;
1631 for (insn = earliest;
1632 insn != BB_HEAD (if_info->test_bb);
1633 insn = PREV_INSN (insn))
1634 if (INSN_P (insn)
1635 && ((note = find_reg_note (insn, REG_EQUAL, c))
1636 || (note = find_reg_note (insn, REG_EQUIV, c))))
1637 break;
1638 if (! note)
1639 return FALSE;
1640 c = XEXP (note, 0);
1641 }
1642 if (GET_CODE (c) == MEM
1643 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1644 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1645 c = get_pool_constant (XEXP (c, 0));
1646
1647 /* Work around funny ideas get_condition has wrt canonicalization.
1648 Note that these rtx constants are known to be CONST_INT, and
1649 therefore imply integer comparisons. */
1650 if (c == constm1_rtx && GET_CODE (cond) == GT)
1651 ;
1652 else if (c == const1_rtx && GET_CODE (cond) == LT)
1653 ;
1654 else if (c != CONST0_RTX (GET_MODE (b)))
1655 return FALSE;
1656
1657 /* Determine what sort of operation this is. */
1658 switch (GET_CODE (cond))
1659 {
1660 case LT:
1661 case LE:
1662 case UNLT:
1663 case UNLE:
1664 negate = !negate;
1665 break;
1666 case GT:
1667 case GE:
1668 case UNGT:
1669 case UNGE:
1670 break;
1671 default:
1672 return FALSE;
1673 }
1674
1675 start_sequence ();
1676
1677 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
1678
1679 /* ??? It's a quandary whether cmove would be better here, especially
1680 for integers. Perhaps combine will clean things up. */
1681 if (target && negate)
1682 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1683
1684 if (! target)
1685 {
1686 end_sequence ();
1687 return FALSE;
1688 }
1689
1690 if (target != if_info->x)
1691 noce_emit_move_insn (if_info->x, target);
1692
1693 seq = get_insns ();
1694 unshare_ifcvt_sequence (if_info, seq);
1695 end_sequence ();
1696
1697 if (seq_contains_jump (seq))
1698 return FALSE;
1699
1700 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1701 if_info->cond = cond;
1702 if_info->cond_earliest = earliest;
1703
1704 return TRUE;
1705 }
1706
1707 /* Similar to get_condition, only the resulting condition must be
1708 valid at JUMP, instead of at EARLIEST. */
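/* Illustrative case (assumed): if the comparison feeding JUMP was
   computed several insns earlier and one of its operands is assigned
   again before JUMP, the condition found at EARLIEST cannot simply be
   reused, because the converted code is inserted immediately before
   JUMP; the checks below reject or re-derive such conditions.  */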
1709
1710 static rtx
1711 noce_get_condition (rtx jump, rtx *earliest)
1712 {
1713 rtx cond, set, tmp, insn;
1714 bool reverse;
1715
1716 if (! any_condjump_p (jump))
1717 return NULL_RTX;
1718
1719 set = pc_set (jump);
1720
1721 /* If this branches to JUMP_LABEL when the condition is false,
1722 reverse the condition. */
1723 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1724 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
1725
1726 /* If the condition variable is a register and is MODE_INT, accept it. */
1727
1728 cond = XEXP (SET_SRC (set), 0);
1729 tmp = XEXP (cond, 0);
1730 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
1731 {
1732 *earliest = jump;
1733
1734 if (reverse)
1735 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1736 GET_MODE (cond), tmp, XEXP (cond, 1));
1737 return cond;
1738 }
1739
1740 /* Otherwise, fall back on canonicalize_condition to do the dirty
1741 work of manipulating MODE_CC values and COMPARE rtx codes. */
1742
1743 tmp = canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
1744 false);
1745 if (!tmp)
1746 return NULL_RTX;
1747
1748 /* We are going to insert code before JUMP, not before EARLIEST.
1749 We must therefore be certain that the given condition is valid
1750 at JUMP by virtue of not having been modified since. */
1751 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1752 if (INSN_P (insn) && modified_in_p (tmp, insn))
1753 break;
1754 if (insn == jump)
1755 return tmp;
1756
1757 /* The condition was modified. See if we can get a partial result
1758 that doesn't follow all the reversals. Perhaps combine can fold
1759 them together later. */
1760 tmp = XEXP (tmp, 0);
1761 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
1762 return NULL_RTX;
1763 tmp = canonicalize_condition (jump, cond, reverse, earliest, tmp,
1764 false);
1765 if (!tmp)
1766 return NULL_RTX;
1767
1768 /* For sanity's sake, re-validate the new result. */
1769 for (insn = *earliest; insn != jump; insn = NEXT_INSN (insn))
1770 if (INSN_P (insn) && modified_in_p (tmp, insn))
1771 return NULL_RTX;
1772
1773 return tmp;
1774 }
1775
1776 /* Return true if OP is ok for if-then-else processing. */
1777
1778 static int
1779 noce_operand_ok (rtx op)
1780 {
1781 /* We special-case memories, so handle any of them with
1782 no address side effects. */
1783 if (GET_CODE (op) == MEM)
1784 return ! side_effects_p (XEXP (op, 0));
1785
1786 if (side_effects_p (op))
1787 return FALSE;
1788
1789 return ! may_trap_p (op);
1790 }
1791
1792 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1793 without using conditional execution. Return TRUE if we were
1794 successful at converting the block. */
1795
1796 static int
1797 noce_process_if_block (struct ce_if_block * ce_info)
1798 {
1799 basic_block test_bb = ce_info->test_bb; /* test block */
1800 basic_block then_bb = ce_info->then_bb; /* THEN */
1801 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
1802 struct noce_if_info if_info;
1803 rtx insn_a, insn_b;
1804 rtx set_a, set_b;
1805 rtx orig_x, x, a, b;
1806 rtx jump, cond;
1807
1808 /* We're looking for patterns of the form
1809
1810 (1) if (...) x = a; else x = b;
1811 (2) x = b; if (...) x = a;
1812 (3) if (...) x = a; // as if with an initial x = x.
1813
1814 The later patterns require jumps to be more expensive.
1815
1816 ??? For future expansion, look for multiple X in such patterns. */
1817
1818 /* If test is comprised of && or || elements, don't handle it unless it is
1819 the special case of && elements without an ELSE block. */
1820 if (ce_info->num_multiple_test_blocks)
1821 {
1822 if (else_bb || ! ce_info->and_and_p)
1823 return FALSE;
1824
1825 ce_info->test_bb = test_bb = ce_info->last_test_bb;
1826 ce_info->num_multiple_test_blocks = 0;
1827 ce_info->num_and_and_blocks = 0;
1828 ce_info->num_or_or_blocks = 0;
1829 }
1830
1831 /* If this is not a standard conditional jump, we can't parse it. */
1832 jump = BB_END (test_bb);
1833 cond = noce_get_condition (jump, &if_info.cond_earliest);
1834 if (! cond)
1835 return FALSE;
1836
1837 /* If the conditional jump is more than just a conditional
1838 jump, then we can not do if-conversion on this block. */
1839 if (! onlyjump_p (jump))
1840 return FALSE;
1841
1842 /* We must be comparing objects whose modes imply the size. */
1843 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1844 return FALSE;
1845
1846 /* Look for one of the potential sets. */
1847 insn_a = first_active_insn (then_bb);
1848 if (! insn_a
1849 || insn_a != last_active_insn (then_bb, FALSE)
1850 || (set_a = single_set (insn_a)) == NULL_RTX)
1851 return FALSE;
1852
1853 x = SET_DEST (set_a);
1854 a = SET_SRC (set_a);
1855
1856 /* Look for the other potential set. Make sure we've got equivalent
1857 destinations. */
1858 /* ??? This is overconservative. Storing to two different mems is
1859 as easy as conditionally computing the address. Storing to a
1860 single mem merely requires a scratch memory to use as one of the
1861 destination addresses; often the memory immediately below the
1862 stack pointer is available for this. */
1863 set_b = NULL_RTX;
1864 if (else_bb)
1865 {
1866 insn_b = first_active_insn (else_bb);
1867 if (! insn_b
1868 || insn_b != last_active_insn (else_bb, FALSE)
1869 || (set_b = single_set (insn_b)) == NULL_RTX
1870 || ! rtx_equal_p (x, SET_DEST (set_b)))
1871 return FALSE;
1872 }
1873 else
1874 {
1875 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1876 /* We're going to be moving the evaluation of B down from above
1877 COND_EARLIEST to JUMP. Make sure the relevant data is still
1878 intact. */
1879 if (! insn_b
1880 || GET_CODE (insn_b) != INSN
1881 || (set_b = single_set (insn_b)) == NULL_RTX
1882 || ! rtx_equal_p (x, SET_DEST (set_b))
1883 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
1884 || modified_between_p (SET_SRC (set_b),
1885 PREV_INSN (if_info.cond_earliest), jump)
1886 /* Likewise with X. In particular this can happen when
1887 noce_get_condition looks farther back in the instruction
1888 stream than one might expect. */
1889 || reg_overlap_mentioned_p (x, cond)
1890 || reg_overlap_mentioned_p (x, a)
1891 || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
1892 insn_b = set_b = NULL_RTX;
1893 }
1894
1895 /* If x has side effects then only the if-then-else form is safe to
1896 convert. But even in that case we would need to restore any notes
1897 (such as REG_INC) at the end. That can be tricky if
1898 noce_emit_move_insn expands to more than one insn, so disable the
1899 optimization entirely for now if there are side effects. */
1900 if (side_effects_p (x))
1901 return FALSE;
1902
1903 b = (set_b ? SET_SRC (set_b) : x);
1904
1905 /* Only operate on register destinations, and even then avoid extending
1906 the lifetime of hard registers on small register class machines. */
1907 orig_x = x;
1908 if (GET_CODE (x) != REG
1909 || (SMALL_REGISTER_CLASSES
1910 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1911 {
1912 if (no_new_pseudos || GET_MODE (x) == BLKmode)
1913 return FALSE;
1914 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1915 ? XEXP (x, 0) : x));
1916 }
1917
1918 /* Don't operate on sources that may trap or are volatile. */
1919 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1920 return FALSE;
1921
1922 /* Set up the info block for our subroutines. */
1923 if_info.test_bb = test_bb;
1924 if_info.cond = cond;
1925 if_info.jump = jump;
1926 if_info.insn_a = insn_a;
1927 if_info.insn_b = insn_b;
1928 if_info.x = x;
1929 if_info.a = a;
1930 if_info.b = b;
1931
1932 /* Try optimizations in some approximation of a useful order. */
1933 /* ??? Should first look to see if X is live incoming at all. If it
1934 isn't, we don't need anything but an unconditional set. */
1935
1936 /* Look and see if A and B are really the same. Avoid creating silly
1937 cmove constructs that no one will fix up later. */
1938 if (rtx_equal_p (a, b))
1939 {
1940 /* If we have an INSN_B, we don't have to create any new rtl. Just
1941 move the instruction that we already have. If we don't have an
1942 INSN_B, that means that A == X, and we've got a noop move. In
1943 that case don't do anything and let the code below delete INSN_A. */
1944 if (insn_b && else_bb)
1945 {
1946 rtx note;
1947
1948 if (else_bb && insn_b == BB_END (else_bb))
1949 BB_END (else_bb) = PREV_INSN (insn_b);
1950 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
1951
1952 /* If there was a REG_EQUAL note, delete it since it may have been
1953 true due to this insn being after a jump. */
1954 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1955 remove_note (insn_b, note);
1956
1957 insn_b = NULL_RTX;
1958 }
1959 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1960 x must be executed twice. */
1961 else if (insn_b && side_effects_p (orig_x))
1962 return FALSE;
1963
1964 x = orig_x;
1965 goto success;
1966 }
1967
1968 if (noce_try_move (&if_info))
1969 goto success;
1970 if (noce_try_store_flag (&if_info))
1971 goto success;
1972 if (noce_try_minmax (&if_info))
1973 goto success;
1974 if (noce_try_abs (&if_info))
1975 goto success;
1976 if (HAVE_conditional_move
1977 && noce_try_cmove (&if_info))
1978 goto success;
1979 if (! HAVE_conditional_execution)
1980 {
1981 if (noce_try_store_flag_constants (&if_info))
1982 goto success;
1983 if (noce_try_addcc (&if_info))
1984 goto success;
1985 if (noce_try_store_flag_mask (&if_info))
1986 goto success;
1987 if (HAVE_conditional_move
1988 && noce_try_cmove_arith (&if_info))
1989 goto success;
1990 }
1991
1992 return FALSE;
1993
1994 success:
1995 /* The original sets may now be killed. */
1996 delete_insn (insn_a);
1997
1998 /* Several special cases here: First, we may have reused insn_b above,
1999 in which case insn_b is now NULL. Second, we want to delete insn_b
2000 if it came from the ELSE block, because it follows the now correct
2001 write that appears in the TEST block. However, if we got insn_b from
2002 the TEST block, it may in fact be loading data needed for the comparison.
2003 We'll let life_analysis remove the insn if it's really dead. */
2004 if (insn_b && else_bb)
2005 delete_insn (insn_b);
2006
2007 /* The new insns will have been inserted immediately before the jump. We
2008 should be able to remove the jump with impunity, but the condition itself
2009 may have been modified by gcse to be shared across basic blocks. */
2010 delete_insn (jump);
2011
2012 /* If we used a temporary, fix it up now. */
2013 if (orig_x != x)
2014 {
2015 start_sequence ();
2016 noce_emit_move_insn (orig_x, x);
2017 insn_b = get_insns ();
2018 set_used_flags (orig_x);
2019 unshare_all_rtl_in_chain (insn_b);
2020 end_sequence ();
2021
2022 emit_insn_after_setloc (insn_b, BB_END (test_bb), INSN_LOCATOR (insn_a));
2023 }
2024
2025 /* Merge the blocks! */
2026 merge_if_block (ce_info);
2027
2028 return TRUE;
2029 }
2030 \f
2031 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
2032 straight line code. Return true if successful. */
2033
2034 static int
2035 process_if_block (struct ce_if_block * ce_info)
2036 {
2037 if (! reload_completed
2038 && noce_process_if_block (ce_info))
2039 return TRUE;
2040
2041 if (HAVE_conditional_execution && reload_completed)
2042 {
2043 /* If we have && and || tests, first try to combine them into the
2044 conditional code; if that fails, go back and handle the block without
2045 the && and || tests, which at present only handles the && case when
2046 there is no ELSE block. */
2047 if (cond_exec_process_if_block (ce_info, TRUE))
2048 return TRUE;
2049
2050 if (ce_info->num_multiple_test_blocks)
2051 {
2052 cancel_changes (0);
2053
2054 if (cond_exec_process_if_block (ce_info, FALSE))
2055 return TRUE;
2056 }
2057 }
2058
2059 return FALSE;
2060 }
2061
2062 /* Merge the blocks and mark for local life update. */
2063
2064 static void
2065 merge_if_block (struct ce_if_block * ce_info)
2066 {
2067 basic_block test_bb = ce_info->test_bb; /* last test block */
2068 basic_block then_bb = ce_info->then_bb; /* THEN */
2069 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
2070 basic_block join_bb = ce_info->join_bb; /* join block */
2071 basic_block combo_bb;
2072
2073 /* All block merging is done into the lower block numbers. */
2074
2075 combo_bb = test_bb;
2076
2077 /* Merge any basic blocks to handle && and || subtests. Each of
2078 the blocks is on the fallthru path from the predecessor block. */
2079 if (ce_info->num_multiple_test_blocks > 0)
2080 {
2081 basic_block bb = test_bb;
2082 basic_block last_test_bb = ce_info->last_test_bb;
2083 basic_block fallthru = block_fallthru (bb);
2084
2085 do
2086 {
2087 bb = fallthru;
2088 fallthru = block_fallthru (bb);
2089 if (post_dominators)
2090 delete_from_dominance_info (post_dominators, bb);
2091 merge_blocks (combo_bb, bb);
2092 num_true_changes++;
2093 }
2094 while (bb != last_test_bb);
2095 }
2096
2097 /* Merge TEST block into THEN block. Normally the THEN block won't have a
2098 label, but it might if there were || tests. That label's count should be
2099 zero, and it normally should be removed. */
2100
2101 if (then_bb)
2102 {
2103 if (combo_bb->global_live_at_end)
2104 COPY_REG_SET (combo_bb->global_live_at_end,
2105 then_bb->global_live_at_end);
2106 if (post_dominators)
2107 delete_from_dominance_info (post_dominators, then_bb);
2108 merge_blocks (combo_bb, then_bb);
2109 num_true_changes++;
2110 }
2111
2112 /* The ELSE block, if it existed, had a label. That label count
2113 will almost always be zero, but odd things can happen when labels
2114 get their addresses taken. */
2115 if (else_bb)
2116 {
2117 if (post_dominators)
2118 delete_from_dominance_info (post_dominators, else_bb);
2119 merge_blocks (combo_bb, else_bb);
2120 num_true_changes++;
2121 }
2122
2123 /* If there was no join block reported, that means it was not adjacent
2124 to the others, and so we cannot merge them. */
2125
2126 if (! join_bb)
2127 {
2128 rtx last = BB_END (combo_bb);
2129
2130 /* The outgoing edge for the current COMBO block should already
2131 be correct. Verify this. */
2132 if (combo_bb->succ == NULL_EDGE)
2133 {
2134 if (find_reg_note (last, REG_NORETURN, NULL))
2135 ;
2136 else if (GET_CODE (last) == INSN
2137 && GET_CODE (PATTERN (last)) == TRAP_IF
2138 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
2139 ;
2140 else
2141 abort ();
2142 }
2143
2144 /* There should still be something at the end of the THEN or ELSE
2145 blocks taking us to our final destination. */
2146 else if (GET_CODE (last) == JUMP_INSN)
2147 ;
2148 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
2149 && GET_CODE (last) == CALL_INSN
2150 && SIBLING_CALL_P (last))
2151 ;
2152 else if ((combo_bb->succ->flags & EDGE_EH)
2153 && can_throw_internal (last))
2154 ;
2155 else
2156 abort ();
2157 }
2158
2159 /* The JOIN block may have had quite a number of other predecessors too.
2160 Since we've already merged the TEST, THEN and ELSE blocks, we should
2161 have only one remaining edge from our if-then-else diamond. If there
2162 is more than one remaining edge, it must come from elsewhere. There
2163 may be zero incoming edges if the THEN block didn't actually join
2164 back up (as with a call to abort). */
2165 else if ((join_bb->pred == NULL
2166 || join_bb->pred->pred_next == NULL)
2167 && join_bb != EXIT_BLOCK_PTR)
2168 {
2169 /* We can merge the JOIN. */
2170 if (combo_bb->global_live_at_end)
2171 COPY_REG_SET (combo_bb->global_live_at_end,
2172 join_bb->global_live_at_end);
2173
2174 if (post_dominators)
2175 delete_from_dominance_info (post_dominators, join_bb);
2176 merge_blocks (combo_bb, join_bb);
2177 num_true_changes++;
2178 }
2179 else
2180 {
2181 /* We cannot merge the JOIN. */
2182
2183 /* The outgoing edge for the current COMBO block should already
2184 be correct. Verify this. */
2185 if (combo_bb->succ->succ_next != NULL_EDGE
2186 || combo_bb->succ->dest != join_bb)
2187 abort ();
2188
2189 /* Remove the jump and cruft from the end of the COMBO block. */
2190 if (join_bb != EXIT_BLOCK_PTR)
2191 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
2192 }
2193
2194 num_updated_if_blocks++;
2195 }
2196 \f
2197 /* Find a block ending in a simple IF condition and try to transform it
2198 in some way. When converting a multi-block condition, put the new code
2199 in the first such block and delete the rest. Return a pointer to this
2200 first block if some transformation was done. Return NULL otherwise. */
2201
2202 static basic_block
2203 find_if_header (basic_block test_bb, int pass)
2204 {
2205 ce_if_block_t ce_info;
2206 edge then_edge;
2207 edge else_edge;
2208
2209 /* The kind of block we're looking for has exactly two successors. */
2210 if ((then_edge = test_bb->succ) == NULL_EDGE
2211 || (else_edge = then_edge->succ_next) == NULL_EDGE
2212 || else_edge->succ_next != NULL_EDGE)
2213 return NULL;
2214
2215 /* Neither edge should be abnormal. */
2216 if ((then_edge->flags & EDGE_COMPLEX)
2217 || (else_edge->flags & EDGE_COMPLEX))
2218 return NULL;
2219
2220 /* Nor exit the loop. */
2221 if ((then_edge->flags & EDGE_LOOP_EXIT)
2222 || (else_edge->flags & EDGE_LOOP_EXIT))
2223 return NULL;
2224
2225 /* The THEN edge is canonically the one that falls through. */
2226 if (then_edge->flags & EDGE_FALLTHRU)
2227 ;
2228 else if (else_edge->flags & EDGE_FALLTHRU)
2229 {
2230 edge e = else_edge;
2231 else_edge = then_edge;
2232 then_edge = e;
2233 }
2234 else
2235 /* Otherwise this must be a multiway branch of some sort. */
2236 return NULL;
2237
2238 memset (&ce_info, '\0', sizeof (ce_info));
2239 ce_info.test_bb = test_bb;
2240 ce_info.then_bb = then_edge->dest;
2241 ce_info.else_bb = else_edge->dest;
2242 ce_info.pass = pass;
2243
2244 #ifdef IFCVT_INIT_EXTRA_FIELDS
2245 IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2246 #endif
2247
2248 if (find_if_block (&ce_info))
2249 goto success;
2250
2251 if (HAVE_trap && HAVE_conditional_trap
2252 && find_cond_trap (test_bb, then_edge, else_edge))
2253 goto success;
2254
2255 if (post_dominators
2256 && (! HAVE_conditional_execution || reload_completed))
2257 {
2258 if (find_if_case_1 (test_bb, then_edge, else_edge))
2259 goto success;
2260 if (find_if_case_2 (test_bb, then_edge, else_edge))
2261 goto success;
2262 }
2263
2264 return NULL;
2265
2266 success:
2267 if (rtl_dump_file)
2268 fprintf (rtl_dump_file, "Conversion succeeded on pass %d.\n", pass);
2269 return ce_info.test_bb;
2270 }
2271
2272 /* Determine whether a block has two edges, one of which falls through to the
2273 next block, and the other jumps to a specific block, so that we can tell if
2274 the block is part of an && test or an || test. Return -1 if not, otherwise
2275 the number of non-note, non-jump, non-USE/CLOBBER insns in the block. */
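/* As an illustrative sketch: for a source test such as "if (a && b)", the
   block that computes A typically ends in a conditional jump to the ELSE (or
   join) block and falls through to the block that tests B. Such a block both
   jumps to TARGET_BB and falls through, which is exactly the shape detected
   below; the || case mirrors it, jumping to the THEN block instead. */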
2276
2277 static int
2278 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
2279 {
2280 edge cur_edge;
2281 int fallthru_p = FALSE;
2282 int jump_p = FALSE;
2283 rtx insn;
2284 rtx end;
2285 int n_insns = 0;
2286
2287 if (!cur_bb || !target_bb)
2288 return -1;
2289
2290 /* If no edges, obviously it doesn't jump or fallthru. */
2291 if (cur_bb->succ == NULL_EDGE)
2292 return FALSE;
2293
2294 for (cur_edge = cur_bb->succ;
2295 cur_edge != NULL_EDGE;
2296 cur_edge = cur_edge->succ_next)
2297 {
2298 if (cur_edge->flags & EDGE_COMPLEX)
2299 /* Anything complex isn't what we want. */
2300 return -1;
2301
2302 else if (cur_edge->flags & EDGE_FALLTHRU)
2303 fallthru_p = TRUE;
2304
2305 else if (cur_edge->dest == target_bb)
2306 jump_p = TRUE;
2307
2308 else
2309 return -1;
2310 }
2311
2312 if ((jump_p & fallthru_p) == 0)
2313 return -1;
2314
2315 /* Don't allow calls in the block, since this is used to group && and ||
2316 together for conditional execution support. ??? We should support
2317 conditional execution across calls for IA-64 some day, but for now
2318 it makes the code simpler. */
2319 end = BB_END (cur_bb);
2320 insn = BB_HEAD (cur_bb);
2321
2322 while (insn != NULL_RTX)
2323 {
2324 if (GET_CODE (insn) == CALL_INSN)
2325 return -1;
2326
2327 if (INSN_P (insn)
2328 && GET_CODE (insn) != JUMP_INSN
2329 && GET_CODE (PATTERN (insn)) != USE
2330 && GET_CODE (PATTERN (insn)) != CLOBBER)
2331 n_insns++;
2332
2333 if (insn == end)
2334 break;
2335
2336 insn = NEXT_INSN (insn);
2337 }
2338
2339 return n_insns;
2340 }
2341
2342 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2343 block. If so, we'll try to convert the insns to not require the branch.
2344 Return TRUE if we were successful at converting the block. */
2345
2346 static int
2347 find_if_block (struct ce_if_block * ce_info)
2348 {
2349 basic_block test_bb = ce_info->test_bb;
2350 basic_block then_bb = ce_info->then_bb;
2351 basic_block else_bb = ce_info->else_bb;
2352 basic_block join_bb = NULL_BLOCK;
2353 edge then_succ = then_bb->succ;
2354 edge else_succ = else_bb->succ;
2355 int then_predecessors;
2356 int else_predecessors;
2357 edge cur_edge;
2358 basic_block next;
2359
2360 ce_info->last_test_bb = test_bb;
2361
2362 /* Discover if any fall through predecessors of the current test basic block
2363 were && tests (which jump to the else block) or || tests (which jump to
2364 the then block). */
2365 if (HAVE_conditional_execution && reload_completed
2366 && test_bb->pred != NULL_EDGE
2367 && test_bb->pred->pred_next == NULL_EDGE
2368 && test_bb->pred->flags == EDGE_FALLTHRU)
2369 {
2370 basic_block bb = test_bb->pred->src;
2371 basic_block target_bb;
2372 int max_insns = MAX_CONDITIONAL_EXECUTE;
2373 int n_insns;
2374
2375 /* Determine if the preceding block is an && or || block. */
2376 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
2377 {
2378 ce_info->and_and_p = TRUE;
2379 target_bb = else_bb;
2380 }
2381 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
2382 {
2383 ce_info->and_and_p = FALSE;
2384 target_bb = then_bb;
2385 }
2386 else
2387 target_bb = NULL_BLOCK;
2388
2389 if (target_bb && n_insns <= max_insns)
2390 {
2391 int total_insns = 0;
2392 int blocks = 0;
2393
2394 ce_info->last_test_bb = test_bb;
2395
2396 /* Found at least one && or || block, look for more. */
2397 do
2398 {
2399 ce_info->test_bb = test_bb = bb;
2400 total_insns += n_insns;
2401 blocks++;
2402
2403 if (bb->pred == NULL_EDGE || bb->pred->pred_next != NULL_EDGE)
2404 break;
2405
2406 bb = bb->pred->src;
2407 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
2408 }
2409 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
2410
2411 ce_info->num_multiple_test_blocks = blocks;
2412 ce_info->num_multiple_test_insns = total_insns;
2413
2414 if (ce_info->and_and_p)
2415 ce_info->num_and_and_blocks = blocks;
2416 else
2417 ce_info->num_or_or_blocks = blocks;
2418 }
2419 }
2420
2421 /* Count the number of edges the THEN and ELSE blocks have. */
2422 then_predecessors = 0;
2423 for (cur_edge = then_bb->pred;
2424 cur_edge != NULL_EDGE;
2425 cur_edge = cur_edge->pred_next)
2426 {
2427 then_predecessors++;
2428 if (cur_edge->flags & EDGE_COMPLEX)
2429 return FALSE;
2430 }
2431
2432 else_predecessors = 0;
2433 for (cur_edge = else_bb->pred;
2434 cur_edge != NULL_EDGE;
2435 cur_edge = cur_edge->pred_next)
2436 {
2437 else_predecessors++;
2438 if (cur_edge->flags & EDGE_COMPLEX)
2439 return FALSE;
2440 }
2441
2442 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
2443 other than any || blocks which jump to the THEN block. */
2444 if ((then_predecessors - ce_info->num_or_or_blocks) != 1)
2445 return FALSE;
2446
2447 /* The THEN block of an IF-THEN combo must have zero or one successors. */
2448 if (then_succ != NULL_EDGE
2449 && (then_succ->succ_next != NULL_EDGE
2450 || (then_succ->flags & EDGE_COMPLEX)
2451 || (flow2_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
2452 return FALSE;
2453
2454 /* If the THEN block has no successors, conditional execution can still
2455 make a conditional call. Don't do this unless the ELSE block has
2456 only one incoming edge -- the CFG manipulation is too ugly otherwise.
2457 Check for the last insn of the THEN block being an indirect jump, which
2458 is listed as not having any successors, but confuses the rest of the CE
2459 code processing. ??? We should fix this in the future. */
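/* For example (illustrative only), "if (test) abort ();" leaves the THEN
   block with no successor edges, since the call never returns; with
   conditional execution the call itself can still be predicated on TEST. */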
2460 if (then_succ == NULL)
2461 {
2462 if (else_bb->pred->pred_next == NULL_EDGE)
2463 {
2464 rtx last_insn = BB_END (then_bb);
2465
2466 while (last_insn
2467 && GET_CODE (last_insn) == NOTE
2468 && last_insn != BB_HEAD (then_bb))
2469 last_insn = PREV_INSN (last_insn);
2470
2471 if (last_insn
2472 && GET_CODE (last_insn) == JUMP_INSN
2473 && ! simplejump_p (last_insn))
2474 return FALSE;
2475
2476 join_bb = else_bb;
2477 else_bb = NULL_BLOCK;
2478 }
2479 else
2480 return FALSE;
2481 }
2482
2483 /* If the THEN block's successor is the other edge out of the TEST block,
2484 then we have an IF-THEN combo without an ELSE. */
2485 else if (then_succ->dest == else_bb)
2486 {
2487 join_bb = else_bb;
2488 else_bb = NULL_BLOCK;
2489 }
2490
2491 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
2492 has exactly one predecessor and one successor, and the outgoing edge
2493 is not complex, then we have an IF-THEN-ELSE combo. */
2494 else if (else_succ != NULL_EDGE
2495 && then_succ->dest == else_succ->dest
2496 && else_bb->pred->pred_next == NULL_EDGE
2497 && else_succ->succ_next == NULL_EDGE
2498 && ! (else_succ->flags & EDGE_COMPLEX)
2499 && ! (flow2_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
2500 join_bb = else_succ->dest;
2501
2502 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2503 else
2504 return FALSE;
2505
2506 num_possible_if_blocks++;
2507
2508 if (rtl_dump_file)
2509 {
2510 fprintf (rtl_dump_file, "\nIF-THEN%s block found, pass %d, start block %d [insn %d], then %d [%d]",
2511 (else_bb) ? "-ELSE" : "",
2512 ce_info->pass,
2513 test_bb->index, (BB_HEAD (test_bb)) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
2514 then_bb->index, (BB_HEAD (then_bb)) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
2515
2516 if (else_bb)
2517 fprintf (rtl_dump_file, ", else %d [%d]",
2518 else_bb->index, (BB_HEAD (else_bb)) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
2519
2520 fprintf (rtl_dump_file, ", join %d [%d]",
2521 join_bb->index, (BB_HEAD (join_bb)) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
2522
2523 if (ce_info->num_multiple_test_blocks > 0)
2524 fprintf (rtl_dump_file, ", %d %s block%s last test %d [%d]",
2525 ce_info->num_multiple_test_blocks,
2526 (ce_info->and_and_p) ? "&&" : "||",
2527 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
2528 ce_info->last_test_bb->index,
2529 ((BB_HEAD (ce_info->last_test_bb))
2530 ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
2531 : -1));
2532
2533 fputc ('\n', rtl_dump_file);
2534 }
2535
2536 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
2537 first condition for free, since we've already asserted that there's a
2538 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
2539 we checked the FALLTHRU flag, those are already adjacent to the last IF
2540 block. */
2541 /* ??? As an enhancement, move the ELSE block. Have to deal with
2542 BLOCK notes, if by no other means than aborting the merge if they
2543 exist. Sticky enough I don't want to think about it now. */
2544 next = then_bb;
2545 if (else_bb && (next = next->next_bb) != else_bb)
2546 return FALSE;
2547 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2548 {
2549 if (else_bb)
2550 join_bb = NULL;
2551 else
2552 return FALSE;
2553 }
2554
2555 /* Do the real work. */
2556 ce_info->else_bb = else_bb;
2557 ce_info->join_bb = join_bb;
2558
2559 return process_if_block (ce_info);
2560 }
2561
2562 /* Convert a branch over a trap, or a branch
2563 to a trap, into a conditional trap. */
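/* Roughly (illustrative only): a "branch over a trap" such as

	if (x == 0) goto over;
	trap;
     over:

   becomes a single conditional trap that fires when x != 0; the explicit
   branch and the separate trap block go away (provided the target has a
   conditional trap instruction). */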
2564
2565 static int
2566 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
2567 {
2568 basic_block then_bb = then_edge->dest;
2569 basic_block else_bb = else_edge->dest;
2570 basic_block other_bb, trap_bb;
2571 rtx trap, jump, cond, cond_earliest, seq;
2572 enum rtx_code code;
2573
2574 /* Locate the block with the trap instruction. */
2575 /* ??? While we look for no successors, we really ought to allow
2576 EH successors. Need to fix merge_if_block for that to work. */
2577 if ((trap = block_has_only_trap (then_bb)) != NULL)
2578 trap_bb = then_bb, other_bb = else_bb;
2579 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2580 trap_bb = else_bb, other_bb = then_bb;
2581 else
2582 return FALSE;
2583
2584 if (rtl_dump_file)
2585 {
2586 fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2587 test_bb->index, trap_bb->index);
2588 }
2589
2590 /* If this is not a standard conditional jump, we can't parse it. */
2591 jump = BB_END (test_bb);
2592 cond = noce_get_condition (jump, &cond_earliest);
2593 if (! cond)
2594 return FALSE;
2595
2596 /* If the conditional jump is more than just a conditional jump, then
2597 we cannot do if-conversion on this block. */
2598 if (! onlyjump_p (jump))
2599 return FALSE;
2600
2601 /* We must be comparing objects whose modes imply the size. */
2602 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2603 return FALSE;
2604
2605 /* Reverse the comparison code, if necessary. */
2606 code = GET_CODE (cond);
2607 if (then_bb == trap_bb)
2608 {
2609 code = reversed_comparison_code (cond, jump);
2610 if (code == UNKNOWN)
2611 return FALSE;
2612 }
2613
2614 /* Attempt to generate the conditional trap. */
2615 seq = gen_cond_trap (code, XEXP (cond, 0),
2616 XEXP (cond, 1),
2617 TRAP_CODE (PATTERN (trap)));
2618 if (seq == NULL)
2619 return FALSE;
2620
2621 num_true_changes++;
2622
2623 /* Emit the new insns before cond_earliest. */
2624 emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));
2625
2626 /* Delete the trap block if possible. */
2627 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2628 if (trap_bb->pred == NULL)
2629 {
2630 if (post_dominators)
2631 delete_from_dominance_info (post_dominators, trap_bb);
2632 delete_block (trap_bb);
2633 }
2634
2635 /* If the non-trap block and the test are now adjacent, merge them.
2636 Otherwise we must insert a direct branch. */
2637 if (test_bb->next_bb == other_bb)
2638 {
2639 struct ce_if_block new_ce_info;
2640 delete_insn (jump);
2641 memset (&new_ce_info, '\0', sizeof (new_ce_info));
2642 new_ce_info.test_bb = test_bb;
2643 new_ce_info.then_bb = NULL;
2644 new_ce_info.else_bb = NULL;
2645 new_ce_info.join_bb = other_bb;
2646 merge_if_block (&new_ce_info);
2647 }
2648 else
2649 {
2650 rtx lab, newjump;
2651
2652 lab = JUMP_LABEL (jump);
2653 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2654 LABEL_NUSES (lab) += 1;
2655 JUMP_LABEL (newjump) = lab;
2656 emit_barrier_after (newjump);
2657
2658 delete_insn (jump);
2659 }
2660
2661 return TRUE;
2662 }
2663
2664 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
2665 return it. */
2666
2667 static rtx
2668 block_has_only_trap (basic_block bb)
2669 {
2670 rtx trap;
2671
2672 /* We're not the exit block. */
2673 if (bb == EXIT_BLOCK_PTR)
2674 return NULL_RTX;
2675
2676 /* The block must have no successors. */
2677 if (bb->succ)
2678 return NULL_RTX;
2679
2680 /* The only instruction in the THEN block must be the trap. */
2681 trap = first_active_insn (bb);
2682 if (! (trap == BB_END (bb)
2683 && GET_CODE (PATTERN (trap)) == TRAP_IF
2684 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2685 return NULL_RTX;
2686
2687 return trap;
2688 }
2689
2690 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2691 transformable, but not necessarily the other. There need be no
2692 JOIN block.
2693
2694 Return TRUE if we were successful at converting the block.
2695
2696 Cases we'd like to look at:
2697
2698 (1)
2699 if (test) goto over; // x not live
2700 x = a;
2701 goto label;
2702 over:
2703
2704 becomes
2705
2706 x = a;
2707 if (! test) goto label;
2708
2709 (2)
2710 if (test) goto E; // x not live
2711 x = big();
2712 goto L;
2713 E:
2714 x = b;
2715 goto M;
2716
2717 becomes
2718
2719 x = b;
2720 if (test) goto M;
2721 x = big();
2722 goto L;
2723
2724 (3) // This one's really only interesting for targets that can do
2725 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2726 // it results in multiple branches on a cache line, which often
2727 // does not sit well with predictors.
2728
2729 if (test1) goto E; // predicted not taken
2730 x = a;
2731 if (test2) goto F;
2732 ...
2733 E:
2734 x = b;
2735 J:
2736
2737 becomes
2738
2739 x = a;
2740 if (test1) goto E;
2741 if (test2) goto F;
2742
2743 Notes:
2744
2745 (A) Don't do (2) if the branch is predicted against the block we're
2746 eliminating. Do it anyway if we can eliminate a branch; this requires
2747 that the sole successor of the eliminated block postdominate the other
2748 side of the if.
2749
2750 (B) With CE, on (3) we can steal from both sides of the if, creating
2751
2752 if (test1) x = a;
2753 if (!test1) x = b;
2754 if (test1) goto J;
2755 if (test2) goto F;
2756 ...
2757 J:
2758
2759 Again, this is most useful if J postdominates.
2760
2761 (C) CE substitutes for helpful life information.
2762
2763 (D) These heuristics need a lot of work. */
2764
2765 /* Tests for case 1 above. */
2766
2767 static int
2768 find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
2769 {
2770 basic_block then_bb = then_edge->dest;
2771 basic_block else_bb = else_edge->dest, new_bb;
2772 edge then_succ = then_bb->succ;
2773 int then_bb_index;
2774
2775 /* THEN has one successor. */
2776 if (!then_succ || then_succ->succ_next != NULL)
2777 return FALSE;
2778
2779 /* THEN does not fall through, but is not strange either. */
2780 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2781 return FALSE;
2782
2783 /* THEN has one predecessor. */
2784 if (then_bb->pred->pred_next != NULL)
2785 return FALSE;
2786
2787 /* THEN must do something. */
2788 if (forwarder_block_p (then_bb))
2789 return FALSE;
2790
2791 num_possible_if_blocks++;
2792 if (rtl_dump_file)
2793 fprintf (rtl_dump_file,
2794 "\nIF-CASE-1 found, start %d, then %d\n",
2795 test_bb->index, then_bb->index);
2796
2797 /* THEN is small. */
2798 if (count_bb_insns (then_bb) > BRANCH_COST)
2799 return FALSE;
2800
2801 /* Registers set are dead, or are predicable. */
2802 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2803 then_bb->succ->dest, 1))
2804 return FALSE;
2805
2806 /* Conversion went ok, including moving the insns and fixing up the
2807 jump. Adjust the CFG to match. */
2808
2809 bitmap_operation (test_bb->global_live_at_end,
2810 else_bb->global_live_at_start,
2811 then_bb->global_live_at_end, BITMAP_IOR);
2812
2813 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2814 then_bb_index = then_bb->index;
2815 if (post_dominators)
2816 delete_from_dominance_info (post_dominators, then_bb);
2817 delete_block (then_bb);
2818
2819 /* Make the rest of the code believe that the newly created block is the THEN_BB
2820 block we removed. */
2821 if (new_bb)
2822 {
2823 new_bb->index = then_bb_index;
2824 BASIC_BLOCK (then_bb_index) = new_bb;
2825 if (post_dominators)
2826 add_to_dominance_info (post_dominators, new_bb);
2827 }
2828 /* We've possibly created a jump to the next insn; cleanup_cfg will
2829 clean that up later. */
2830
2831 num_true_changes++;
2832 num_updated_if_blocks++;
2833
2834 return TRUE;
2835 }
2836
2837 /* Test for case 2 above. */
2838
2839 static int
2840 find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
2841 {
2842 basic_block then_bb = then_edge->dest;
2843 basic_block else_bb = else_edge->dest;
2844 edge else_succ = else_bb->succ;
2845 rtx note;
2846
2847 /* ELSE has one successor. */
2848 if (!else_succ || else_succ->succ_next != NULL)
2849 return FALSE;
2850
2851 /* ELSE outgoing edge is not complex. */
2852 if (else_succ->flags & EDGE_COMPLEX)
2853 return FALSE;
2854
2855 /* ELSE has one predecessor. */
2856 if (else_bb->pred->pred_next != NULL)
2857 return FALSE;
2858
2859 /* THEN is not EXIT. */
2860 if (then_bb->index < 0)
2861 return FALSE;
2862
2863 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2864 note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
2865 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2866 ;
2867 else if (else_succ->dest->index < 0
2868 || dominated_by_p (post_dominators, then_bb,
2869 else_succ->dest))
2870 ;
2871 else
2872 return FALSE;
2873
2874 num_possible_if_blocks++;
2875 if (rtl_dump_file)
2876 fprintf (rtl_dump_file,
2877 "\nIF-CASE-2 found, start %d, else %d\n",
2878 test_bb->index, else_bb->index);
2879
2880 /* ELSE is small. */
2881 if (count_bb_insns (else_bb) > BRANCH_COST)
2882 return FALSE;
2883
2884 /* Registers set are dead, or are predicable. */
2885 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2886 return FALSE;
2887
2888 /* Conversion went ok, including moving the insns and fixing up the
2889 jump. Adjust the CFG to match. */
2890
2891 bitmap_operation (test_bb->global_live_at_end,
2892 then_bb->global_live_at_start,
2893 else_bb->global_live_at_end, BITMAP_IOR);
2894
2895 if (post_dominators)
2896 delete_from_dominance_info (post_dominators, else_bb);
2897 delete_block (else_bb);
2898
2899 num_true_changes++;
2900 num_updated_if_blocks++;
2901
2902 /* ??? We may now fallthru from one of THEN's successors into a join
2903 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2904
2905 return TRUE;
2906 }
2907
2908 /* A subroutine of dead_or_predicable called through for_each_rtx.
2909 Return 1 if a memory is found. */
2910
2911 static int
2912 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
2913 {
2914 return GET_CODE (*px) == MEM;
2915 }
2916
2917 /* Used by the code above to perform the actual rtl transformations.
2918 Return TRUE if successful.
2919
2920 TEST_BB is the block containing the conditional branch. MERGE_BB
2921 is the block containing the code to manipulate. NEW_DEST is the
2922 label TEST_BB should be branching to after the conversion.
2923 REVERSEP is true if the sense of the branch should be reversed. */
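/* For case (1) above, for instance (illustrative mapping only): MERGE_BB is
   the block containing "x = a;", OTHER_BB is the block after "over:",
   NEW_DEST is "label", and REVERSEP is nonzero, so that once "x = a;" has
   been hoisted the branch becomes "if (! test) goto label;". */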
2924
2925 static int
2926 dead_or_predicable (basic_block test_bb, basic_block merge_bb,
2927 basic_block other_bb, basic_block new_dest, int reversep)
2928 {
2929 rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2930
2931 jump = BB_END (test_bb);
2932
2933 /* Find the extent of the real code in the merge block. */
2934 head = BB_HEAD (merge_bb);
2935 end = BB_END (merge_bb);
2936
2937 if (GET_CODE (head) == CODE_LABEL)
2938 head = NEXT_INSN (head);
2939 if (GET_CODE (head) == NOTE)
2940 {
2941 if (head == end)
2942 {
2943 head = end = NULL_RTX;
2944 goto no_body;
2945 }
2946 head = NEXT_INSN (head);
2947 }
2948
2949 if (GET_CODE (end) == JUMP_INSN)
2950 {
2951 if (head == end)
2952 {
2953 head = end = NULL_RTX;
2954 goto no_body;
2955 }
2956 end = PREV_INSN (end);
2957 }
2958
2959 /* Disable handling dead code by conditional execution if the machine needs
2960 to do anything funny with the tests, etc. */
2961 #ifndef IFCVT_MODIFY_TESTS
2962 if (HAVE_conditional_execution)
2963 {
2964 /* In the conditional execution case, we have things easy. We know
2965 the condition is reversible. We don't have to check life info
2966 because we're going to conditionally execute the code anyway.
2967 All that's left is making sure the insns involved can actually
2968 be predicated. */
2969
2970 rtx cond, prob_val;
2971
2972 cond = cond_exec_get_condition (jump);
2973 if (! cond)
2974 return FALSE;
2975
2976 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2977 if (prob_val)
2978 prob_val = XEXP (prob_val, 0);
2979
2980 if (reversep)
2981 {
2982 enum rtx_code rev = reversed_comparison_code (cond, jump);
2983 if (rev == UNKNOWN)
2984 return FALSE;
2985 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2986 XEXP (cond, 1));
2987 if (prob_val)
2988 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2989 }
2990
2991 if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
2992 prob_val, 0))
2993 goto cancel;
2994
2995 earliest = jump;
2996 }
2997 else
2998 #endif
2999 {
3000 /* In the non-conditional execution case, we have to verify that there
3001 are no trapping operations, no calls, no references to memory, and
3002 that any registers modified are dead at the branch site. */
3003
3004 rtx insn, cond, prev;
3005 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
3006 regset merge_set, tmp, test_live, test_set;
3007 struct propagate_block_info *pbi;
3008 int i, fail = 0;
3009
3010 /* Check for no calls or trapping operations. */
3011 for (insn = head; ; insn = NEXT_INSN (insn))
3012 {
3013 if (GET_CODE (insn) == CALL_INSN)
3014 return FALSE;
3015 if (INSN_P (insn))
3016 {
3017 if (may_trap_p (PATTERN (insn)))
3018 return FALSE;
3019
3020 /* ??? Even non-trapping memories such as stack frame
3021 references must be avoided. For stores, we collect
3022 no lifetime info; for reads, we'd have to assert
3023 true_dependence false against every store in the
3024 TEST range. */
3025 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
3026 return FALSE;
3027 }
3028 if (insn == end)
3029 break;
3030 }
3031
3032 if (! any_condjump_p (jump))
3033 return FALSE;
3034
3035 /* Find the extent of the conditional. */
3036 cond = noce_get_condition (jump, &earliest);
3037 if (! cond)
3038 return FALSE;
3039
3040 /* Collect:
3041 MERGE_SET = set of registers set in MERGE_BB
3042 TEST_LIVE = set of registers live at EARLIEST
3043 TEST_SET = set of registers set between EARLIEST and the
3044 end of the block. */
3045
3046 tmp = INITIALIZE_REG_SET (tmp_head);
3047 merge_set = INITIALIZE_REG_SET (merge_set_head);
3048 test_live = INITIALIZE_REG_SET (test_live_head);
3049 test_set = INITIALIZE_REG_SET (test_set_head);
3050
3051 /* ??? bb->local_set is only valid during calculate_global_regs_live,
3052 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
3053 since we've already asserted that MERGE_BB is small. */
3054 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
3055
3056 /* For small register class machines, don't lengthen lifetimes of
3057 hard registers before reload. */
3058 if (SMALL_REGISTER_CLASSES && ! reload_completed)
3059 {
3060 EXECUTE_IF_SET_IN_BITMAP
3061 (merge_set, 0, i,
3062 {
3063 if (i < FIRST_PSEUDO_REGISTER
3064 && ! fixed_regs[i]
3065 && ! global_regs[i])
3066 fail = 1;
3067 });
3068 }
3069
3070 /* For TEST, we're interested in a range of insns, not a whole block.
3071 Moreover, we're interested in the insns live from OTHER_BB. */
3072
3073 COPY_REG_SET (test_live, other_bb->global_live_at_start);
3074 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
3075 0);
3076
3077 for (insn = jump; ; insn = prev)
3078 {
3079 prev = propagate_one_insn (pbi, insn);
3080 if (insn == earliest)
3081 break;
3082 }
3083
3084 free_propagate_block_info (pbi);
3085
3086 /* We can perform the transformation if
3087 MERGE_SET & (TEST_SET | TEST_LIVE)
3088 and
3089 TEST_SET & merge_bb->global_live_at_start
3090 are empty. */
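/* Intuitively (a sketch of the reasoning): the first condition says the
   merge block must not set any register that the test sequence either sets
   or still needs, and the second says the test sequence must not set any
   register that the merge block reads on entry; otherwise hoisting the
   merge block's insns above the branch would change the values they compute
   or destroy the comparison. */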
3091
3092 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
3093 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
3094 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3095
3096 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
3097 BITMAP_AND);
3098 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
3099
3100 FREE_REG_SET (tmp);
3101 FREE_REG_SET (merge_set);
3102 FREE_REG_SET (test_live);
3103 FREE_REG_SET (test_set);
3104
3105 if (fail)
3106 return FALSE;
3107 }
3108
3109 no_body:
3110 /* We don't want to use normal invert_jump or redirect_jump because
3111 we don't want delete_insn called. Also, we want to do our own
3112 change group management. */
3113
3114 old_dest = JUMP_LABEL (jump);
3115 if (other_bb != new_dest)
3116 {
3117 new_label = block_label (new_dest);
3118 if (reversep
3119 ? ! invert_jump_1 (jump, new_label)
3120 : ! redirect_jump_1 (jump, new_label))
3121 goto cancel;
3122 }
3123
3124 if (! apply_change_group ())
3125 return FALSE;
3126
3127 if (other_bb != new_dest)
3128 {
3129 if (old_dest)
3130 LABEL_NUSES (old_dest) -= 1;
3131 if (new_label)
3132 LABEL_NUSES (new_label) += 1;
3133 JUMP_LABEL (jump) = new_label;
3134 if (reversep)
3135 invert_br_probabilities (jump);
3136
3137 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
3138 if (reversep)
3139 {
3140 gcov_type count, probability;
3141 count = BRANCH_EDGE (test_bb)->count;
3142 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
3143 FALLTHRU_EDGE (test_bb)->count = count;
3144 probability = BRANCH_EDGE (test_bb)->probability;
3145 BRANCH_EDGE (test_bb)->probability
3146 = FALLTHRU_EDGE (test_bb)->probability;
3147 FALLTHRU_EDGE (test_bb)->probability = probability;
3148 update_br_prob_note (test_bb);
3149 }
3150 }
3151
3152 /* Move the insns out of MERGE_BB to before the branch. */
3153 if (head != NULL)
3154 {
3155 if (end == BB_END (merge_bb))
3156 BB_END (merge_bb) = PREV_INSN (head);
3157
3158 if (squeeze_notes (&head, &end))
3159 return TRUE;
3160
3161 reorder_insns (head, end, PREV_INSN (earliest));
3162 }
3163
3164 /* Remove the jump and edge if we can. */
3165 if (other_bb == new_dest)
3166 {
3167 delete_insn (jump);
3168 remove_edge (BRANCH_EDGE (test_bb));
3169 /* ??? Can't merge blocks here, as then_bb is still in use.
3170 At minimum, the merge will get done just before bb-reorder. */
3171 }
3172
3173 return TRUE;
3174
3175 cancel:
3176 cancel_changes (0);
3177 return FALSE;
3178 }
3179 \f
3180 /* Main entry point for all if-conversion. */
3181
3182 void
3183 if_convert (int x_life_data_ok)
3184 {
3185 basic_block bb;
3186 int pass;
3187
3188 num_possible_if_blocks = 0;
3189 num_updated_if_blocks = 0;
3190 num_true_changes = 0;
3191 life_data_ok = (x_life_data_ok != 0);
3192
3193 if (! (* targetm.cannot_modify_jumps_p) ())
3194 mark_loop_exit_edges ();
3195
3196 /* Free up basic_block_for_insn so that we don't have to keep it
3197 up to date, either here or in merge_blocks. */
3198 free_basic_block_vars (1);
3199
3200 /* Compute postdominators if we think we'll use them. */
3201 post_dominators = NULL;
3202 if (HAVE_conditional_execution || life_data_ok)
3203 {
3204 post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
3205 }
3206 if (life_data_ok)
3207 clear_bb_flags ();
3208
3209 /* Go through each of the basic blocks looking for things to convert. If we
3210 have conditional execution, we make multiple passes to allow us to handle
3211 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
3212 pass = 0;
3213 do
3214 {
3215 cond_exec_changed_p = FALSE;
3216 pass++;
3217
3218 #ifdef IFCVT_MULTIPLE_DUMPS
3219 if (rtl_dump_file && pass > 1)
3220 fprintf (rtl_dump_file, "\n\n========== Pass %d ==========\n", pass);
3221 #endif
3222
3223 FOR_EACH_BB (bb)
3224 {
3225 basic_block new_bb;
3226 while ((new_bb = find_if_header (bb, pass)))
3227 bb = new_bb;
3228 }
3229
3230 #ifdef IFCVT_MULTIPLE_DUMPS
3231 if (rtl_dump_file && cond_exec_changed_p)
3232 print_rtl_with_bb (rtl_dump_file, get_insns ());
3233 #endif
3234 }
3235 while (cond_exec_changed_p);
3236
3237 #ifdef IFCVT_MULTIPLE_DUMPS
3238 if (rtl_dump_file)
3239 fprintf (rtl_dump_file, "\n\n========== no more changes\n");
3240 #endif
3241
3242 if (post_dominators)
3243 free_dominance_info (post_dominators);
3244
3245 if (rtl_dump_file)
3246 fflush (rtl_dump_file);
3247
3248 clear_aux_for_blocks ();
3249
3250 /* Rebuild life info for basic blocks that require it. */
3251 if (num_true_changes && life_data_ok)
3252 {
3253 /* If we allocated new pseudos, we must resize the array for sched1. */
3254 if (max_regno < max_reg_num ())
3255 {
3256 max_regno = max_reg_num ();
3257 allocate_reg_info (max_regno, FALSE, FALSE);
3258 }
3259 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3260 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3261 | PROP_KILL_DEAD_CODE);
3262 }
3263
3264 /* Write the final stats. */
3265 if (rtl_dump_file && num_possible_if_blocks > 0)
3266 {
3267 fprintf (rtl_dump_file,
3268 "\n%d possible IF blocks searched.\n",
3269 num_possible_if_blocks);
3270 fprintf (rtl_dump_file,
3271 "%d IF blocks converted.\n",
3272 num_updated_if_blocks);
3273 fprintf (rtl_dump_file,
3274 "%d true changes made.\n\n\n",
3275 num_true_changes);
3276 }
3277
3278 #ifdef ENABLE_CHECKING
3279 verify_flow_info ();
3280 #endif
3281 }