ifcvt.c (noce_try_store_flag_mask): Check rtx cost.
/* If-conversion support.
   Copyright (C) 2000-2014 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "regs.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "except.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "expr.h"
#include "output.h"
#include "insn-codes.h"
#include "optabs.h"
#include "diagnostic-core.h"
#include "tm_p.h"
#include "cfgloop.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"
#include "shrink-wrap.h"
#include "ifcvt.h"

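/* Provide zero fallbacks for optional md-defined patterns, so the
   HAVE_* macros below can be tested unconditionally.  */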
#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif
#ifndef HAVE_trap
#define HAVE_trap 0
#endif

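/* By default convert at most one insn more than the cost of the branch
   being removed (see BRANCH_COST); a target can define its own limit.  */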
#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
   + 1)
#endif

#define IFCVT_MULTIPLE_DUMPS 1

#define NULL_BLOCK ((basic_block) NULL)

/* True if after combine pass.  */
static bool ifcvt_after_combine;

/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* Forward references.  */
static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
static rtx_insn *first_active_insn (basic_block);
static rtx_insn *last_active_insn (basic_block, int);
static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
                                    int);
static rtx cond_exec_get_condition (rtx_insn *);
static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
static int noce_operand_ok (const_rtx);
static void merge_if_block (ce_if_block *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
static int noce_find_if_block (basic_block, edge, edge, int);
static int cond_exec_find_if_block (ce_if_block *);
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int dead_or_predicable (basic_block, basic_block, basic_block,
                               edge, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx_insn *block_has_only_trap (basic_block);
\f
/* Count the number of non-jump active insns in BB.  */

static int
count_bb_insns (const_basic_block bb)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);

  while (1)
    {
      if (active_insn_p (insn) && !JUMP_P (insn))
        count++;

      if (insn == BB_END (bb))
        break;
      insn = NEXT_INSN (insn);
    }

  return count;
}

/* Determine whether the total insn_rtx_cost on non-jump insns in
   basic block BB is less than MAX_COST.  This function returns
   false if the cost of any instruction could not be estimated.

   The cost of the non-jump insns in BB is scaled by REG_BR_PROB_BASE
   as those insns are being speculated.  MAX_COST is scaled with SCALE
   plus a small fudge factor.  */

static bool
cheap_bb_rtx_cost_p (const_basic_block bb, int scale, int max_cost)
{
  int count = 0;
  rtx_insn *insn = BB_HEAD (bb);
  bool speed = optimize_bb_for_speed_p (bb);

  /* Set scale to REG_BR_PROB_BASE to avoid the identical scaling
     applied to insn_rtx_cost when optimizing for size.  Only do
     this after combine because if-conversion might interfere with
     passes before combine.

     Use optimize_function_for_speed_p instead of the pre-defined
     variable speed to make sure it is set to the same value for all
     basic blocks in one if-conversion transformation.  */
  if (!optimize_function_for_speed_p (cfun) && ifcvt_after_combine)
    scale = REG_BR_PROB_BASE;
  /* Our branch probability/scaling factors are just estimates and don't
     account for cases where we can get speculation for free and other
     secondary benefits.  So we fudge the scale factor to make speculating
     appear a little more profitable when optimizing for performance.  */
  else
    scale += REG_BR_PROB_BASE / 8;

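  /* Scale MAX_COST once up front; each insn's cost below is multiplied
     by REG_BR_PROB_BASE, so both sides of the comparison use the same
     units.  */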
  max_cost *= scale;

  while (1)
    {
      if (NONJUMP_INSN_P (insn))
        {
          int cost = insn_rtx_cost (PATTERN (insn), speed) * REG_BR_PROB_BASE;
          if (cost == 0)
            return false;

          /* If this instruction is the load or set of a "stack" register,
             such as a floating point register on x87, then the cost of
             speculatively executing this insn may need to include
             the additional cost of popping its result off of the
             register stack.  Unfortunately, correctly recognizing and
             accounting for this additional overhead is tricky, so for
             now we simply prohibit such speculative execution.  */
#ifdef STACK_REGS
          {
            rtx set = single_set (insn);
            if (set && STACK_REG_P (SET_DEST (set)))
              return false;
          }
#endif

          count += cost;
          if (count >= max_cost)
            return false;
        }
      else if (CALL_P (insn))
        return false;

      if (insn == BB_END (bb))
        break;
      insn = NEXT_INSN (insn);
    }

  return true;
}

/* Return the first non-jump active insn in the basic block.  */

static rtx_insn *
first_active_insn (basic_block bb)
{
  rtx_insn *insn = BB_HEAD (bb);

  if (LABEL_P (insn))
    {
      if (insn == BB_END (bb))
        return NULL;
      insn = NEXT_INSN (insn);
    }

  while (NOTE_P (insn) || DEBUG_INSN_P (insn))
    {
      if (insn == BB_END (bb))
        return NULL;
      insn = NEXT_INSN (insn);
    }

  if (JUMP_P (insn))
    return NULL;

  return insn;
}

/* Return the last active non-jump insn in the basic block.  */

static rtx_insn *
last_active_insn (basic_block bb, int skip_use_p)
{
  rtx_insn *insn = BB_END (bb);
  rtx_insn *head = BB_HEAD (bb);

  while (NOTE_P (insn)
         || JUMP_P (insn)
         || DEBUG_INSN_P (insn)
         || (skip_use_p
             && NONJUMP_INSN_P (insn)
             && GET_CODE (PATTERN (insn)) == USE))
    {
      if (insn == head)
        return NULL;
      insn = PREV_INSN (insn);
    }

  if (LABEL_P (insn))
    return NULL;

  return insn;
}

/* Return the active insn before INSN inside basic block CURR_BB.  */

static rtx_insn *
find_active_insn_before (basic_block curr_bb, rtx_insn *insn)
{
  if (!insn || insn == BB_HEAD (curr_bb))
    return NULL;

  while ((insn = PREV_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
        break;

      /* No other active insn all the way to the start of the basic block.  */
      if (insn == BB_HEAD (curr_bb))
        return NULL;
    }

  return insn;
}

/* Return the active insn after INSN inside basic block CURR_BB.  */

static rtx_insn *
find_active_insn_after (basic_block curr_bb, rtx_insn *insn)
{
  if (!insn || insn == BB_END (curr_bb))
    return NULL;

  while ((insn = NEXT_INSN (insn)) != NULL_RTX)
    {
      if (NONJUMP_INSN_P (insn) || JUMP_P (insn) || CALL_P (insn))
        break;

      /* No other active insn all the way to the end of the basic block.  */
      if (insn == BB_END (curr_bb))
        return NULL;
    }

  return insn;
}

/* Return the basic block reached by falling through the basic block BB.  */

static basic_block
block_fallthru (basic_block bb)
{
  edge e = find_fallthru_edge (bb->succs);

  return (e) ? e->dest : NULL_BLOCK;
}

/* Return true if RTXs A and B can be safely interchanged.  */

static bool
rtx_interchangeable_p (const_rtx a, const_rtx b)
{
  if (!rtx_equal_p (a, b))
    return false;

  if (GET_CODE (a) != MEM)
    return true;

  /* A dead type-unsafe memory reference is legal, but a live type-unsafe
     memory reference is not.  Interchanging a dead type-unsafe memory
     reference with a live type-safe one creates a live type-unsafe memory
     reference, in other words, it makes the program illegal.
     We check here conservatively whether the two memory references have
     equal memory attributes.  */

  return mem_attrs_eq_p (get_mem_attrs (a), get_mem_attrs (b));
}

\f
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.  */

static int
cond_exec_process_insns (ce_if_block *ce_info ATTRIBUTE_UNUSED,
                         /* if block information */rtx_insn *start,
                         /* first insn to look at */rtx end,
                         /* last insn to look at */rtx test,
                         /* conditional execution test */int prob_val,
                         /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;
  rtx_insn *insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* dwarf2out can't cope with conditional prologues.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
        return FALSE;

      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
        goto insn_done;

      gcc_assert (NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* dwarf2out can't cope with conditional unwind info.  */
      if (RTX_FRAME_RELATED_P (insn))
        return FALSE;

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
        {
          /* ??? Ug.  Actually unlinking the thing is problematic,
             given what we'd have to coordinate with our callers.  */
          SET_INSN_DELETED (insn);
          goto insn_done;
        }

      /* Last insn wasn't last?  */
      if (must_be_last)
        return FALSE;

      if (modified_in_p (test, insn))
        {
          if (!mod_ok)
            return FALSE;
          must_be_last = TRUE;
        }

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
        {
          if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
            return FALSE;

          xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
                               COND_EXEC_TEST (pattern));
          pattern = COND_EXEC_CODE (pattern);
        }

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
        return FALSE;
#endif

      validate_change (insn, &PATTERN (insn), pattern, 1);

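      /* For a conditionalized call, also queue a REG_BR_PROB note
         recording PROB_VAL, the probability that the call is executed.  */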
      if (CALL_P (insn) && prob_val >= 0)
        validate_change (insn, &REG_NOTES (insn),
                         gen_rtx_INT_LIST ((machine_mode) REG_BR_PROB,
                                           prob_val, REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
        break;
    }

  return TRUE;
}

/* Return the condition for a jump.  Do not do any special processing.  */

static rtx
cond_exec_get_condition (rtx_insn *jump)
{
  rtx test_if, cond;

  if (any_condjump_p (jump))
    test_if = SET_SRC (pc_set (jump));
  else
    return NULL_RTX;
  cond = XEXP (test_if, 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (test_if, 2)) == JUMP_LABEL (jump))
    {
      enum rtx_code rev = reversed_comparison_code (cond, jump);
      if (rev == UNKNOWN)
        return NULL_RTX;

      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
                             XEXP (cond, 1));
    }

  return cond;
}

/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_exec_process_if_block (ce_if_block * ce_info,
                            /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;  /* last test block */
  basic_block then_bb = ce_info->then_bb;  /* THEN */
  basic_block else_bb = ce_info->else_bb;  /* ELSE or NULL */
  rtx test_expr;                /* expression in IF_THEN_ELSE that is tested */
  rtx_insn *then_start;         /* first insn in THEN block */
  rtx_insn *then_end;           /* last insn + 1 in THEN block */
  rtx_insn *else_start = NULL;  /* first insn in ELSE block or NULL */
  rtx_insn *else_end = NULL;    /* last insn + 1 in ELSE block */
  int max;                      /* max # of insns to convert.  */
  int then_mod_ok;              /* whether conditional mods are ok in THEN */
  rtx true_expr;                /* test for else block insns */
  rtx false_expr;               /* test for then block insns */
  int true_prob_val;            /* probability of else block */
  int false_prob_val;           /* probability of then block */
  rtx_insn *then_last_head = NULL;   /* Last match at the head of THEN */
  rtx_insn *else_last_head = NULL;   /* Last match at the head of ELSE */
  rtx_insn *then_first_tail = NULL;  /* First match at the tail of THEN */
  rtx_insn *else_first_tail = NULL;  /* First match at the tail of ELSE */
  int then_n_insns, else_n_insns, n_insns;
  enum rtx_code false_code;
  rtx note;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
        return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we cannot do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  then_n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  n_insns = then_n_insns;
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      int n_matching;

      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      else_n_insns = ce_info->num_else_insns = count_bb_insns (else_bb);
      n_insns += else_n_insns;

      /* Look for matching sequences at the head and tail of the two blocks,
         and limit the range of insns to be converted if possible.  */
      n_matching = flow_find_cross_jump (then_bb, else_bb,
                                         &then_first_tail, &else_first_tail,
                                         NULL);
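      /* If a matching tail sequence swallowed a whole block, that block
         contributes nothing further to convert.  */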
      if (then_first_tail == BB_HEAD (then_bb))
        then_start = then_end = NULL;
      if (else_first_tail == BB_HEAD (else_bb))
        else_start = else_end = NULL;

      if (n_matching > 0)
        {
          if (then_end)
            then_end = find_active_insn_before (then_bb, then_first_tail);
          if (else_end)
            else_end = find_active_insn_before (else_bb, else_first_tail);
          n_insns -= 2 * n_matching;
        }

      if (then_start
          && else_start
          && then_n_insns > n_matching
          && else_n_insns > n_matching)
        {
          int longest_match = MIN (then_n_insns - n_matching,
                                   else_n_insns - n_matching);
          n_matching
            = flow_find_head_matching_sequence (then_bb, else_bb,
                                                &then_last_head,
                                                &else_last_head,
                                                longest_match);

          if (n_matching > 0)
            {
              rtx_insn *insn;

              /* We won't pass the insns in the head sequence to
                 cond_exec_process_insns, so we need to test them here
                 to make sure that they don't clobber the condition.  */
              for (insn = BB_HEAD (then_bb);
                   insn != NEXT_INSN (then_last_head);
                   insn = NEXT_INSN (insn))
                if (!LABEL_P (insn) && !NOTE_P (insn)
                    && !DEBUG_INSN_P (insn)
                    && modified_in_p (test_expr, insn))
                  return FALSE;
            }

          if (then_last_head == then_end)
            then_start = then_end = NULL;
          if (else_last_head == else_end)
            else_start = else_end = NULL;

          if (n_matching > 0)
            {
              if (then_start)
                then_start = find_active_insn_after (then_bb, then_last_head);
              if (else_start)
                else_start = find_active_insn_after (else_bb, else_last_head);
              n_insns -= 2 * n_matching;
            }
        }
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
                                 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note)
    {
      true_prob_val = XINT (note, 0);
      false_prob_val = REG_BR_PROB_BASE - true_prob_val;
    }
  else
    {
      true_prob_val = -1;
      false_prob_val = -1;
    }

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
        goto fail;

      do
        {
          rtx_insn *start, *end;
          rtx t, f;
          enum rtx_code f_code;

          bb = block_fallthru (bb);
          start = first_active_insn (bb);
          end = last_active_insn (bb, TRUE);
          if (start
              && ! cond_exec_process_insns (ce_info, start, end, false_expr,
                                            false_prob_val, FALSE))
            goto fail;

          /* If the conditional jump is more than just a conditional jump, then
             we cannot do conditional execution conversion on this block.  */
          if (! onlyjump_p (BB_END (bb)))
            goto fail;

          /* Find the conditional jump and isolate the test.  */
          t = cond_exec_get_condition (BB_END (bb));
          if (! t)
            goto fail;

          f_code = reversed_comparison_code (t, BB_END (bb));
          if (f_code == UNKNOWN)
            goto fail;

          f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
          if (ce_info->and_and_p)
            {
              t = gen_rtx_AND (GET_MODE (t), true_expr, t);
              f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
            }
          else
            {
              t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
              f = gen_rtx_AND (GET_MODE (t), false_expr, f);
            }

          /* If the machine description needs to modify the tests, such as
             setting a conditional execution register from a comparison, it can
             do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
          IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

          /* See if the conversion failed.  */
          if (!t || !f)
            goto fail;
#endif

          true_expr = t;
          false_expr = f;
        }
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     in the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
          || ! cond_exec_process_insns (ce_info, then_start, then_end,
                                        false_expr, false_prob_val,
                                        then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
                                    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
             n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  If we had matching sequences, make sure to delete one
     copy at the appropriate location first: delete the copy in the THEN branch
     for a tail sequence so that the remaining one is executed last for both
     branches, and delete the copy in the ELSE branch for a head sequence so
     that the remaining one is executed first for both branches.  */
  if (then_first_tail)
    {
      rtx_insn *from = then_first_tail;
      if (!INSN_P (from))
        from = find_active_insn_after (then_bb, from);
      delete_insn_chain (from, BB_END (then_bb), false);
    }
  if (else_last_head)
    delete_insn_chain (first_active_insn (else_bb), else_last_head, false);

  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
\f
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block.  */
  basic_block test_bb, then_bb, else_bb, join_bb;

  /* The jump that ends TEST_BB.  */
  rtx_insn *jump;

  /* The jump condition.  */
  rtx cond;

  /* New insns should be inserted before this one.  */
  rtx_insn *cond_earliest;

  /* Insns in the THEN and ELSE block.  There is always just this
     one insn in those blocks.  The insns are single_set insns.
     If there was no ELSE block, INSN_B is the last insn before
     COND_EARLIEST, or NULL_RTX.  In the former case, the insn
     operands are still valid, as if INSN_B was moved down below
     the jump.  */
  rtx_insn *insn_a, *insn_b;

  /* The SET_SRC of INSN_A and INSN_B.  */
  rtx a, b;

  /* The SET_DEST of INSN_A.  */
  rtx x;

  /* True if this if block is not canonical.  In the canonical form of
     if blocks, the THEN_BB is the block reached via the fallthru edge
     from TEST_BB.  For the noce transformations, we allow the symmetric
     form as well.  */
  bool then_else_reversed;

  /* Estimated cost of the particular branch instruction.  */
  int branch_cost;
};

static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
static int noce_try_move (struct noce_if_info *);
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
                            rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx_insn **);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
static int noce_try_sign_mask (struct noce_if_info *);

/* Helper function for noce_try_store_flag*.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
                      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
                  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    {
      rtx set = pc_set (if_info->jump);
      cond = XEXP (SET_SRC (set), 0);
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
          && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump))
        reversep = !reversep;
      if (if_info->then_else_reversed)
        reversep = !reversep;
    }

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx src = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
                                XEXP (cond, 1));
      rtx set = gen_rtx_SET (VOIDmode, x, src);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      if (recog_memoized (insn) >= 0)
        {
          rtx_insn *seq = get_insns ();
          end_sequence ();
          emit_insn (seq);

          if_info->cond_earliest = if_info->jump;

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

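  /* Otherwise fall back on the generic store-flag expander; the
     LTU/LEU/GEU/GTU check tells it whether the comparison is unsigned.  */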
  return emit_store_flag (x, code, XEXP (cond, 0),
                          XEXP (cond, 1), VOIDmode,
                          (code == LTU || code == LEU
                           || code == GEU || code == GTU), normalize);
}

/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx_insn *seq, *insn;
      rtx target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
         otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
             ? emit_move_insn (x, y)
             : emit_insn (gen_rtx_SET (VOIDmode, x, y));
      seq = get_insns ();
      end_sequence ();

      if (recog_memoized (insn) <= 0)
        {
          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              rtx op = XEXP (x, 0);
              unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
              unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

              /* store_bit_field expects START to be relative to
                 BYTES_BIG_ENDIAN and adjusts this value for machines with
                 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
                 invoke store_bit_field again it is necessary to have the START
                 value from the first call.  */
              if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
                {
                  if (MEM_P (op))
                    start = BITS_PER_UNIT - start - size;
                  else
                    {
                      gcc_assert (REG_P (op));
                      start = BITS_PER_WORD - start - size;
                    }
                }

              gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
              store_bit_field (op, size, start, 0, 0, GET_MODE (x), y);
              return;
            }

          switch (GET_RTX_CLASS (GET_CODE (y)))
            {
            case RTX_UNARY:
              ot = code_to_optab (GET_CODE (y));
              if (ot)
                {
                  start_sequence ();
                  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                        emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            case RTX_BIN_ARITH:
            case RTX_COMM_ARITH:
              ot = code_to_optab (GET_CODE (y));
              if (ot)
                {
                  start_sequence ();
                  target = expand_binop (GET_MODE (y), ot,
                                         XEXP (y, 0), XEXP (y, 1),
                                         x, 0, OPTAB_DIRECT);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                        emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            default:
              break;
            }
        }

      emit_insn (seq);
      return;
    }

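  /* X is a STRICT_LOW_PART of a SUBREG: write Y into just the
     corresponding bits of the inner register with a bit-field store.  */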
  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos,
                   0, 0, outmode, y);
}

/* Return the sequence of instructions generated by if conversion.  This
   function calls end_sequence () to end the current stream, and ensures
   that the instructions are unshared, recognizable non-jump insns.
   On failure, this function returns NULL_RTX.  */

static rtx_insn *
end_ifcvt_sequence (struct noce_if_info *if_info)
{
  rtx_insn *insn;
  rtx_insn *seq = get_insns ();

  set_used_flags (if_info->x);
  set_used_flags (if_info->cond);
  set_used_flags (if_info->a);
  set_used_flags (if_info->b);
  unshare_all_rtl_in_chain (seq);
  end_sequence ();

  /* Make sure that all of the instructions emitted are recognizable,
     and that we haven't introduced a new jump instruction.
     As an exercise for the reader, build a general mechanism that
     allows proper placement of required clobbers.  */
  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
        || recog_memoized (insn) == -1)
      return NULL;

  return seq;
}

/* Convert "if (a != b) x = a; else x = b" into "x = a" and
   "if (a == b) x = a; else x = b" into "x = b".  */

static int
noce_try_move (struct noce_if_info *if_info)
{
  rtx cond = if_info->cond;
  enum rtx_code code = GET_CODE (cond);
  rtx y;
  rtx_insn *seq;

  if (code != NE && code != EQ)
    return FALSE;

  /* This optimization isn't valid if either A or B could be a NaN
     or a signed zero.  */
  if (HONOR_NANS (GET_MODE (if_info->x))
      || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
    return FALSE;

  /* Check whether the operands of the comparison are A and B,
     in either order.  */
  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
          && rtx_equal_p (if_info->b, XEXP (cond, 0))))
    {
      if (!rtx_interchangeable_p (if_info->a, if_info->b))
        return FALSE;

      y = (code == EQ) ? if_info->a : if_info->b;

      /* Avoid generating the move if the source is the destination.  */
      if (! rtx_equal_p (if_info->x, y))
        {
          start_sequence ();
          noce_emit_move_insn (if_info->x, y);
          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATION (if_info->insn_a));
        }
      return TRUE;
    }
  return FALSE;
}

/* Convert "if (test) x = 1; else x = 0".

   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
   tried in noce_try_store_flag_constants after noce_try_cmove has had
   a go at the conversion.  */

static int
noce_try_store_flag (struct noce_if_info *if_info)
{
  int reversep;
  rtx target;
  rtx_insn *seq;

  if (CONST_INT_P (if_info->b)
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
      && if_info->a == const0_rtx)
    reversep = 0;
  else if (if_info->b == const0_rtx
           && CONST_INT_P (if_info->a)
           && INTVAL (if_info->a) == STORE_FLAG_VALUE
           && (reversed_comparison_code (if_info->cond, if_info->jump)
               != UNKNOWN))
    reversep = 1;
  else
    return FALSE;

  start_sequence ();

  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
  if (target)
    {
      if (target != if_info->x)
        noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (! seq)
        return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
                               INSN_LOCATION (if_info->insn_a));
      return TRUE;
    }
  else
    {
      end_sequence ();
      return FALSE;
    }
}

/* Convert "if (test) x = a; else x = b", for A and B constant.  */

static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  machine_mode mode;

  if (CONST_INT_P (if_info->a)
      && CONST_INT_P (if_info->b))
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      diff = (unsigned HOST_WIDE_INT) itrue - ifalse;
      /* Make sure we can represent the difference between the two values.  */
      if ((diff > 0)
          != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
        return FALSE;

      diff = trunc_int_for_mode (diff, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
                     != UNKNOWN);

      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
        normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
               && (STORE_FLAG_VALUE == 1
                   || if_info->branch_cost >= 2))
        normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
               && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
        normalize = 1, reversep = 1;
      else if (itrue == -1
               && (STORE_FLAG_VALUE == -1
                   || if_info->branch_cost >= 2))
        normalize = -1;
      else if (ifalse == -1 && can_reverse
               && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
        normalize = -1, reversep = 1;
      else if ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
               || if_info->branch_cost >= 3)
        normalize = -1;
      else
        return FALSE;

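      /* If we chose the reversed comparison, swap the two constants and
         negate the difference so the arithmetic below still applies.  */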
      if (reversep)
        {
          tmp = itrue; itrue = ifalse; ifalse = tmp;
          diff = trunc_int_for_mode (-(unsigned HOST_WIDE_INT) diff, mode);
        }

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
        {
          end_sequence ();
          return FALSE;
        }

      /* if (test) x = 3; else x = 4;
         =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
        {
          target = expand_simple_binop (mode,
                                        (diff == STORE_FLAG_VALUE
                                         ? PLUS : MINUS),
                                        gen_int_mode (ifalse, mode), target,
                                        if_info->x, 0, OPTAB_WIDEN);
        }

      /* if (test) x = 8; else x = 0;
         =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
        {
          target = expand_simple_binop (mode, ASHIFT,
                                        target, GEN_INT (tmp), if_info->x, 0,
                                        OPTAB_WIDEN);
        }

      /* if (test) x = -1; else x = b;
         =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
        {
          target = expand_simple_binop (mode, IOR,
                                        target, gen_int_mode (ifalse, mode),
                                        if_info->x, 0, OPTAB_WIDEN);
        }

      /* if (test) x = a; else x = b;
         =>   x = (-(test != 0) & (b - a)) + a;  */
      else
        {
          target = expand_simple_binop (mode, AND,
                                        target, gen_int_mode (diff, mode),
                                        if_info->x, 0, OPTAB_WIDEN);
          if (target)
            target = expand_simple_binop (mode, PLUS,
                                          target, gen_int_mode (ifalse, mode),
                                          if_info->x, 0, OPTAB_WIDEN);
        }

      if (! target)
        {
          end_sequence ();
          return FALSE;
        }

      if (target != if_info->x)
        noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (!seq)
        return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
                               INSN_LOCATION (if_info->insn_a));
      return TRUE;
    }

  return FALSE;
}

/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int subtract, normalize;

  if (GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
          != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
          && general_operand (XEXP (cond, 1), VOIDmode))
        {
          start_sequence ();
          target = emit_conditional_add (if_info->x, code,
                                         XEXP (cond, 0),
                                         XEXP (cond, 1),
                                         VOIDmode,
                                         if_info->b,
                                         XEXP (if_info->a, 1),
                                         GET_MODE (if_info->x),
                                         (code == LTU || code == GEU
                                          || code == LEU || code == GTU));
          if (target)
            {
              if (target != if_info->x)
                noce_emit_move_insn (if_info->x, target);

              seq = end_ifcvt_sequence (if_info);
              if (!seq)
                return FALSE;

              emit_insn_before_setloc (seq, if_info->jump,
                                       INSN_LOCATION (if_info->insn_a));
              return TRUE;
            }
          end_sequence ();
        }

      /* If that fails, construct conditional increment or decrement using
         setcc.  */
      if (if_info->branch_cost >= 2
          && (XEXP (if_info->a, 1) == const1_rtx
              || XEXP (if_info->a, 1) == constm1_rtx))
        {
          start_sequence ();
          if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
            subtract = 0, normalize = 0;
          else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
            subtract = 1, normalize = 0;
          else
            subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));

          target = noce_emit_store_flag (if_info,
                                         gen_reg_rtx (GET_MODE (if_info->x)),
                                         1, normalize);

          if (target)
            target = expand_simple_binop (GET_MODE (if_info->x),
                                          subtract ? MINUS : PLUS,
                                          if_info->b, target, if_info->x,
                                          0, OPTAB_WIDEN);
          if (target)
            {
              if (target != if_info->x)
                noce_emit_move_insn (if_info->x, target);

              seq = end_ifcvt_sequence (if_info);
              if (!seq)
                return FALSE;

              emit_insn_before_setloc (seq, if_info->jump,
                                       INSN_LOCATION (if_info->insn_a));
              return TRUE;
            }
          end_sequence ();
        }
    }

  return FALSE;
}

/* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */

static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target;
  rtx_insn *seq;
  int reversep;

  reversep = 0;
  if ((if_info->branch_cost >= 2
       || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
           && rtx_equal_p (if_info->b, if_info->x))
          || ((reversep = (reversed_comparison_code (if_info->cond,
                                                     if_info->jump)
                           != UNKNOWN))
              && if_info->b == const0_rtx
              && rtx_equal_p (if_info->a, if_info->x))))
    {
      start_sequence ();
      target = noce_emit_store_flag (if_info,
                                     gen_reg_rtx (GET_MODE (if_info->x)),
                                     reversep, -1);
      if (target)
        target = expand_simple_binop (GET_MODE (if_info->x), AND,
                                      if_info->x,
                                      target, if_info->x, 0,
                                      OPTAB_WIDEN);

      if (target)
        {
          int old_cost, new_cost, insn_cost;
          int speed_p;

          if (target != if_info->x)
            noce_emit_move_insn (if_info->x, target);

          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

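          /* Compare the cost of the converted sequence against the cost
             of the branch plus the insn it guards, and give up if
             if-conversion would be more expensive.  */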
          speed_p = optimize_bb_for_speed_p (BLOCK_FOR_INSN (if_info->insn_a));
          insn_cost = insn_rtx_cost (PATTERN (if_info->insn_a), speed_p);
          old_cost = COSTS_N_INSNS (if_info->branch_cost) + insn_cost;
          new_cost = seq_cost (seq, speed_p);

          if (new_cost > old_cost)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATION (if_info->insn_a));
          return TRUE;
        }

      end_sequence ();
    }

  return FALSE;
}

/* Helper function for noce_try_cmove and noce_try_cmove_arith.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
                 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  rtx target ATTRIBUTE_UNUSED;
  int unsignedp ATTRIBUTE_UNUSED;

  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx cond = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      rtx if_then_else = gen_rtx_IF_THEN_ELSE (GET_MODE (x),
                                               cond, vtrue, vfalse);
      rtx set = gen_rtx_SET (VOIDmode, x, if_then_else);

      start_sequence ();
      rtx_insn *insn = emit_insn (set);

      if (recog_memoized (insn) >= 0)
        {
          rtx_insn *seq = get_insns ();
          end_sequence ();
          emit_insn (seq);

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird, unless the
     target supports cbranchcc4.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    {
#if HAVE_cbranchcc4
      if (GET_MODE_CLASS (GET_MODE (cmp_a)) != MODE_CC
          || cmp_b != const0_rtx)
#endif
        return NULL_RTX;
    }

#if HAVE_conditional_move
  unsignedp = (code == LTU || code == GEU
               || code == LEU || code == GTU);

  target = emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
                                  vtrue, vfalse, GET_MODE (x),
                                  unsignedp);
  if (target)
    return target;

  /* We might be faced with a situation like:

     x = (reg:M TARGET)
     vtrue = (subreg:M (reg:N VTRUE) BYTE)
     vfalse = (subreg:M (reg:N VFALSE) BYTE)

     We can't do a conditional move in mode M, but it's possible that we
     could do a conditional move in mode N instead and take a subreg of
     the result.

     If we can't create new pseudos, though, don't bother.  */
  if (reload_completed)
    return NULL_RTX;

  if (GET_CODE (vtrue) == SUBREG && GET_CODE (vfalse) == SUBREG)
    {
      rtx reg_vtrue = SUBREG_REG (vtrue);
      rtx reg_vfalse = SUBREG_REG (vfalse);
      unsigned int byte_vtrue = SUBREG_BYTE (vtrue);
      unsigned int byte_vfalse = SUBREG_BYTE (vfalse);
      rtx promoted_target;

      if (GET_MODE (reg_vtrue) != GET_MODE (reg_vfalse)
          || byte_vtrue != byte_vfalse
          || (SUBREG_PROMOTED_VAR_P (vtrue)
              != SUBREG_PROMOTED_VAR_P (vfalse))
          || (SUBREG_PROMOTED_GET (vtrue)
              != SUBREG_PROMOTED_GET (vfalse)))
        return NULL_RTX;

      promoted_target = gen_reg_rtx (GET_MODE (reg_vtrue));

      target = emit_conditional_move (promoted_target, code, cmp_a, cmp_b,
                                      VOIDmode, reg_vtrue, reg_vfalse,
                                      GET_MODE (reg_vtrue), unsignedp);
      /* Nope, couldn't do it in that mode either.  */
      if (!target)
        return NULL_RTX;

      target = gen_rtx_SUBREG (GET_MODE (vtrue), promoted_target, byte_vtrue);
      SUBREG_PROMOTED_VAR_P (target) = SUBREG_PROMOTED_VAR_P (vtrue);
      SUBREG_PROMOTED_SET (target, SUBREG_PROMOTED_GET (vtrue));
      emit_move_insn (x, target);
      return x;
    }
  else
    return NULL_RTX;
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}

/* Try only simple constants and registers here.  More complex cases
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
   has had a go at it.  */

static int
noce_try_cmove (struct noce_if_info *if_info)
{
  enum rtx_code code;
  rtx target;
  rtx_insn *seq;

  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
    {
      start_sequence ();

      code = GET_CODE (if_info->cond);
      target = noce_emit_cmove (if_info, if_info->x, code,
                                XEXP (if_info->cond, 0),
                                XEXP (if_info->cond, 1),
                                if_info->a, if_info->b);

      if (target)
        {
          if (target != if_info->x)
            noce_emit_move_insn (if_info->x, target);

          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATION (if_info->insn_a));
          return TRUE;
        }
      else
        {
          end_sequence ();
          return FALSE;
        }
    }

  return FALSE;
}

/* Try more complex cases involving conditional_move.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx_insn *insn_a, *insn_b;
  rtx target;
  int is_mem = 0;
  int insn_cost;
  enum rtx_code code;
  rtx_insn *ifcvt_seq;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  /* ??? FIXME: Magic number 5.  */
  if (cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
      && if_info->branch_cost >= 5)
    {
      machine_mode address_mode = get_address_mode (a);

      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (address_mode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not trap or fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_or_fault_p (a) || may_trap_or_fault_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
        if (test)
          x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
     if insn_rtx_cost can't be estimated.  */
  if (insn_a)
    {
      insn_cost
        = insn_rtx_cost (PATTERN (insn_a),
                         optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }
  else
    insn_cost = 0;

  if (insn_b)
    {
      insn_cost
        += insn_rtx_cost (PATTERN (insn_b),
                          optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
        reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
        reversep = 1;

      if (reversep)
        {
          rtx tmp;
          rtx_insn *tmp_insn;
          code = reversed_comparison_code (if_info->cond, if_info->jump);
          tmp = a, a = b, b = tmp;
          tmp_insn = insn_a, insn_a = insn_b, insn_b = tmp_insn;
        }
    }

  start_sequence ();

  orig_a = a;
  orig_b = b;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx_insn *insn;

      if (is_mem)
        {
          rtx reg = gen_reg_rtx (GET_MODE (a));
          insn = emit_insn (gen_rtx_SET (VOIDmode, reg, a));
        }
      else if (! insn_a)
        goto end_seq_and_fail;
      else
        {
          a = gen_reg_rtx (GET_MODE (a));
          rtx_insn *copy_of_a = as_a <rtx_insn *> (copy_rtx (insn_a));
          rtx set = single_set (copy_of_a);
          SET_DEST (set) = a;
          insn = emit_insn (PATTERN (copy_of_a));
        }
      if (recog_memoized (insn) < 0)
        goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx pat;
      rtx_insn *last;
      rtx_insn *new_insn;

      if (is_mem)
        {
          rtx reg = gen_reg_rtx (GET_MODE (b));
          pat = gen_rtx_SET (VOIDmode, reg, b);
        }
      else if (! insn_b)
        goto end_seq_and_fail;
      else
        {
          b = gen_reg_rtx (GET_MODE (b));
          rtx_insn *copy_of_insn_b = as_a <rtx_insn *> (copy_rtx (insn_b));
          rtx set = single_set (copy_of_insn_b);
          SET_DEST (set) = b;
          pat = PATTERN (copy_of_insn_b);
        }

      /* If insn to set up A clobbers any registers B depends on, try to
         swap insn that sets up A with the one that sets up B.  If even
         that doesn't help, punt.  */
      last = get_last_insn ();
      if (last && modified_in_p (orig_b, last))
        {
          new_insn = emit_insn_before (pat, get_insns ());
          if (modified_in_p (orig_a, new_insn))
            goto end_seq_and_fail;
        }
      else
        new_insn = emit_insn (pat);

      if (recog_memoized (new_insn) < 0)
        goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
                            XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling memory operands from above, emit the load now.  */
  if (is_mem)
    {
      rtx mem = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
        MEM_VOLATILE_P (mem) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
        set_mem_alias_set (mem, MEM_ALIAS_SET (if_info->a));
      set_mem_align (mem,
                     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
      set_mem_addr_space (mem, MEM_ADDR_SPACE (if_info->a));

      noce_emit_move_insn (if_info->x, mem);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  ifcvt_seq = end_ifcvt_sequence (if_info);
  if (!ifcvt_seq)
    return FALSE;

  emit_insn_before_setloc (ifcvt_seq, if_info->jump,
                           INSN_LOCATION (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}

/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
                        rtx_insn **earliest)
{
  rtx cond, set;
  rtx_insn *insn;
  int reverse;
  int allow_cc_mode = false;
#if HAVE_cbranchcc4
  allow_cc_mode = true;
#endif

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (if_info->jump);
  if (if_info->then_else_reversed)
    reverse = !reverse;

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
        replacing "if x < 4" with "if x <= 3".  We can undo that (or
        make equivalent types of changes) to get the constants we need
        if they're off by one in the right direction.  */

  if (CONST_INT_P (target))
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
          && BLOCK_FOR_INSN (prev_insn)
             == BLOCK_FOR_INSN (if_info->cond_earliest)
          && INSN_P (prev_insn)
          && GET_CODE (PATTERN (prev_insn)) == SET)
        {
          rtx src = find_reg_equal_equiv_note (prev_insn);
          if (!src)
            src = SET_SRC (PATTERN (prev_insn));
          if (CONST_INT_P (src))
            {
              if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
                op_a = src;
              else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
                op_b = src;

              if (CONST_INT_P (op_a))
                {
                  rtx tmp = op_a;
                  op_a = op_b;
                  op_b = tmp;
                  code = swap_condition (code);
                }
            }
        }

      /* Now, look to see if we can get the right constant by
         adjusting the conditional.  */
      if (CONST_INT_P (op_b))
        {
          HOST_WIDE_INT desired_val = INTVAL (target);
          HOST_WIDE_INT actual_val = INTVAL (op_b);

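          /* Undo canonicalization, e.g. turn "x <= 3" back into "x < 4"
             when 4 is the constant we want.  */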
1876 {
1877 case LT:
1878 if (actual_val == desired_val + 1)
1879 {
1880 code = LE;
1881 op_b = GEN_INT (desired_val);
1882 }
1883 break;
1884 case LE:
1885 if (actual_val == desired_val - 1)
1886 {
1887 code = LT;
1888 op_b = GEN_INT (desired_val);
1889 }
1890 break;
1891 case GT:
1892 if (actual_val == desired_val - 1)
1893 {
1894 code = GE;
1895 op_b = GEN_INT (desired_val);
1896 }
1897 break;
1898 case GE:
1899 if (actual_val == desired_val + 1)
1900 {
1901 code = GT;
1902 op_b = GEN_INT (desired_val);
1903 }
1904 break;
1905 default:
1906 break;
1907 }
1908 }
1909
1910 /* If we made any changes, generate a new conditional that is
1911 equivalent to what we started with, but has the right
1912 constants in it. */
1913 if (code != GET_CODE (if_info->cond)
1914 || op_a != XEXP (if_info->cond, 0)
1915 || op_b != XEXP (if_info->cond, 1))
1916 {
1917 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1918 *earliest = if_info->cond_earliest;
1919 return cond;
1920 }
1921 }
1922
1923 cond = canonicalize_condition (if_info->jump, cond, reverse,
1924 earliest, target, allow_cc_mode, true);
1925 if (! cond || ! reg_mentioned_p (target, cond))
1926 return NULL;
1927
1928 /* We almost certainly searched back to a different place.
1929 Need to re-verify correct lifetimes. */
1930
1931 /* X may not be mentioned in the range (cond_earliest, jump]. */
1932 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1933 if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1934 return NULL;
1935
1936 /* A and B may not be modified in the range [cond_earliest, jump). */
1937 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1938 if (INSN_P (insn)
1939 && (modified_in_p (if_info->a, insn)
1940 || modified_in_p (if_info->b, insn)))
1941 return NULL;
1942
1943 return cond;
1944 }
1945
1946 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1947
1948 static int
1949 noce_try_minmax (struct noce_if_info *if_info)
1950 {
1951 rtx cond, target;
1952 rtx_insn *earliest, *seq;
1953 enum rtx_code code, op;
1954 int unsignedp;
1955
1956 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1957 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1958 to get the target to tell us... */
1959 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1960 || HONOR_NANS (GET_MODE (if_info->x)))
1961 return FALSE;
1962
1963 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1964 if (!cond)
1965 return FALSE;
1966
1967 /* Verify the condition is of the form we expect, and canonicalize
1968 the comparison code. */
1969 code = GET_CODE (cond);
1970 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1971 {
1972 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1973 return FALSE;
1974 }
1975 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1976 {
1977 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1978 return FALSE;
1979 code = swap_condition (code);
1980 }
1981 else
1982 return FALSE;
1983
1984 /* Determine what sort of operation this is. Note that the code is for
1985 a taken branch, so the code->operation mapping appears backwards. */
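/* E.g. (editorial note) if COND is (LT a b), the branch is taken
   exactly when A < B; the THEN arm "x = a" is then skipped and X
   receives B, the larger value, so LT selects SMAX.  */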
1986 switch (code)
1987 {
1988 case LT:
1989 case LE:
1990 case UNLT:
1991 case UNLE:
1992 op = SMAX;
1993 unsignedp = 0;
1994 break;
1995 case GT:
1996 case GE:
1997 case UNGT:
1998 case UNGE:
1999 op = SMIN;
2000 unsignedp = 0;
2001 break;
2002 case LTU:
2003 case LEU:
2004 op = UMAX;
2005 unsignedp = 1;
2006 break;
2007 case GTU:
2008 case GEU:
2009 op = UMIN;
2010 unsignedp = 1;
2011 break;
2012 default:
2013 return FALSE;
2014 }
2015
2016 start_sequence ();
2017
2018 target = expand_simple_binop (GET_MODE (if_info->x), op,
2019 if_info->a, if_info->b,
2020 if_info->x, unsignedp, OPTAB_WIDEN);
2021 if (! target)
2022 {
2023 end_sequence ();
2024 return FALSE;
2025 }
2026 if (target != if_info->x)
2027 noce_emit_move_insn (if_info->x, target);
2028
2029 seq = end_ifcvt_sequence (if_info);
2030 if (!seq)
2031 return FALSE;
2032
2033 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2034 if_info->cond = cond;
2035 if_info->cond_earliest = earliest;
2036
2037 return TRUE;
2038 }
2039
2040 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
2041 "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
2042 etc. */
2043
2044 static int
2045 noce_try_abs (struct noce_if_info *if_info)
2046 {
2047 rtx cond, target, a, b, c;
2048 rtx_insn *earliest, *seq;
2049 int negate;
2050 bool one_cmpl = false;
2051
2052 /* Reject modes with signed zeros. */
2053 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
2054 return FALSE;
2055
2056 /* Recognize A and B as constituting an ABS or NABS. The canonical
2057 form is a branch around the negation, taken when the object is the
2058 first operand of a comparison against 0 that evaluates to true. */
2059 a = if_info->a;
2060 b = if_info->b;
2061 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
2062 negate = 0;
2063 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
2064 {
2065 c = a; a = b; b = c;
2066 negate = 1;
2067 }
2068 else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
2069 {
2070 negate = 0;
2071 one_cmpl = true;
2072 }
2073 else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
2074 {
2075 c = a; a = b; b = c;
2076 negate = 1;
2077 one_cmpl = true;
2078 }
2079 else
2080 return FALSE;
2081
2082 cond = noce_get_alt_condition (if_info, b, &earliest);
2083 if (!cond)
2084 return FALSE;
2085
2086 /* Verify the condition is of the form we expect. */
2087 if (rtx_equal_p (XEXP (cond, 0), b))
2088 c = XEXP (cond, 1);
2089 else if (rtx_equal_p (XEXP (cond, 1), b))
2090 {
2091 c = XEXP (cond, 0);
2092 negate = !negate;
2093 }
2094 else
2095 return FALSE;
2096
2097 /* Verify that C is zero. Search one step backward for a
2098 REG_EQUAL note or a simple source if necessary. */
2099 if (REG_P (c))
2100 {
2101 rtx set;
2102 rtx_insn *insn = prev_nonnote_insn (earliest);
2103 if (insn
2104 && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
2105 && (set = single_set (insn))
2106 && rtx_equal_p (SET_DEST (set), c))
2107 {
2108 rtx note = find_reg_equal_equiv_note (insn);
2109 if (note)
2110 c = XEXP (note, 0);
2111 else
2112 c = SET_SRC (set);
2113 }
2114 else
2115 return FALSE;
2116 }
2117 if (MEM_P (c)
2118 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
2119 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
2120 c = get_pool_constant (XEXP (c, 0));
2121
2122 /* Work around funny ideas get_condition has wrt canonicalization.
2123 Note that these rtx constants are known to be CONST_INT, and
2124 therefore imply integer comparisons. */
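/* E.g. (editorial) a source-level "b >= 0" may come back from
   get_condition as (GT b -1), and "b <= 0" as (LT b 1); both are
   still comparisons against zero for our purposes, which is what the
   first two cases below accept.  */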
2125 if (c == constm1_rtx && GET_CODE (cond) == GT)
2126 ;
2127 else if (c == const1_rtx && GET_CODE (cond) == LT)
2128 ;
2129 else if (c != CONST0_RTX (GET_MODE (b)))
2130 return FALSE;
2131
2132 /* Determine what sort of operation this is. */
2133 switch (GET_CODE (cond))
2134 {
2135 case LT:
2136 case LE:
2137 case UNLT:
2138 case UNLE:
2139 negate = !negate;
2140 break;
2141 case GT:
2142 case GE:
2143 case UNGT:
2144 case UNGE:
2145 break;
2146 default:
2147 return FALSE;
2148 }
2149
2150 start_sequence ();
2151 if (one_cmpl)
2152 target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
2153 if_info->x);
2154 else
2155 target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
2156
2157 /* ??? It's a quandary whether cmove would be better here, especially
2158 for integers. Perhaps combine will clean things up. */
2159 if (target && negate)
2160 {
2161 if (one_cmpl)
2162 target = expand_simple_unop (GET_MODE (target), NOT, target,
2163 if_info->x, 0);
2164 else
2165 target = expand_simple_unop (GET_MODE (target), NEG, target,
2166 if_info->x, 0);
2167 }
2168
2169 if (! target)
2170 {
2171 end_sequence ();
2172 return FALSE;
2173 }
2174
2175 if (target != if_info->x)
2176 noce_emit_move_insn (if_info->x, target);
2177
2178 seq = end_ifcvt_sequence (if_info);
2179 if (!seq)
2180 return FALSE;
2181
2182 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2183 if_info->cond = cond;
2184 if_info->cond_earliest = earliest;
2185
2186 return TRUE;
2187 }
2188
2189 /* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;". */
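/* Editorial illustration: for 32-bit M, the arithmetic shift
   "m >> 31" is -1 when M is negative and 0 otherwise, so
       x = (m >> 31) & b;
   yields B or 0 without a branch; C is the mode bitsize minus 1.  */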
2190
2191 static int
2192 noce_try_sign_mask (struct noce_if_info *if_info)
2193 {
2194 rtx cond, t, m, c;
2195 rtx_insn *seq;
2196 machine_mode mode;
2197 enum rtx_code code;
2198 bool t_unconditional;
2199
2200 cond = if_info->cond;
2201 code = GET_CODE (cond);
2202 m = XEXP (cond, 0);
2203 c = XEXP (cond, 1);
2204
2205 t = NULL_RTX;
2206 if (if_info->a == const0_rtx)
2207 {
2208 if ((code == LT && c == const0_rtx)
2209 || (code == LE && c == constm1_rtx))
2210 t = if_info->b;
2211 }
2212 else if (if_info->b == const0_rtx)
2213 {
2214 if ((code == GE && c == const0_rtx)
2215 || (code == GT && c == constm1_rtx))
2216 t = if_info->a;
2217 }
2218
2219 if (! t || side_effects_p (t))
2220 return FALSE;
2221
2222 /* We currently don't handle different modes. */
2223 mode = GET_MODE (t);
2224 if (GET_MODE (m) != mode)
2225 return FALSE;
2226
2227 /* This is only profitable if T is unconditionally executed/evaluated in the
2228 original insn sequence or T is cheap. The former happens if B is the
2229 non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
2230 INSN_B which can happen for e.g. conditional stores to memory. For the
2231 cost computation use the block TEST_BB where the evaluation will end up
2232 after the transformation. */
2233 t_unconditional =
2234 (t == if_info->b
2235 && (if_info->insn_b == NULL_RTX
2236 || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
2237 if (!(t_unconditional
2238 || (set_src_cost (t, optimize_bb_for_speed_p (if_info->test_bb))
2239 < COSTS_N_INSNS (2))))
2240 return FALSE;
2241
2242 start_sequence ();
2243 /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
2244 "(signed) m >> 31" directly. This benefits targets with specialized
2245 insns to obtain the signmask, but still uses ashr_optab otherwise. */
2246 m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
2247 t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
2248 : NULL_RTX;
2249
2250 if (!t)
2251 {
2252 end_sequence ();
2253 return FALSE;
2254 }
2255
2256 noce_emit_move_insn (if_info->x, t);
2257
2258 seq = end_ifcvt_sequence (if_info);
2259 if (!seq)
2260 return FALSE;
2261
2262 emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATION (if_info->insn_a));
2263 return TRUE;
2264 }
2265
2266
2267 /* Optimize away "if (x & C) x |= C" and similar bit-manipulation
2268 patterns. */
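/* Concrete instances of the cases handled below, using C == 4
   (editorial): "if ((x & 4) == 0) x |= 4;" reduces to "x |= 4;",
   "if ((x & 4) != 0) x |= 4;" reduces to nothing, and
   "if ((x & 4) != 0) x ^= 4;" reduces to "x &= ~4;".  */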
2269
2270 static int
2271 noce_try_bitop (struct noce_if_info *if_info)
2272 {
2273 rtx cond, x, a, result;
2274 rtx_insn *seq;
2275 machine_mode mode;
2276 enum rtx_code code;
2277 int bitnum;
2278
2279 x = if_info->x;
2280 cond = if_info->cond;
2281 code = GET_CODE (cond);
2282
2283 /* Check that there is no ELSE arm, i.e. B is X itself. */
2284 if (! rtx_equal_p (x, if_info->b))
2285 return FALSE;
2286
2287 /* Check for a suitable condition. */
2288 if (code != NE && code != EQ)
2289 return FALSE;
2290 if (XEXP (cond, 1) != const0_rtx)
2291 return FALSE;
2292 cond = XEXP (cond, 0);
2293
2294 /* ??? We could also handle AND here. */
2295 if (GET_CODE (cond) == ZERO_EXTRACT)
2296 {
2297 if (XEXP (cond, 1) != const1_rtx
2298 || !CONST_INT_P (XEXP (cond, 2))
2299 || ! rtx_equal_p (x, XEXP (cond, 0)))
2300 return FALSE;
2301 bitnum = INTVAL (XEXP (cond, 2));
2302 mode = GET_MODE (x);
2303 if (BITS_BIG_ENDIAN)
2304 bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
2305 if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
2306 return FALSE;
2307 }
2308 else
2309 return FALSE;
2310
2311 a = if_info->a;
2312 if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
2313 {
2314 /* Check for "if (X & C) x = x op C". */
2315 if (! rtx_equal_p (x, XEXP (a, 0))
2316 || !CONST_INT_P (XEXP (a, 1))
2317 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2318 != (unsigned HOST_WIDE_INT) 1 << bitnum)
2319 return FALSE;
2320
2321 /* if ((x & C) == 0) x |= C; is transformed to x |= C. */
2322 /* if ((x & C) != 0) x |= C; is transformed to nothing. */
2323 if (GET_CODE (a) == IOR)
2324 result = (code == NE) ? a : NULL_RTX;
2325 else if (code == NE)
2326 {
2327 /* if ((x & C) == 0) x ^= C; is transformed to x |= C. */
2328 result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
2329 result = simplify_gen_binary (IOR, mode, x, result);
2330 }
2331 else
2332 {
2333 /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C. */
2334 result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
2335 result = simplify_gen_binary (AND, mode, x, result);
2336 }
2337 }
2338 else if (GET_CODE (a) == AND)
2339 {
2340 /* Check for "if (X & C) x &= ~C". */
2341 if (! rtx_equal_p (x, XEXP (a, 0))
2342 || !CONST_INT_P (XEXP (a, 1))
2343 || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
2344 != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
2345 return FALSE;
2346
2347 /* if ((x & C) == 0) x &= ~C; is transformed to nothing. */
2348 /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C. */
2349 result = (code == EQ) ? a : NULL_RTX;
2350 }
2351 else
2352 return FALSE;
2353
2354 if (result)
2355 {
2356 start_sequence ();
2357 noce_emit_move_insn (x, result);
2358 seq = end_ifcvt_sequence (if_info);
2359 if (!seq)
2360 return FALSE;
2361
2362 emit_insn_before_setloc (seq, if_info->jump,
2363 INSN_LOCATION (if_info->insn_a));
2364 }
2365 return TRUE;
2366 }
2367
2368
2369 /* Similar to get_condition, only the resulting condition must be
2370 valid at JUMP, instead of at EARLIEST.
2371
2372 If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
2373 THEN block of the caller, and we have to reverse the condition. */
2374
2375 static rtx
2376 noce_get_condition (rtx_insn *jump, rtx_insn **earliest, bool then_else_reversed)
2377 {
2378 rtx cond, set, tmp;
2379 bool reverse;
2380 int allow_cc_mode = false;
2381 #if HAVE_cbranchcc4
2382 allow_cc_mode = true;
2383 #endif
2384
2385 if (! any_condjump_p (jump))
2386 return NULL_RTX;
2387
2388 set = pc_set (jump);
2389
2390 /* If this branches to JUMP_LABEL when the condition is false,
2391 reverse the condition. */
2392 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
2393 && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump));
2394
2395 /* We may have to reverse because the caller's if block is not canonical,
2396 i.e. the THEN block isn't the fallthrough block for the TEST block
2397 (see find_if_header). */
2398 if (then_else_reversed)
2399 reverse = !reverse;
2400
2401 /* If the condition variable is a register and is MODE_INT, accept it. */
2402
2403 cond = XEXP (SET_SRC (set), 0);
2404 tmp = XEXP (cond, 0);
2405 if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT
2406 && (GET_MODE (tmp) != BImode
2407 || !targetm.small_register_classes_for_mode_p (BImode)))
2408 {
2409 *earliest = jump;
2410
2411 if (reverse)
2412 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2413 GET_MODE (cond), tmp, XEXP (cond, 1));
2414 return cond;
2415 }
2416
2417 /* Otherwise, fall back on canonicalize_condition to do the dirty
2418 work of manipulating MODE_CC values and COMPARE rtx codes. */
2419 tmp = canonicalize_condition (jump, cond, reverse, earliest,
2420 NULL_RTX, allow_cc_mode, true);
2421
2422 /* We don't handle side-effects in the condition, like handling
2423 REG_INC notes and making sure no duplicate conditions are emitted. */
2424 if (tmp != NULL_RTX && side_effects_p (tmp))
2425 return NULL_RTX;
2426
2427 return tmp;
2428 }
2429
2430 /* Return true if OP is ok for if-then-else processing. */
2431
2432 static int
2433 noce_operand_ok (const_rtx op)
2434 {
2435 if (side_effects_p (op))
2436 return FALSE;
2437
2438 /* We special-case memories, so handle any of them with
2439 no address side effects. */
2440 if (MEM_P (op))
2441 return ! side_effects_p (XEXP (op, 0));
2442
2443 return ! may_trap_p (op);
2444 }
2445
2446 /* Return true if a write into MEM may trap or fault. */
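/* E.g. (editorial) a store through the address of a label, or of a
   symbol whose decl lives in a read-only section, is certain to
   fault; the address walk below strips offsets and auto-modify
   wrappers to look for exactly those known-bad forms.  */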
2447
2448 static bool
2449 noce_mem_write_may_trap_or_fault_p (const_rtx mem)
2450 {
2451 rtx addr;
2452
2453 if (MEM_READONLY_P (mem))
2454 return true;
2455
2456 if (may_trap_or_fault_p (mem))
2457 return true;
2458
2459 addr = XEXP (mem, 0);
2460
2461 /* Call target hook to avoid the effects of -fpic etc.... */
2462 addr = targetm.delegitimize_address (addr);
2463
2464 while (addr)
2465 switch (GET_CODE (addr))
2466 {
2467 case CONST:
2468 case PRE_DEC:
2469 case PRE_INC:
2470 case POST_DEC:
2471 case POST_INC:
2472 case POST_MODIFY:
2473 addr = XEXP (addr, 0);
2474 break;
2475 case LO_SUM:
2476 case PRE_MODIFY:
2477 addr = XEXP (addr, 1);
2478 break;
2479 case PLUS:
2480 if (CONST_INT_P (XEXP (addr, 1)))
2481 addr = XEXP (addr, 0);
2482 else
2483 return false;
2484 break;
2485 case LABEL_REF:
2486 return true;
2487 case SYMBOL_REF:
2488 if (SYMBOL_REF_DECL (addr)
2489 && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
2490 return true;
2491 return false;
2492 default:
2493 return false;
2494 }
2495
2496 return false;
2497 }
2498
2499 /* Return whether we can use store speculation for MEM. TOP_BB is the
2500 basic block above the conditional block where we are considering
2501 doing the speculative store. We look for whether MEM is set
2502 unconditionally later in the function. */
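/* Editorial illustration: if a conditional store "if (cond) *p = a;"
   is post-dominated by an unconditional "*p = b;", the conditional
   store may be performed unconditionally, since *p is overwritten on
   every path anyway, provided no call or other potential memory
   barrier can execute in between.  */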
2503
2504 static bool
2505 noce_can_store_speculate_p (basic_block top_bb, const_rtx mem)
2506 {
2507 basic_block dominator;
2508
2509 for (dominator = get_immediate_dominator (CDI_POST_DOMINATORS, top_bb);
2510 dominator != NULL;
2511 dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
2512 {
2513 rtx_insn *insn;
2514
2515 FOR_BB_INSNS (dominator, insn)
2516 {
2517 /* If we see something that might be a memory barrier, we
2518 have to stop looking. Even if the MEM is set later in
2519 the function, we still don't want to set it
2520 unconditionally before the barrier. */
2521 if (INSN_P (insn)
2522 && (volatile_insn_p (PATTERN (insn))
2523 || (CALL_P (insn) && (!RTL_CONST_CALL_P (insn)))))
2524 return false;
2525
2526 if (memory_must_be_modified_in_insn_p (mem, insn))
2527 return true;
2528 if (modified_in_p (XEXP (mem, 0), insn))
2529 return false;
2530
2531 }
2532 }
2533
2534 return false;
2535 }
2536
2537 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2538 it without using conditional execution. Return TRUE if we were successful
2539 at converting the block. */
2540
2541 static int
2542 noce_process_if_block (struct noce_if_info *if_info)
2543 {
2544 basic_block test_bb = if_info->test_bb; /* test block */
2545 basic_block then_bb = if_info->then_bb; /* THEN */
2546 basic_block else_bb = if_info->else_bb; /* ELSE or NULL */
2547 basic_block join_bb = if_info->join_bb; /* JOIN */
2548 rtx_insn *jump = if_info->jump;
2549 rtx cond = if_info->cond;
2550 rtx_insn *insn_a, *insn_b;
2551 rtx set_a, set_b;
2552 rtx orig_x, x, a, b;
2553
2554 /* We're looking for patterns of the form
2555
2556 (1) if (...) x = a; else x = b;
2557 (2) x = b; if (...) x = a;
2558 (3) if (...) x = a; // as if with an initial x = x.
2559
2560 The latter patterns are only profitable when jumps are more expensive.
2561
2562 ??? For future expansion, look for multiple X in such patterns. */
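/* Editorial illustration of form (2):
     x = b;
     if (test) goto over;
     x = a;
   over:
   Here INSN_B ("x = b") is found by looking just above the
   condition, and the pair can collapse into a single conditional
   set of X.  */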
2563
2564 /* Look for one of the potential sets. */
2565 insn_a = first_active_insn (then_bb);
2566 if (! insn_a
2567 || insn_a != last_active_insn (then_bb, FALSE)
2568 || (set_a = single_set (insn_a)) == NULL_RTX)
2569 return FALSE;
2570
2571 x = SET_DEST (set_a);
2572 a = SET_SRC (set_a);
2573
2574 /* Look for the other potential set. Make sure we've got equivalent
2575 destinations. */
2576 /* ??? This is overconservative. Storing to two different mems is
2577 as easy as conditionally computing the address. Storing to a
2578 single mem merely requires a scratch memory to use as one of the
2579 destination addresses; often the memory immediately below the
2580 stack pointer is available for this. */
2581 set_b = NULL_RTX;
2582 if (else_bb)
2583 {
2584 insn_b = first_active_insn (else_bb);
2585 if (! insn_b
2586 || insn_b != last_active_insn (else_bb, FALSE)
2587 || (set_b = single_set (insn_b)) == NULL_RTX
2588 || ! rtx_interchangeable_p (x, SET_DEST (set_b)))
2589 return FALSE;
2590 }
2591 else
2592 {
2593 insn_b = prev_nonnote_nondebug_insn (if_info->cond_earliest);
2594 /* We're going to be moving the evaluation of B down from above
2595 COND_EARLIEST to JUMP. Make sure the relevant data is still
2596 intact. */
2597 if (! insn_b
2598 || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
2599 || !NONJUMP_INSN_P (insn_b)
2600 || (set_b = single_set (insn_b)) == NULL_RTX
2601 || ! rtx_interchangeable_p (x, SET_DEST (set_b))
2602 || ! noce_operand_ok (SET_SRC (set_b))
2603 || reg_overlap_mentioned_p (x, SET_SRC (set_b))
2604 || modified_between_p (SET_SRC (set_b), insn_b, jump)
2605 /* Avoid extending the lifetime of hard registers on small
2606 register class machines. */
2607 || (REG_P (SET_SRC (set_b))
2608 && HARD_REGISTER_P (SET_SRC (set_b))
2609 && targetm.small_register_classes_for_mode_p
2610 (GET_MODE (SET_SRC (set_b))))
2611 /* Likewise with X. In particular this can happen when
2612 noce_get_condition looks farther back in the instruction
2613 stream than one might expect. */
2614 || reg_overlap_mentioned_p (x, cond)
2615 || reg_overlap_mentioned_p (x, a)
2616 || modified_between_p (x, insn_b, jump))
2617 {
2618 insn_b = NULL;
2619 set_b = NULL_RTX;
2620 }
2621 }
2622
2623 /* If x has side effects then only the if-then-else form is safe to
2624 convert. But even in that case we would need to restore any notes
2625 (such as REG_INC) at the end. That can be tricky if
2626 noce_emit_move_insn expands to more than one insn, so disable the
2627 optimization entirely for now if there are side effects. */
2628 if (side_effects_p (x))
2629 return FALSE;
2630
2631 b = (set_b ? SET_SRC (set_b) : x);
2632
2633 /* Only operate on register destinations, and even then avoid extending
2634 the lifetime of hard registers on small register class machines. */
2635 orig_x = x;
2636 if (!REG_P (x)
2637 || (HARD_REGISTER_P (x)
2638 && targetm.small_register_classes_for_mode_p (GET_MODE (x))))
2639 {
2640 if (GET_MODE (x) == BLKmode)
2641 return FALSE;
2642
2643 if (GET_CODE (x) == ZERO_EXTRACT
2644 && (!CONST_INT_P (XEXP (x, 1))
2645 || !CONST_INT_P (XEXP (x, 2))))
2646 return FALSE;
2647
2648 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
2649 ? XEXP (x, 0) : x));
2650 }
2651
2652 /* Don't operate on sources that may trap or are volatile. */
2653 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
2654 return FALSE;
2655
2656 retry:
2657 /* Set up the info block for our subroutines. */
2658 if_info->insn_a = insn_a;
2659 if_info->insn_b = insn_b;
2660 if_info->x = x;
2661 if_info->a = a;
2662 if_info->b = b;
2663
2664 /* Try optimizations in some approximation of a useful order. */
2665 /* ??? Should first look to see if X is live incoming at all. If it
2666 isn't, we don't need anything but an unconditional set. */
2667
2668 /* Look and see if A and B are really the same. Avoid creating silly
2669 cmove constructs that no one will fix up later. */
2670 if (rtx_interchangeable_p (a, b))
2671 {
2672 /* If we have an INSN_B, we don't have to create any new rtl. Just
2673 move the instruction that we already have. If we don't have an
2674 INSN_B, that means that A == X, and we've got a noop move. In
2675 that case don't do anything and let the code below delete INSN_A. */
2676 if (insn_b && else_bb)
2677 {
2678 rtx note;
2679
2680 if (else_bb && insn_b == BB_END (else_bb))
2681 BB_END (else_bb) = PREV_INSN (insn_b);
2682 reorder_insns (insn_b, insn_b, PREV_INSN (jump));
2683
2684 /* If there was a REG_EQUAL note, delete it since it may have been
2685 true due to this insn being after a jump. */
2686 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
2687 remove_note (insn_b, note);
2688
2689 insn_b = NULL;
2690 }
2691 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
2692 x must be executed twice. */
2693 else if (insn_b && side_effects_p (orig_x))
2694 return FALSE;
2695
2696 x = orig_x;
2697 goto success;
2698 }
2699
2700 if (!set_b && MEM_P (orig_x))
2701 {
2702 /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
2703 for optimizations if writing to x may trap or fault,
2704 i.e. it's a memory other than a static var or a stack slot,
2705 is misaligned on strict-alignment machines, or is read-only. If
2706 x is a read-only memory, then the program is valid only if we
2707 avoid the store into it. If there are stores on both the
2708 THEN and ELSE arms, then we can go ahead with the conversion;
2709 either the program is broken, or the condition is always
2710 false such that the other memory is selected. */
2711 if (noce_mem_write_may_trap_or_fault_p (orig_x))
2712 return FALSE;
2713
2714 /* Avoid store speculation: given "if (...) x = a" where x is a
2715 MEM, we only want to do the store if x is always set
2716 somewhere in the function. This avoids cases like
2717 if (pthread_mutex_trylock(mutex))
2718 ++global_variable;
2719 where we only want global_variable to be changed if the mutex
2720 is held. FIXME: This should ideally be expressed directly in
2721 RTL somehow. */
2722 if (!noce_can_store_speculate_p (test_bb, orig_x))
2723 return FALSE;
2724 }
2725
2726 if (noce_try_move (if_info))
2727 goto success;
2728 if (noce_try_store_flag (if_info))
2729 goto success;
2730 if (noce_try_bitop (if_info))
2731 goto success;
2732 if (noce_try_minmax (if_info))
2733 goto success;
2734 if (noce_try_abs (if_info))
2735 goto success;
2736 if (HAVE_conditional_move
2737 && noce_try_cmove (if_info))
2738 goto success;
2739 if (! targetm.have_conditional_execution ())
2740 {
2741 if (noce_try_store_flag_constants (if_info))
2742 goto success;
2743 if (noce_try_addcc (if_info))
2744 goto success;
2745 if (noce_try_store_flag_mask (if_info))
2746 goto success;
2747 if (HAVE_conditional_move
2748 && noce_try_cmove_arith (if_info))
2749 goto success;
2750 if (noce_try_sign_mask (if_info))
2751 goto success;
2752 }
2753
2754 if (!else_bb && set_b)
2755 {
2756 insn_b = NULL;
2757 set_b = NULL_RTX;
2758 b = orig_x;
2759 goto retry;
2760 }
2761
2762 return FALSE;
2763
2764 success:
2765
2766 /* If we used a temporary, fix it up now. */
2767 if (orig_x != x)
2768 {
2769 rtx_insn *seq;
2770
2771 start_sequence ();
2772 noce_emit_move_insn (orig_x, x);
2773 seq = get_insns ();
2774 set_used_flags (orig_x);
2775 unshare_all_rtl_in_chain (seq);
2776 end_sequence ();
2777
2778 emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATION (insn_a));
2779 }
2780
2781 /* The original THEN and ELSE blocks may now be removed. The test block
2782 must now jump to the join block. If the test block and the join block
2783 can be merged, do so. */
2784 if (else_bb)
2785 {
2786 delete_basic_block (else_bb);
2787 num_true_changes++;
2788 }
2789 else
2790 remove_edge (find_edge (test_bb, join_bb));
2791
2792 remove_edge (find_edge (then_bb, join_bb));
2793 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
2794 delete_basic_block (then_bb);
2795 num_true_changes++;
2796
2797 if (can_merge_blocks_p (test_bb, join_bb))
2798 {
2799 merge_blocks (test_bb, join_bb);
2800 num_true_changes++;
2801 }
2802
2803 num_updated_if_blocks++;
2804 return TRUE;
2805 }
2806
2807 /* Check whether a block is suitable for conditional move conversion.
2808 Every insn must be a simple set of a register to a constant or a
2809 register. For each assignment, store the value in the hash map
2810 VALS, keyed by register, then store the register
2811 pointer in REGS. COND is the condition we will test. */
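/* Editorial example of an acceptable block, where R1, R2 and R3 are
   hypothetical pseudos not mentioned in COND:
     r1 = 5;
     r2 = r3;
   i.e. only simple "reg = constant-or-reg" sets, with no side
   effects or traps, and no register both set here and read or set
   again later in the block.  */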
2812
2813 static int
2814 check_cond_move_block (basic_block bb,
2815 hash_map<rtx, rtx> *vals,
2816 vec<rtx> *regs,
2817 rtx cond)
2818 {
2819 rtx_insn *insn;
2820
2821 /* We can only handle simple jumps at the end of the basic block.
2822 It is almost impossible to update the CFG otherwise. */
2823 insn = BB_END (bb);
2824 if (JUMP_P (insn) && !onlyjump_p (insn))
2825 return FALSE;
2826
2827 FOR_BB_INSNS (bb, insn)
2828 {
2829 rtx set, dest, src;
2830
2831 if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
2832 continue;
2833 set = single_set (insn);
2834 if (!set)
2835 return FALSE;
2836
2837 dest = SET_DEST (set);
2838 src = SET_SRC (set);
2839 if (!REG_P (dest)
2840 || (HARD_REGISTER_P (dest)
2841 && targetm.small_register_classes_for_mode_p (GET_MODE (dest))))
2842 return FALSE;
2843
2844 if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
2845 return FALSE;
2846
2847 if (side_effects_p (src) || side_effects_p (dest))
2848 return FALSE;
2849
2850 if (may_trap_p (src) || may_trap_p (dest))
2851 return FALSE;
2852
2853 /* Don't try to handle this if the source register was
2854 modified earlier in the block. */
2855 if ((REG_P (src)
2856 && vals->get (src))
2857 || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
2858 && vals->get (SUBREG_REG (src))))
2859 return FALSE;
2860
2861 /* Don't try to handle this if the destination register was
2862 modified earlier in the block. */
2863 if (vals->get (dest))
2864 return FALSE;
2865
2866 /* Don't try to handle this if the condition uses the
2867 destination register. */
2868 if (reg_overlap_mentioned_p (dest, cond))
2869 return FALSE;
2870
2871 /* Don't try to handle this if the source register is modified
2872 later in the block. */
2873 if (!CONSTANT_P (src)
2874 && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
2875 return FALSE;
2876
2877 vals->put (dest, src);
2878
2879 regs->safe_push (dest);
2880 }
2881
2882 return TRUE;
2883 }
2884
2885 /* Given a basic block BB suitable for conditional move conversion,
2886 a condition COND, and pointer maps THEN_VALS and ELSE_VALS containing
2887 the register values depending on COND, emit the insns in the block as
2888 conditional moves. If ELSE_BLOCK_P is true, THEN_BB was already
2889 processed. The caller has started a sequence for the conversion.
2890 Return true if successful, false if something goes wrong. */
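/* Editorial sketch, using a hypothetical pseudo R1: if the THEN
   block sets "r1 = a" and the ELSE block sets "r1 = b", processing
   the THEN block emits a single conditional move of A or B into R1,
   and the later ELSE pass skips R1 because it was already
   handled.  */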
2891
2892 static bool
2893 cond_move_convert_if_block (struct noce_if_info *if_infop,
2894 basic_block bb, rtx cond,
2895 hash_map<rtx, rtx> *then_vals,
2896 hash_map<rtx, rtx> *else_vals,
2897 bool else_block_p)
2898 {
2899 enum rtx_code code;
2900 rtx_insn *insn;
2901 rtx cond_arg0, cond_arg1;
2902
2903 code = GET_CODE (cond);
2904 cond_arg0 = XEXP (cond, 0);
2905 cond_arg1 = XEXP (cond, 1);
2906
2907 FOR_BB_INSNS (bb, insn)
2908 {
2909 rtx set, target, dest, t, e;
2910
2911 /* ??? Maybe emit conditional debug insn? */
2912 if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
2913 continue;
2914 set = single_set (insn);
2915 gcc_assert (set && REG_P (SET_DEST (set)));
2916
2917 dest = SET_DEST (set);
2918
2919 rtx *then_slot = then_vals->get (dest);
2920 rtx *else_slot = else_vals->get (dest);
2921 t = then_slot ? *then_slot : NULL_RTX;
2922 e = else_slot ? *else_slot : NULL_RTX;
2923
2924 if (else_block_p)
2925 {
2926 /* If this register was set in the then block, we already
2927 handled this case there. */
2928 if (t)
2929 continue;
2930 t = dest;
2931 gcc_assert (e);
2932 }
2933 else
2934 {
2935 gcc_assert (t);
2936 if (!e)
2937 e = dest;
2938 }
2939
2940 target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
2941 t, e);
2942 if (!target)
2943 return false;
2944
2945 if (target != dest)
2946 noce_emit_move_insn (dest, target);
2947 }
2948
2949 return true;
2950 }
2951
2952 /* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2953 it using only conditional moves. Return TRUE if we were successful at
2954 converting the block. */
2955
2956 static int
2957 cond_move_process_if_block (struct noce_if_info *if_info)
2958 {
2959 basic_block test_bb = if_info->test_bb;
2960 basic_block then_bb = if_info->then_bb;
2961 basic_block else_bb = if_info->else_bb;
2962 basic_block join_bb = if_info->join_bb;
2963 rtx_insn *jump = if_info->jump;
2964 rtx cond = if_info->cond;
2965 rtx_insn *seq, *loc_insn;
2966 rtx reg;
2967 int c;
2968 vec<rtx> then_regs = vNULL;
2969 vec<rtx> else_regs = vNULL;
2970 unsigned int i;
2971 int success_p = FALSE;
2972
2973 /* Build a mapping for each block to the value used for each
2974 register. */
2975 hash_map<rtx, rtx> then_vals;
2976 hash_map<rtx, rtx> else_vals;
2977
2978 /* Make sure the blocks are suitable. */
2979 if (!check_cond_move_block (then_bb, &then_vals, &then_regs, cond)
2980 || (else_bb
2981 && !check_cond_move_block (else_bb, &else_vals, &else_regs, cond)))
2982 goto done;
2983
2984 /* Make sure the blocks can be used together. If the same register
2985 is set in both blocks, and is not set to a constant in both
2986 cases, then both blocks must set it to the same register. We
2987 have already verified that if it is set to a register, that the
2988 source register does not change after the assignment. Also count
2989 the number of registers set in only one of the blocks. */
2990 c = 0;
2991 FOR_EACH_VEC_ELT (then_regs, i, reg)
2992 {
2993 rtx *then_slot = then_vals.get (reg);
2994 rtx *else_slot = else_vals.get (reg);
2995
2996 gcc_checking_assert (then_slot);
2997 if (!else_slot)
2998 ++c;
2999 else
3000 {
3001 rtx then_val = *then_slot;
3002 rtx else_val = *else_slot;
3003 if (!CONSTANT_P (then_val) && !CONSTANT_P (else_val)
3004 && !rtx_equal_p (then_val, else_val))
3005 goto done;
3006 }
3007 }
3008
3009 /* Finish off the count C for the MAX_CONDITIONAL_EXECUTE check below. */
3010 FOR_EACH_VEC_ELT (else_regs, i, reg)
3011 {
3012 gcc_checking_assert (else_vals.get (reg));
3013 if (!then_vals.get (reg))
3014 ++c;
3015 }
3016
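/* Worked example (editorial, with hypothetical pseudos): if the THEN
   block sets {r1, r2} and the ELSE block sets {r1, r3}, then C ends
   up as 2: R2 and R3 are each assigned on only one side and would
   become unconditional, while R1, assigned on both sides, adds
   nothing to the count.  */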
3017 /* Make sure it is reasonable to convert this block. What matters
3018 is the number of assignments currently made in only one of the
3019 branches, since if we convert we are going to always execute
3020 them. */
3021 if (c > MAX_CONDITIONAL_EXECUTE)
3022 goto done;
3023
3024 /* Try to emit the conditional moves. First do the then block,
3025 then do anything left in the else blocks. */
3026 start_sequence ();
3027 if (!cond_move_convert_if_block (if_info, then_bb, cond,
3028 &then_vals, &else_vals, false)
3029 || (else_bb
3030 && !cond_move_convert_if_block (if_info, else_bb, cond,
3031 &then_vals, &else_vals, true)))
3032 {
3033 end_sequence ();
3034 goto done;
3035 }
3036 seq = end_ifcvt_sequence (if_info);
3037 if (!seq)
3038 goto done;
3039
3040 loc_insn = first_active_insn (then_bb);
3041 if (!loc_insn)
3042 {
3043 loc_insn = first_active_insn (else_bb);
3044 gcc_assert (loc_insn);
3045 }
3046 emit_insn_before_setloc (seq, jump, INSN_LOCATION (loc_insn));
3047
3048 if (else_bb)
3049 {
3050 delete_basic_block (else_bb);
3051 num_true_changes++;
3052 }
3053 else
3054 remove_edge (find_edge (test_bb, join_bb));
3055
3056 remove_edge (find_edge (then_bb, join_bb));
3057 redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
3058 delete_basic_block (then_bb);
3059 num_true_changes++;
3060
3061 if (can_merge_blocks_p (test_bb, join_bb))
3062 {
3063 merge_blocks (test_bb, join_bb);
3064 num_true_changes++;
3065 }
3066
3067 num_updated_if_blocks++;
3068
3069 success_p = TRUE;
3070
3071 done:
3072 then_regs.release ();
3073 else_regs.release ();
3074 return success_p;
3075 }
3076
3077 \f
3078 /* Determine if a given basic block heads a simple IF-THEN-JOIN or an
3079 IF-THEN-ELSE-JOIN block.
3080
3081 If so, we'll try to convert the insns to not require the branch,
3082 using only transformations that do not require conditional execution.
3083
3084 Return TRUE if we were successful at converting the block. */
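/* For orientation (editorial): an IF-THEN-ELSE-JOIN block has edges
      TEST -> THEN -> JOIN and TEST -> ELSE -> JOIN;
   an IF-THEN-JOIN block has
      TEST -> THEN -> JOIN and TEST -> JOIN;
   and an IF-ELSE-JOIN block is the latter shape with the branch and
   fallthrough edges interchanged, handled below by reversing the
   condition.  */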
3085
3086 static int
3087 noce_find_if_block (basic_block test_bb, edge then_edge, edge else_edge,
3088 int pass)
3089 {
3090 basic_block then_bb, else_bb, join_bb;
3091 bool then_else_reversed = false;
3092 rtx_insn *jump;
3093 rtx cond;
3094 rtx_insn *cond_earliest;
3095 struct noce_if_info if_info;
3096
3097 /* We should only ever get here before reload. */
3098 gcc_assert (!reload_completed);
3099
3100 /* Recognize an IF-THEN-ELSE-JOIN block. */
3101 if (single_pred_p (then_edge->dest)
3102 && single_succ_p (then_edge->dest)
3103 && single_pred_p (else_edge->dest)
3104 && single_succ_p (else_edge->dest)
3105 && single_succ (then_edge->dest) == single_succ (else_edge->dest))
3106 {
3107 then_bb = then_edge->dest;
3108 else_bb = else_edge->dest;
3109 join_bb = single_succ (then_bb);
3110 }
3111 /* Recognize an IF-THEN-JOIN block. */
3112 else if (single_pred_p (then_edge->dest)
3113 && single_succ_p (then_edge->dest)
3114 && single_succ (then_edge->dest) == else_edge->dest)
3115 {
3116 then_bb = then_edge->dest;
3117 else_bb = NULL_BLOCK;
3118 join_bb = else_edge->dest;
3119 }
3120 /* Recognize an IF-ELSE-JOIN block. We can have those because the order
3121 of basic blocks in cfglayout mode does not matter, so the fallthrough
3122 edge can go to any basic block (and not just to bb->next_bb, like in
3123 cfgrtl mode). */
3124 else if (single_pred_p (else_edge->dest)
3125 && single_succ_p (else_edge->dest)
3126 && single_succ (else_edge->dest) == then_edge->dest)
3127 {
3128 /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
3129 To make this work, we have to invert the THEN and ELSE blocks
3130 and reverse the jump condition. */
3131 then_bb = else_edge->dest;
3132 else_bb = NULL_BLOCK;
3133 join_bb = single_succ (then_bb);
3134 then_else_reversed = true;
3135 }
3136 else
3137 /* Not a form we can handle. */
3138 return FALSE;
3139
3140 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
3141 if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3142 return FALSE;
3143 if (else_bb
3144 && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3145 return FALSE;
3146
3147 num_possible_if_blocks++;
3148
3149 if (dump_file)
3150 {
3151 fprintf (dump_file,
3152 "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
3153 (else_bb) ? "-ELSE" : "",
3154 pass, test_bb->index, then_bb->index);
3155
3156 if (else_bb)
3157 fprintf (dump_file, ", else %d", else_bb->index);
3158
3159 fprintf (dump_file, ", join %d\n", join_bb->index);
3160 }
3161
3162 /* If the conditional jump is more than just a conditional
3163 jump, then we cannot do if-conversion on this block. */
3164 jump = BB_END (test_bb);
3165 if (! onlyjump_p (jump))
3166 return FALSE;
3167
3168 /* If this is not a standard conditional jump, we can't parse it. */
3169 cond = noce_get_condition (jump, &cond_earliest, then_else_reversed);
3170 if (!cond)
3171 return FALSE;
3172
3173 /* We must be comparing objects whose modes imply the size. */
3174 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3175 return FALSE;
3176
3177 /* Initialize an IF_INFO struct to pass around. */
3178 memset (&if_info, 0, sizeof if_info);
3179 if_info.test_bb = test_bb;
3180 if_info.then_bb = then_bb;
3181 if_info.else_bb = else_bb;
3182 if_info.join_bb = join_bb;
3183 if_info.cond = cond;
3184 if_info.cond_earliest = cond_earliest;
3185 if_info.jump = jump;
3186 if_info.then_else_reversed = then_else_reversed;
3187 if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
3188 predictable_edge_p (then_edge));
3189
3190 /* Do the real work. */
3191
3192 if (noce_process_if_block (&if_info))
3193 return TRUE;
3194
3195 if (HAVE_conditional_move
3196 && cond_move_process_if_block (&if_info))
3197 return TRUE;
3198
3199 return FALSE;
3200 }
3201 \f
3202
3203 /* Merge the blocks and mark for local life update. */
3204
3205 static void
3206 merge_if_block (struct ce_if_block * ce_info)
3207 {
3208 basic_block test_bb = ce_info->test_bb; /* last test block */
3209 basic_block then_bb = ce_info->then_bb; /* THEN */
3210 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
3211 basic_block join_bb = ce_info->join_bb; /* join block */
3212 basic_block combo_bb;
3213
3214 /* All block merging is done into the lower block numbers. */
3215
3216 combo_bb = test_bb;
3217 df_set_bb_dirty (test_bb);
3218
3219 /* Merge any basic blocks to handle && and || subtests. Each of
3220 the blocks is on the fallthru path from the predecessor block. */
3221 if (ce_info->num_multiple_test_blocks > 0)
3222 {
3223 basic_block bb = test_bb;
3224 basic_block last_test_bb = ce_info->last_test_bb;
3225 basic_block fallthru = block_fallthru (bb);
3226
3227 do
3228 {
3229 bb = fallthru;
3230 fallthru = block_fallthru (bb);
3231 merge_blocks (combo_bb, bb);
3232 num_true_changes++;
3233 }
3234 while (bb != last_test_bb);
3235 }
3236
3237 /* Merge TEST block into THEN block. Normally the THEN block won't have a
3238 label, but it might if there were || tests. That label's count should be
3239 zero, and it normally should be removed. */
3240
3241 if (then_bb)
3242 {
3243 /* If THEN_BB has no successors, then there's a BARRIER after it.
3244 If COMBO_BB has more than one successor (THEN_BB), then that BARRIER
3245 is no longer needed, and in fact it is incorrect to leave it in
3246 the insn stream. */
3247 if (EDGE_COUNT (then_bb->succs) == 0
3248 && EDGE_COUNT (combo_bb->succs) > 1)
3249 {
3250 rtx_insn *end = NEXT_INSN (BB_END (then_bb));
3251 while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
3252 end = NEXT_INSN (end);
3253
3254 if (end && BARRIER_P (end))
3255 delete_insn (end);
3256 }
3257 merge_blocks (combo_bb, then_bb);
3258 num_true_changes++;
3259 }
3260
3261 /* The ELSE block, if it existed, had a label. That label's count
3262 will almost always be zero, but odd things can happen when labels
3263 get their addresses taken. */
3264 if (else_bb)
3265 {
3266 /* If ELSE_BB has no successors, then there's a BARRIER after it.
3267 If COMBO_BB has more than one successor (ELSE_BB), then that BARRIER
3268 is no longer needed, and in fact it is incorrect to leave it in
3269 the insn stream. */
3270 if (EDGE_COUNT (else_bb->succs) == 0
3271 && EDGE_COUNT (combo_bb->succs) > 1)
3272 {
3273 rtx_insn *end = NEXT_INSN (BB_END (else_bb));
3274 while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
3275 end = NEXT_INSN (end);
3276
3277 if (end && BARRIER_P (end))
3278 delete_insn (end);
3279 }
3280 merge_blocks (combo_bb, else_bb);
3281 num_true_changes++;
3282 }
3283
3284 /* If there was no join block reported, that means it was not adjacent
3285 to the others, and so we cannot merge them. */
3286
3287 if (! join_bb)
3288 {
3289 rtx_insn *last = BB_END (combo_bb);
3290
3291 /* The outgoing edge for the current COMBO block should already
3292 be correct. Verify this. */
3293 if (EDGE_COUNT (combo_bb->succs) == 0)
3294 gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
3295 || (NONJUMP_INSN_P (last)
3296 && GET_CODE (PATTERN (last)) == TRAP_IF
3297 && (TRAP_CONDITION (PATTERN (last))
3298 == const_true_rtx)));
3299
3300 else
3301 /* There should still be something at the end of the THEN or ELSE
3302 blocks taking us to our final destination. */
3303 gcc_assert (JUMP_P (last)
3304 || (EDGE_SUCC (combo_bb, 0)->dest
3305 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3306 && CALL_P (last)
3307 && SIBLING_CALL_P (last))
3308 || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
3309 && can_throw_internal (last)));
3310 }
3311
3312 /* The JOIN block may have had quite a number of other predecessors too.
3313 Since we've already merged the TEST, THEN and ELSE blocks, we should
3314 have only one remaining edge from our if-then-else diamond. If there
3315 is more than one remaining edge, it must come from elsewhere. There
3316 may be zero incoming edges if the THEN block didn't actually join
3317 back up (as with a call to a non-return function). */
3318 else if (EDGE_COUNT (join_bb->preds) < 2
3319 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3320 {
3321 /* We can merge the JOIN cleanly and update the dataflow, then try
3322 again on this pass. */
3323 merge_blocks (combo_bb, join_bb);
3324 num_true_changes++;
3325 }
3326 else
3327 {
3328 /* We cannot merge the JOIN. */
3329
3330 /* The outgoing edge for the current COMBO block should already
3331 be correct. Verify this. */
3332 gcc_assert (single_succ_p (combo_bb)
3333 && single_succ (combo_bb) == join_bb);
3334
3335 /* Remove the jump and cruft from the end of the COMBO block. */
3336 if (join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3337 tidy_fallthru_edge (single_succ_edge (combo_bb));
3338 }
3339
3340 num_updated_if_blocks++;
3341 }
3342 \f
3343 /* Find a block ending in a simple IF condition and try to transform it
3344 in some way. When converting a multi-block condition, put the new code
3345 in the first such block and delete the rest. Return a pointer to this
3346 first block if some transformation was done. Return NULL otherwise. */
3347
3348 static basic_block
3349 find_if_header (basic_block test_bb, int pass)
3350 {
3351 ce_if_block ce_info;
3352 edge then_edge;
3353 edge else_edge;
3354
3355 /* The kind of block we're looking for has exactly two successors. */
3356 if (EDGE_COUNT (test_bb->succs) != 2)
3357 return NULL;
3358
3359 then_edge = EDGE_SUCC (test_bb, 0);
3360 else_edge = EDGE_SUCC (test_bb, 1);
3361
3362 if (df_get_bb_dirty (then_edge->dest))
3363 return NULL;
3364 if (df_get_bb_dirty (else_edge->dest))
3365 return NULL;
3366
3367 /* Neither edge should be abnormal. */
3368 if ((then_edge->flags & EDGE_COMPLEX)
3369 || (else_edge->flags & EDGE_COMPLEX))
3370 return NULL;
3371
3372 /* Nor should either edge exit the loop. */
3373 if ((then_edge->flags & EDGE_LOOP_EXIT)
3374 || (else_edge->flags & EDGE_LOOP_EXIT))
3375 return NULL;
3376
3377 /* The THEN edge is canonically the one that falls through. */
3378 if (then_edge->flags & EDGE_FALLTHRU)
3379 ;
3380 else if (else_edge->flags & EDGE_FALLTHRU)
3381 {
3382 edge e = else_edge;
3383 else_edge = then_edge;
3384 then_edge = e;
3385 }
3386 else
3387 /* Otherwise this must be a multiway branch of some sort. */
3388 return NULL;
3389
3390 memset (&ce_info, 0, sizeof (ce_info));
3391 ce_info.test_bb = test_bb;
3392 ce_info.then_bb = then_edge->dest;
3393 ce_info.else_bb = else_edge->dest;
3394 ce_info.pass = pass;
3395
3396 #ifdef IFCVT_MACHDEP_INIT
3397 IFCVT_MACHDEP_INIT (&ce_info);
3398 #endif
3399
3400 if (!reload_completed
3401 && noce_find_if_block (test_bb, then_edge, else_edge, pass))
3402 goto success;
3403
3404 if (reload_completed
3405 && targetm.have_conditional_execution ()
3406 && cond_exec_find_if_block (&ce_info))
3407 goto success;
3408
3409 if (HAVE_trap
3410 && optab_handler (ctrap_optab, word_mode) != CODE_FOR_nothing
3411 && find_cond_trap (test_bb, then_edge, else_edge))
3412 goto success;
3413
3414 if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
3415 && (reload_completed || !targetm.have_conditional_execution ()))
3416 {
3417 if (find_if_case_1 (test_bb, then_edge, else_edge))
3418 goto success;
3419 if (find_if_case_2 (test_bb, then_edge, else_edge))
3420 goto success;
3421 }
3422
3423 return NULL;
3424
3425 success:
3426 if (dump_file)
3427 fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
3428 /* Set this so we continue looking. */
3429 cond_exec_changed_p = TRUE;
3430 return ce_info.test_bb;
3431 }
3432
3433 /* Check whether a block has two edges, one of which falls through to the
3434 next block and the other jumps to a specific block, so that we can tell
3435 if the block is part of an && test or an || test. Return -1 if not, else
3436 the number of non-note, non-debug, non-jump, non-USE/CLOBBER insns in the block. */
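/* E.g. (editorial) for "if (a && b) x = 1;", the block computing A
   falls through to the block testing B and jumps to the join/else
   block when A is false; calling this function with that join/else
   block as TARGET_BB identifies the A block as part of an &&
   chain.  */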
3437
3438 static int
3439 block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
3440 {
3441 edge cur_edge;
3442 int fallthru_p = FALSE;
3443 int jump_p = FALSE;
3444 rtx_insn *insn;
3445 rtx_insn *end;
3446 int n_insns = 0;
3447 edge_iterator ei;
3448
3449 if (!cur_bb || !target_bb)
3450 return -1;
3451
3452 /* If no edges, obviously it doesn't jump or fall through. */
3453 if (EDGE_COUNT (cur_bb->succs) == 0)
3454 return -1;
3455
3456 FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
3457 {
3458 if (cur_edge->flags & EDGE_COMPLEX)
3459 /* Anything complex isn't what we want. */
3460 return -1;
3461
3462 else if (cur_edge->flags & EDGE_FALLTHRU)
3463 fallthru_p = TRUE;
3464
3465 else if (cur_edge->dest == target_bb)
3466 jump_p = TRUE;
3467
3468 else
3469 return -1;
3470 }
3471
3472 if ((jump_p & fallthru_p) == 0)
3473 return -1;
3474
3475 /* Don't allow calls in the block, since this is used to group && and ||
3476 together for conditional execution support. ??? We should support
3477 conditional execution across calls for IA-64 some day, but
3478 for now it makes the code simpler. */
3479 end = BB_END (cur_bb);
3480 insn = BB_HEAD (cur_bb);
3481
3482 while (insn != NULL_RTX)
3483 {
3484 if (CALL_P (insn))
3485 return -1;
3486
3487 if (INSN_P (insn)
3488 && !JUMP_P (insn)
3489 && !DEBUG_INSN_P (insn)
3490 && GET_CODE (PATTERN (insn)) != USE
3491 && GET_CODE (PATTERN (insn)) != CLOBBER)
3492 n_insns++;
3493
3494 if (insn == end)
3495 break;
3496
3497 insn = NEXT_INSN (insn);
3498 }
3499
3500 return n_insns;
3501 }
3502
3503 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
3504 block. If so, we'll try to convert the insns to not require the branch.
3505 Return TRUE if we were successful at converting the block. */
3506
3507 static int
3508 cond_exec_find_if_block (struct ce_if_block * ce_info)
3509 {
3510 basic_block test_bb = ce_info->test_bb;
3511 basic_block then_bb = ce_info->then_bb;
3512 basic_block else_bb = ce_info->else_bb;
3513 basic_block join_bb = NULL_BLOCK;
3514 edge cur_edge;
3515 basic_block next;
3516 edge_iterator ei;
3517
3518 ce_info->last_test_bb = test_bb;
3519
3520 /* We should only ever get here after reload,
3521 and if we have conditional execution. */
3522 gcc_assert (reload_completed && targetm.have_conditional_execution ());
3523
3524 /* Discover if any fall through predecessors of the current test basic block
3525 were && tests (which jump to the else block) or || tests (which jump to
3526 the then block). */
3527 if (single_pred_p (test_bb)
3528 && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
3529 {
3530 basic_block bb = single_pred (test_bb);
3531 basic_block target_bb;
3532 int max_insns = MAX_CONDITIONAL_EXECUTE;
3533 int n_insns;
3534
3535 /* Determine if the preceding block is an && or || block. */
3536 if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
3537 {
3538 ce_info->and_and_p = TRUE;
3539 target_bb = else_bb;
3540 }
3541 else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
3542 {
3543 ce_info->and_and_p = FALSE;
3544 target_bb = then_bb;
3545 }
3546 else
3547 target_bb = NULL_BLOCK;
3548
3549 if (target_bb && n_insns <= max_insns)
3550 {
3551 int total_insns = 0;
3552 int blocks = 0;
3553
3554 ce_info->last_test_bb = test_bb;
3555
3556 /* Found at least one && or || block, look for more. */
3557 do
3558 {
3559 ce_info->test_bb = test_bb = bb;
3560 total_insns += n_insns;
3561 blocks++;
3562
3563 if (!single_pred_p (bb))
3564 break;
3565
3566 bb = single_pred (bb);
3567 n_insns = block_jumps_and_fallthru_p (bb, target_bb);
3568 }
3569 while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
3570
3571 ce_info->num_multiple_test_blocks = blocks;
3572 ce_info->num_multiple_test_insns = total_insns;
3573
3574 if (ce_info->and_and_p)
3575 ce_info->num_and_and_blocks = blocks;
3576 else
3577 ce_info->num_or_or_blocks = blocks;
3578 }
3579 }
3580
3581 /* The THEN block of an IF-THEN combo must have exactly one predecessor,
3582 other than any || blocks which jump to the THEN block. */
3583 if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
3584 return FALSE;
3585
3586 /* The edges of the THEN and ELSE blocks cannot have complex edges. */
3587 FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
3588 {
3589 if (cur_edge->flags & EDGE_COMPLEX)
3590 return FALSE;
3591 }
3592
3593 FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
3594 {
3595 if (cur_edge->flags & EDGE_COMPLEX)
3596 return FALSE;
3597 }
3598
3599 /* The THEN block of an IF-THEN combo must have zero or one successors. */
3600 if (EDGE_COUNT (then_bb->succs) > 0
3601 && (!single_succ_p (then_bb)
3602 || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3603 || (epilogue_completed
3604 && tablejump_p (BB_END (then_bb), NULL, NULL))))
3605 return FALSE;
3606
3607 /* If the THEN block has no successors, conditional execution can still
3608 make a conditional call. Don't do this unless the ELSE block has
3609 only one incoming edge -- the CFG manipulation is too ugly otherwise.
3610 Check for the last insn of the THEN block being an indirect jump, which
3611 is listed as not having any successors, but confuses the rest of the CE
3612 code processing. ??? We should fix this in the future. */
3613 if (EDGE_COUNT (then_bb->succs) == 0)
3614 {
3615 if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3616 {
3617 rtx_insn *last_insn = BB_END (then_bb);
3618
3619 while (last_insn
3620 && NOTE_P (last_insn)
3621 && last_insn != BB_HEAD (then_bb))
3622 last_insn = PREV_INSN (last_insn);
3623
3624 if (last_insn
3625 && JUMP_P (last_insn)
3626 && ! simplejump_p (last_insn))
3627 return FALSE;
3628
3629 join_bb = else_bb;
3630 else_bb = NULL_BLOCK;
3631 }
3632 else
3633 return FALSE;
3634 }
3635
3636 /* If the THEN block's successor is the other edge out of the TEST block,
3637 then we have an IF-THEN combo without an ELSE. */
3638 else if (single_succ (then_bb) == else_bb)
3639 {
3640 join_bb = else_bb;
3641 else_bb = NULL_BLOCK;
3642 }
3643
3644 /* If the THEN and ELSE blocks meet in a subsequent block, and the ELSE
3645 has exactly one predecessor and one successor, and the outgoing edge
3646 is not complex, then we have an IF-THEN-ELSE combo. */
3647 else if (single_succ_p (else_bb)
3648 && single_succ (then_bb) == single_succ (else_bb)
3649 && single_pred_p (else_bb)
3650 && !(single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3651 && !(epilogue_completed
3652 && tablejump_p (BB_END (else_bb), NULL, NULL)))
3653 join_bb = single_succ (else_bb);
3654
3655 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
3656 else
3657 return FALSE;
3658
3659 num_possible_if_blocks++;
3660
3661 if (dump_file)
3662 {
3663 fprintf (dump_file,
3664 "\nIF-THEN%s block found, pass %d, start block %d "
3665 "[insn %d], then %d [%d]",
3666 (else_bb) ? "-ELSE" : "",
3667 ce_info->pass,
3668 test_bb->index,
3669 BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
3670 then_bb->index,
3671 BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
3672
3673 if (else_bb)
3674 fprintf (dump_file, ", else %d [%d]",
3675 else_bb->index,
3676 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
3677
3678 fprintf (dump_file, ", join %d [%d]",
3679 join_bb->index,
3680 BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
3681
3682 if (ce_info->num_multiple_test_blocks > 0)
3683 fprintf (dump_file, ", %d %s block%s last test %d [%d]",
3684 ce_info->num_multiple_test_blocks,
3685 (ce_info->and_and_p) ? "&&" : "||",
3686 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
3687 ce_info->last_test_bb->index,
3688 ((BB_HEAD (ce_info->last_test_bb))
3689 ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
3690 : -1));
3691
3692 fputc ('\n', dump_file);
3693 }
3694
3695 /* Make sure the IF, THEN, and ELSE blocks are adjacent. Actually, we get the
3696 first condition for free, since we've already asserted that there's a
3697 fallthru edge from IF to THEN. Likewise for the && and || blocks, since
3698 we checked the FALLTHRU flag, those are already adjacent to the last IF
3699 block. */
3700 /* ??? As an enhancement, move the ELSE block. Have to deal with
3701 BLOCK notes, if by no other means than backing out the merge if they
3702 exist. Sticky enough I don't want to think about it now. */
3703 next = then_bb;
3704 if (else_bb && (next = next->next_bb) != else_bb)
3705 return FALSE;
3706 if ((next = next->next_bb) != join_bb
3707 && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3708 {
3709 if (else_bb)
3710 join_bb = NULL;
3711 else
3712 return FALSE;
3713 }
3714
3715 /* Do the real work. */
3716
3717 ce_info->else_bb = else_bb;
3718 ce_info->join_bb = join_bb;
3719
3720 /* If we have && and || tests, first try to combine the && and ||
3721 tests into the conditional code, and if that fails, go back and handle
3722 it without the && and ||, which at present handles the && case if there
3723 was no ELSE block. */
3724 if (cond_exec_process_if_block (ce_info, TRUE))
3725 return TRUE;
3726
3727 if (ce_info->num_multiple_test_blocks)
3728 {
3729 cancel_changes (0);
3730
3731 if (cond_exec_process_if_block (ce_info, FALSE))
3732 return TRUE;
3733 }
3734
3735 return FALSE;
3736 }
3737
3738 /* Convert a branch over a trap, or a branch
3739 to a trap, into a conditional trap. */
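/* Editorial illustration of the branch-over-a-trap case:
     if (x != 0) goto over;  trap;  over:
   Here the trap sits in the fallthrough (THEN) arm, so the jump
   condition is reversed below and the pair becomes a single
   conditional trap on "x == 0".  A branch directly to a trap block
   needs no reversal.  */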
3740
3741 static int
3742 find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
3743 {
3744 basic_block then_bb = then_edge->dest;
3745 basic_block else_bb = else_edge->dest;
3746 basic_block other_bb, trap_bb;
3747 rtx_insn *trap, *jump;
3748 rtx cond, seq;
3749 rtx_insn *cond_earliest;
3750 enum rtx_code code;
3751
3752 /* Locate the block with the trap instruction. */
3753 /* ??? While we look for no successors, we really ought to allow
3754 EH successors. Need to fix merge_if_block for that to work. */
3755 if ((trap = block_has_only_trap (then_bb)) != NULL)
3756 trap_bb = then_bb, other_bb = else_bb;
3757 else if ((trap = block_has_only_trap (else_bb)) != NULL)
3758 trap_bb = else_bb, other_bb = then_bb;
3759 else
3760 return FALSE;
3761
3762 if (dump_file)
3763 {
3764 fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
3765 test_bb->index, trap_bb->index);
3766 }
3767
3768 /* If this is not a standard conditional jump, we can't parse it. */
3769 jump = BB_END (test_bb);
3770 cond = noce_get_condition (jump, &cond_earliest, false);
3771 if (! cond)
3772 return FALSE;
3773
3774 /* If the conditional jump is more than just a conditional jump, then
3775 we cannot do if-conversion on this block. */
3776 if (! onlyjump_p (jump))
3777 return FALSE;
3778
3779 /* We must be comparing objects whose modes imply the size. */
3780 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3781 return FALSE;
3782
3783 /* Reverse the comparison code, if necessary. */
3784 code = GET_CODE (cond);
3785 if (then_bb == trap_bb)
3786 {
3787 code = reversed_comparison_code (cond, jump);
3788 if (code == UNKNOWN)
3789 return FALSE;
3790 }
3791
3792 /* Attempt to generate the conditional trap. */
3793 seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
3794 copy_rtx (XEXP (cond, 1)),
3795 TRAP_CODE (PATTERN (trap)));
3796 if (seq == NULL)
3797 return FALSE;
3798
3799 /* Emit the new insns before cond_earliest. */
3800 emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATION (trap));
3801
3802 /* Delete the trap block if possible. */
3803 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
3804 df_set_bb_dirty (test_bb);
3805 df_set_bb_dirty (then_bb);
3806 df_set_bb_dirty (else_bb);
3807
3808 if (EDGE_COUNT (trap_bb->preds) == 0)
3809 {
3810 delete_basic_block (trap_bb);
3811 num_true_changes++;
3812 }
3813
3814 /* Wire together the blocks again. */
3815 if (current_ir_type () == IR_RTL_CFGLAYOUT)
3816 single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
3817 else if (trap_bb == then_bb)
3818 {
3819 rtx lab;
3820 rtx_insn *newjump;
3821
3822 lab = JUMP_LABEL (jump);
3823 newjump = emit_jump_insn_after (gen_jump (lab), jump);
3824 LABEL_NUSES (lab) += 1;
3825 JUMP_LABEL (newjump) = lab;
3826 emit_barrier_after (newjump);
3827 }
3828 delete_insn (jump);
3829
3830 if (can_merge_blocks_p (test_bb, other_bb))
3831 {
3832 merge_blocks (test_bb, other_bb);
3833 num_true_changes++;
3834 }
3835
3836 num_updated_if_blocks++;
3837 return TRUE;
3838 }
3839
3840 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
3841 return it. */
3842
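/* An unconditional trap (e.g. as emitted for __builtin_trap ()) is
   expected to have a TRAP_IF pattern whose TRAP_CONDITION is
   const_true_rtx; that is exactly what the check below insists on.  */
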
3843 static rtx_insn *
3844 block_has_only_trap (basic_block bb)
3845 {
3846 rtx_insn *trap;
3847
3848 /* We're not the exit block. */
3849 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
3850 return NULL;
3851
3852 /* The block must have no successors. */
3853 if (EDGE_COUNT (bb->succs) > 0)
3854 return NULL;
3855
3856 /* The only active instruction in the block must be the trap. */
3857 trap = first_active_insn (bb);
3858 if (! (trap == BB_END (bb)
3859 && GET_CODE (PATTERN (trap)) == TRAP_IF
3860 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
3861 return NULL;
3862
3863 return trap;
3864 }
3865
3866 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
3867 transformable, but not necessarily the other. There need be no
3868 JOIN block.
3869
3870 Return TRUE if we were successful at converting the block.
3871
3872 Cases we'd like to look at:
3873
3874 (1)
3875 if (test) goto over; // x not live
3876 x = a;
3877 goto label;
3878 over:
3879
3880 becomes
3881
3882 x = a;
3883 if (! test) goto label;
3884
3885 (2)
3886 if (test) goto E; // x not live
3887 x = big();
3888 goto L;
3889 E:
3890 x = b;
3891 goto M;
3892
3893 becomes
3894
3895 x = b;
3896 if (test) goto M;
3897 x = big();
3898 goto L;
3899
3900 (3) // This one's really only interesting for targets that can do
3901 // multiway branching, e.g. IA-64 BBB bundles. For other targets
3902 // it results in multiple branches on a cache line, which often
3903 // does not sit well with predictors.
3904
3905 if (test1) goto E; // predicted not taken
3906 x = a;
3907 if (test2) goto F;
3908 ...
3909 E:
3910 x = b;
3911 J:
3912
3913 becomes
3914
3915 x = a;
3916 if (test1) goto E;
3917 if (test2) goto F;
3918
3919 Notes:
3920
3921 (A) Don't do (2) if the branch is predicted against the block we're
3922 eliminating. Do it anyway if we can eliminate a branch; this requires
3923 that the sole successor of the eliminated block postdominate the other
3924 side of the if.
3925
3926 (B) With CE, on (3) we can steal from both sides of the if, creating
3927
3928 if (test1) x = a;
3929 if (!test1) x = b;
3930 if (test1) goto J;
3931 if (test2) goto F;
3932 ...
3933 J:
3934
3935 Again, this is most useful if J postdominates.
3936
3937 (C) CE substitutes for helpful life information.
3938
3939 (D) These heuristics need a lot of work. */
3940
3941 /* Tests for case 1 above. */
3942
3943 static int
3944 find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
3945 {
3946 basic_block then_bb = then_edge->dest;
3947 basic_block else_bb = else_edge->dest;
3948 basic_block new_bb;
3949 int then_bb_index, then_prob;
3950 rtx else_target = NULL_RTX;
3951
3952 /* If we are partitioning hot/cold basic blocks, we don't want to
3953 mess up unconditional or indirect jumps that cross between hot
3954 and cold sections.
3955
3956 Basic block partitioning may result in some jumps that appear to
3957 be optimizable (or blocks that appear to be mergeable), but which really
3958 must be left untouched (they are required to make it safely across
3959 partition boundaries). See the comments at the top of
3960 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
3961
3962 if ((BB_END (then_bb)
3963 && JUMP_P (BB_END (then_bb))
3964 && CROSSING_JUMP_P (BB_END (then_bb)))
3965 || (BB_END (test_bb)
3966 && JUMP_P (BB_END (test_bb))
3967 && CROSSING_JUMP_P (BB_END (test_bb)))
3968 || (BB_END (else_bb)
3969 && JUMP_P (BB_END (else_bb))
3970 && CROSSING_JUMP_P (BB_END (else_bb))))
3971 return FALSE;
3972
3973 /* THEN has one successor. */
3974 if (!single_succ_p (then_bb))
3975 return FALSE;
3976
3977 /* THEN does not fall through, but is not strange either. */
3978 if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
3979 return FALSE;
3980
3981 /* THEN has one predecessor. */
3982 if (!single_pred_p (then_bb))
3983 return FALSE;
3984
3985 /* THEN must do something. */
3986 if (forwarder_block_p (then_bb))
3987 return FALSE;
3988
3989 num_possible_if_blocks++;
3990 if (dump_file)
3991 fprintf (dump_file,
3992 "\nIF-CASE-1 found, start %d, then %d\n",
3993 test_bb->index, then_bb->index);
3994
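/* THEN_PROB is the probability, on the REG_BR_PROB_BASE scale, that
   THEN_BB is *not* entered; when the edge carries no profile estimate,
   assume an even split.  */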
3995 if (then_edge->probability)
3996 then_prob = REG_BR_PROB_BASE - then_edge->probability;
3997 else
3998 then_prob = REG_BR_PROB_BASE / 2;
3999
4000 /* We're speculating from the THEN path, so we want to make sure the
4001 cost of speculation is within reason. */
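/* As a worked example: on a target whose BRANCH_COST evaluates to 2
   for this block, the budget passed below is COSTS_N_INSNS (2), i.e.
   roughly two simple instructions' worth of speculated code.
   (Illustrative figure; BRANCH_COST is target-specific.)  */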
4002 if (! cheap_bb_rtx_cost_p (then_bb, then_prob,
4003 COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
4004 predictable_edge_p (then_edge)))))
4005 return FALSE;
4006
4007 if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4008 {
4009 rtx_insn *jump = BB_END (else_edge->src);
4010 gcc_assert (JUMP_P (jump));
4011 else_target = JUMP_LABEL (jump);
4012 }
4013
4014 /* Registers set are dead, or are predicable. */
4015 if (! dead_or_predicable (test_bb, then_bb, else_bb,
4016 single_succ_edge (then_bb), 1))
4017 return FALSE;
4018
4019 /* Conversion went ok, including moving the insns and fixing up the
4020 jump. Adjust the CFG to match. */
4021
4022 /* We can avoid creating a new basic block if then_bb is immediately
4023 followed by else_bb, i.e. deleting then_bb allows test_bb to fall
4024 through to else_bb. */
4025
4026 if (then_bb->next_bb == else_bb
4027 && then_bb->prev_bb == test_bb
4028 && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4029 {
4030 redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
4031 new_bb = 0;
4032 }
4033 else if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4034 new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb),
4035 else_bb, else_target);
4036 else
4037 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
4038 else_bb);
4039
4040 df_set_bb_dirty (test_bb);
4041 df_set_bb_dirty (else_bb);
4042
4043 then_bb_index = then_bb->index;
4044 delete_basic_block (then_bb);
4045
4046 /* Make the rest of the code believe that the newly created block is the
4047 THEN_BB block we removed. */
4048 if (new_bb)
4049 {
4050 df_bb_replace (then_bb_index, new_bb);
4051 /* This should have been done above via force_nonfallthru_and_redirect
4052 (possibly called from redirect_edge_and_branch_force). */
4053 gcc_checking_assert (BB_PARTITION (new_bb) == BB_PARTITION (test_bb));
4054 }
4055
4056 num_true_changes++;
4057 num_updated_if_blocks++;
4058
4059 return TRUE;
4060 }
4061
4062 /* Test for case 2 above. */
4063
4064 static int
4065 find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
4066 {
4067 basic_block then_bb = then_edge->dest;
4068 basic_block else_bb = else_edge->dest;
4069 edge else_succ;
4070 int then_prob, else_prob;
4071
4072 /* We do not want to speculate (empty) loop latches. */
4073 if (current_loops
4074 && else_bb->loop_father->latch == else_bb)
4075 return FALSE;
4076
4077 /* If we are partitioning hot/cold basic blocks, we don't want to
4078 mess up unconditional or indirect jumps that cross between hot
4079 and cold sections.
4080
4081 Basic block partitioning may result in some jumps that appear to
4082 be optimizable (or blocks that appear to be mergeable), but which really
4083 must be left untouched (they are required to make it safely across
4084 partition boundaries). See the comments at the top of
4085 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4086
4087 if ((BB_END (then_bb)
4088 && JUMP_P (BB_END (then_bb))
4089 && CROSSING_JUMP_P (BB_END (then_bb)))
4090 || (BB_END (test_bb)
4091 && JUMP_P (BB_END (test_bb))
4092 && CROSSING_JUMP_P (BB_END (test_bb)))
4093 || (BB_END (else_bb)
4094 && JUMP_P (BB_END (else_bb))
4095 && CROSSING_JUMP_P (BB_END (else_bb))))
4096 return FALSE;
4097
4098 /* ELSE has one successor. */
4099 if (!single_succ_p (else_bb))
4100 return FALSE;
4101 else
4102 else_succ = single_succ_edge (else_bb);
4103
4104 /* ELSE outgoing edge is not complex. */
4105 if (else_succ->flags & EDGE_COMPLEX)
4106 return FALSE;
4107
4108 /* ELSE has one predecessor. */
4109 if (!single_pred_p (else_bb))
4110 return FALSE;
4111
4112 /* THEN is not EXIT. */
4113 if (then_bb->index < NUM_FIXED_BLOCKS)
4114 return FALSE;
4115
4116 if (else_edge->probability)
4117 {
4118 else_prob = else_edge->probability;
4119 then_prob = REG_BR_PROB_BASE - else_prob;
4120 }
4121 else
4122 {
4123 else_prob = REG_BR_PROB_BASE / 2;
4124 then_prob = REG_BR_PROB_BASE / 2;
4125 }
4126
4127 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
4128 if (else_prob > then_prob)
4129 ;
4130 else if (else_succ->dest->index < NUM_FIXED_BLOCKS
4131 || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
4132 else_succ->dest))
4133 ;
4134 else
4135 return FALSE;
4136
4137 num_possible_if_blocks++;
4138 if (dump_file)
4139 fprintf (dump_file,
4140 "\nIF-CASE-2 found, start %d, else %d\n",
4141 test_bb->index, else_bb->index);
4142
4143 /* We're speculating from the ELSE path, so we want to make sure the
4144 cost of speculation is within reason. */
4145 if (! cheap_bb_rtx_cost_p (else_bb, else_prob,
4146 COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
4147 predictable_edge_p (else_edge)))))
4148 return FALSE;
4149
4150 /* Registers set are dead, or are predicable. */
4151 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ, 0))
4152 return FALSE;
4153
4154 /* Conversion went ok, including moving the insns and fixing up the
4155 jump. Adjust the CFG to match. */
4156
4157 df_set_bb_dirty (test_bb);
4158 df_set_bb_dirty (then_bb);
4159 delete_basic_block (else_bb);
4160
4161 num_true_changes++;
4162 num_updated_if_blocks++;
4163
4164 /* ??? We may now fallthru from one of THEN's successors into a join
4165 block. Rerun cleanup_cfg? Examine things manually? Wait? */
4166
4167 return TRUE;
4168 }
4169
4170 /* Used by the code above to perform the actual rtl transformations.
4171 Return TRUE if successful.
4172
4173 TEST_BB is the block containing the conditional branch. MERGE_BB
4174 is the block containing the code to manipulate. DEST_EDGE is an
4175 edge representing a jump to the join block; after the conversion,
4176 TEST_BB should be branching to its destination.
4177 REVERSEP is true if the sense of the branch should be reversed. */
4178
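/* As a concrete example, for case (1) above find_if_case_1 invokes
   this as

     dead_or_predicable (test_bb, then_bb, else_bb,
                         single_succ_edge (then_bb), 1);

   so MERGE_BB is the THEN block, DEST_EDGE leads to its join point,
   and the branch sense must be reversed.  */
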
4179 static int
4180 dead_or_predicable (basic_block test_bb, basic_block merge_bb,
4181 basic_block other_bb, edge dest_edge, int reversep)
4182 {
4183 basic_block new_dest = dest_edge->dest;
4184 rtx_insn *head, *end, *jump;
4185 rtx_insn *earliest = NULL;
4186 rtx old_dest;
4187 bitmap merge_set = NULL;
4188 /* Number of pending changes. */
4189 int n_validated_changes = 0;
4190 rtx new_dest_label = NULL_RTX;
4191
4192 jump = BB_END (test_bb);
4193
4194 /* Find the extent of the real code in the merge block. */
4195 head = BB_HEAD (merge_bb);
4196 end = BB_END (merge_bb);
4197
4198 while (DEBUG_INSN_P (end) && end != head)
4199 end = PREV_INSN (end);
4200
4201 /* If merge_bb ends with a tablejump, predicating/moving insns
4202 into test_bb and then deleting merge_bb will result in the jumptable
4203 that follows merge_bb being removed along with merge_bb itself, leaving
4204 an unresolved reference to the jumptable. */
4205 if (tablejump_p (end, NULL, NULL))
4206 return FALSE;
4207
4208 if (LABEL_P (head))
4209 head = NEXT_INSN (head);
4210 while (DEBUG_INSN_P (head) && head != end)
4211 head = NEXT_INSN (head);
4212 if (NOTE_P (head))
4213 {
4214 if (head == end)
4215 {
4216 head = end = NULL;
4217 goto no_body;
4218 }
4219 head = NEXT_INSN (head);
4220 while (DEBUG_INSN_P (head) && head != end)
4221 head = NEXT_INSN (head);
4222 }
4223
4224 if (JUMP_P (end))
4225 {
4226 if (!onlyjump_p (end))
4227 return FALSE;
4228 if (head == end)
4229 {
4230 head = end = NULL;
4231 goto no_body;
4232 }
4233 end = PREV_INSN (end);
4234 while (DEBUG_INSN_P (end) && end != head)
4235 end = PREV_INSN (end);
4236 }
4237
4238 /* Don't move frame-related insns across the conditional branch; doing
4239 so can leave one of the paths of the branch with wrong unwind info. */
4240 if (epilogue_completed)
4241 {
4242 rtx_insn *insn = head;
4243 while (1)
4244 {
4245 if (INSN_P (insn) && RTX_FRAME_RELATED_P (insn))
4246 return FALSE;
4247 if (insn == end)
4248 break;
4249 insn = NEXT_INSN (insn);
4250 }
4251 }
4252
4253 /* Disable handling dead code by conditional execution if the machine needs
4254 to do anything funny with the tests, etc. */
4255 #ifndef IFCVT_MODIFY_TESTS
4256 if (targetm.have_conditional_execution ())
4257 {
4258 /* In the conditional execution case, we have things easy. We know
4259 the condition is reversible. We don't have to check life info
4260 because we're going to conditionally execute the code anyway.
4261 All that's left is making sure the insns involved can actually
4262 be predicated. */
4263
4264 rtx cond;
4265
4266 cond = cond_exec_get_condition (jump);
4267 if (! cond)
4268 return FALSE;
4269
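/* PROB_VAL is the branch probability taken from the REG_BR_PROB note,
   on the REG_BR_PROB_BASE scale, or -1 if the note is absent.  */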
4270 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
4271 int prob_val = (note ? XINT (note, 0) : -1);
4272
4273 if (reversep)
4274 {
4275 enum rtx_code rev = reversed_comparison_code (cond, jump);
4276 if (rev == UNKNOWN)
4277 return FALSE;
4278 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
4279 XEXP (cond, 1));
4280 if (prob_val >= 0)
4281 prob_val = REG_BR_PROB_BASE - prob_val;
4282 }
4283
4284 if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
4285 && verify_changes (0))
4286 n_validated_changes = num_validated_changes ();
4287 else
4288 cancel_changes (0);
4289
4290 earliest = jump;
4291 }
4292 #endif
4293
4294 /* If we allocated new pseudos (e.g. in the conditional move
4295 expander called from noce_emit_cmove), we must resize the
4296 array first. */
4297 if (max_regno < max_reg_num ())
4298 max_regno = max_reg_num ();
4299
4300 /* Try the NCE path if the CE path did not result in any changes. */
4301 if (n_validated_changes == 0)
4302 {
4303 rtx cond;
4304 rtx_insn *insn;
4305 regset live;
4306 bool success;
4307
4308 /* In the non-conditional execution case, we have to verify that there
4309 are no trapping operations, no calls, no references to memory, and
4310 that any registers modified are dead at the branch site. */
4311
4312 if (!any_condjump_p (jump))
4313 return FALSE;
4314
4315 /* Find the extent of the conditional. */
4316 cond = noce_get_condition (jump, &earliest, false);
4317 if (!cond)
4318 return FALSE;
4319
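/* Seed LIVE with the registers live at END, then ask
   can_move_insns_across whether HEAD..END can be hoisted above the
   branch without conflicting with anything live in OTHER_BB.  */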
4320 live = BITMAP_ALLOC (&reg_obstack);
4321 simulate_backwards_to_point (merge_bb, live, end);
4322 success = can_move_insns_across (head, end, earliest, jump,
4323 merge_bb, live,
4324 df_get_live_in (other_bb), NULL);
4325 BITMAP_FREE (live);
4326 if (!success)
4327 return FALSE;
4328
4329 /* Collect the set of registers set in MERGE_BB. */
4330 merge_set = BITMAP_ALLOC (&reg_obstack);
4331
4332 FOR_BB_INSNS (merge_bb, insn)
4333 if (NONDEBUG_INSN_P (insn))
4334 df_simulate_find_defs (insn, merge_set);
4335
4336 /* If shrink-wrapping, disable this optimization when test_bb is
4337 the first basic block and merge_bb exits. The idea is to not
4338 move code setting up a return register as that may clobber a
4339 register used to pass function parameters, which then must be
4340 saved in caller-saved regs. A caller-saved reg requires the
4341 prologue, killing a shrink-wrap opportunity. */
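/* An illustrative case (hypothetical function names): in

     int f (int x) { if (x) return big (x); return 0; }

   on a target where the same register carries both the incoming
   argument and the return value, hoisting the "return 0" setup above
   the test would clobber X, which would then have to be saved in a
   prologue -- exactly the shrink-wrap loss described above.  */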
4342 if ((SHRINK_WRAPPING_ENABLED && !epilogue_completed)
4343 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == test_bb
4344 && single_succ_p (new_dest)
4345 && single_succ (new_dest) == EXIT_BLOCK_PTR_FOR_FN (cfun)
4346 && bitmap_intersect_p (df_get_live_in (new_dest), merge_set))
4347 {
4348 regset return_regs;
4349 unsigned int i;
4350
4351 return_regs = BITMAP_ALLOC (&reg_obstack);
4352
4353 /* Start off with the intersection of regs used to pass
4354 params and regs used to return values. */
4355 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4356 if (FUNCTION_ARG_REGNO_P (i)
4357 && targetm.calls.function_value_regno_p (i))
4358 bitmap_set_bit (return_regs, INCOMING_REGNO (i));
4359
4360 bitmap_and_into (return_regs,
4361 df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4362 bitmap_and_into (return_regs,
4363 df_get_live_in (EXIT_BLOCK_PTR_FOR_FN (cfun)));
4364 if (!bitmap_empty_p (return_regs))
4365 {
4366 FOR_BB_INSNS_REVERSE (new_dest, insn)
4367 if (NONDEBUG_INSN_P (insn))
4368 {
4369 df_ref def;
4370
4371 /* If this insn sets any reg in return_regs, add all
4372 reg uses to the set of regs we're interested in. */
4373 FOR_EACH_INSN_DEF (def, insn)
4374 if (bitmap_bit_p (return_regs, DF_REF_REGNO (def)))
4375 {
4376 df_simulate_uses (insn, return_regs);
4377 break;
4378 }
4379 }
4380 if (bitmap_intersect_p (merge_set, return_regs))
4381 {
4382 BITMAP_FREE (return_regs);
4383 BITMAP_FREE (merge_set);
4384 return FALSE;
4385 }
4386 }
4387 BITMAP_FREE (return_regs);
4388 }
4389 }
4390
4391 no_body:
4392 /* We don't want to use normal invert_jump or redirect_jump because
4393 we don't want delete_insn to be called.  Also, we want to do our own
4394 change group management. */
4395
4396 old_dest = JUMP_LABEL (jump);
4397 if (other_bb != new_dest)
4398 {
4399 if (!any_condjump_p (jump))
4400 goto cancel;
4401
4402 if (JUMP_P (BB_END (dest_edge->src)))
4403 new_dest_label = JUMP_LABEL (BB_END (dest_edge->src));
4404 else if (new_dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4405 new_dest_label = ret_rtx;
4406 else
4407 new_dest_label = block_label (new_dest);
4408
4409 if (reversep
4410 ? ! invert_jump_1 (jump, new_dest_label)
4411 : ! redirect_jump_1 (jump, new_dest_label))
4412 goto cancel;
4413 }
4414
4415 if (verify_changes (n_validated_changes))
4416 confirm_change_group ();
4417 else
4418 goto cancel;
4419
4420 if (other_bb != new_dest)
4421 {
4422 redirect_jump_2 (jump, old_dest, new_dest_label, 0, reversep);
4423
4424 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
4425 if (reversep)
4426 {
4427 gcov_type count, probability;
4428 count = BRANCH_EDGE (test_bb)->count;
4429 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
4430 FALLTHRU_EDGE (test_bb)->count = count;
4431 probability = BRANCH_EDGE (test_bb)->probability;
4432 BRANCH_EDGE (test_bb)->probability
4433 = FALLTHRU_EDGE (test_bb)->probability;
4434 FALLTHRU_EDGE (test_bb)->probability = probability;
4435 update_br_prob_note (test_bb);
4436 }
4437 }
4438
4439 /* Move the insns out of MERGE_BB to before the branch. */
4440 if (head != NULL)
4441 {
4442 rtx_insn *insn;
4443
4444 if (end == BB_END (merge_bb))
4445 BB_END (merge_bb) = PREV_INSN (head);
4446
4447 /* PR 21767: when moving insns above a conditional branch, the REG_EQUAL
4448 notes being moved might become invalid. */
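/* E.g. (illustrative) an insn in MERGE_BB might carry
   REG_EQUAL (const_int 0) only because the guarding comparison proved
   the register zero on that path; once the insn executes
   unconditionally the note is no longer true.  */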
4449 insn = head;
4450 do
4451 {
4452 rtx note;
4453
4454 if (! INSN_P (insn))
4455 continue;
4456 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
4457 if (! note)
4458 continue;
4459 remove_note (insn, note);
4460 } while (insn != end && (insn = NEXT_INSN (insn)));
4461
4462 /* PR46315: when moving insns above a conditional branch, the REG_EQUAL
4463 notes referring to the registers being set might become invalid. */
4464 if (merge_set)
4465 {
4466 unsigned i;
4467 bitmap_iterator bi;
4468
4469 EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
4470 remove_reg_equal_equiv_notes_for_regno (i);
4471
4472 BITMAP_FREE (merge_set);
4473 }
4474
4475 reorder_insns (head, end, PREV_INSN (earliest));
4476 }
4477
4478 /* Remove the jump and edge if we can. */
4479 if (other_bb == new_dest)
4480 {
4481 delete_insn (jump);
4482 remove_edge (BRANCH_EDGE (test_bb));
4483 /* ??? Can't merge blocks here, as then_bb is still in use.
4484 At minimum, the merge will get done just before bb-reorder. */
4485 }
4486
4487 return TRUE;
4488
4489 cancel:
4490 cancel_changes (0);
4491
4492 if (merge_set)
4493 BITMAP_FREE (merge_set);
4494
4495 return FALSE;
4496 }
4497 \f
4498 /* Main entry point for all if-conversion.  AFTER_COMBINE is true if
4499 we are after the combine pass. */
4500
4501 static void
4502 if_convert (bool after_combine)
4503 {
4504 basic_block bb;
4505 int pass;
4506
4507 if (optimize == 1)
4508 {
4509 df_live_add_problem ();
4510 df_live_set_all_dirty ();
4511 }
4512
4513 /* Record whether we are after the combine pass. */
4514 ifcvt_after_combine = after_combine;
4515 num_possible_if_blocks = 0;
4516 num_updated_if_blocks = 0;
4517 num_true_changes = 0;
4518
4519 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
4520 mark_loop_exit_edges ();
4521 loop_optimizer_finalize ();
4522 free_dominance_info (CDI_DOMINATORS);
4523
4524 /* Compute postdominators. */
4525 calculate_dominance_info (CDI_POST_DOMINATORS);
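/* (Post-dominator information is consulted above in find_if_case_2
   to tell whether the successor of an ELSE block post-dominates the
   THEN block.)  */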
4526
4527 df_set_flags (DF_LR_RUN_DCE);
4528
4529 /* Go through each of the basic blocks looking for things to convert. If we
4530 have conditional execution, we make multiple passes to allow us to handle
4531 IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks. */
4532 pass = 0;
4533 do
4534 {
4535 df_analyze ();
4536 /* Only need to do DCE on the first pass. */
4537 df_clear_flags (DF_LR_RUN_DCE);
4538 cond_exec_changed_p = FALSE;
4539 pass++;
4540
4541 #ifdef IFCVT_MULTIPLE_DUMPS
4542 if (dump_file && pass > 1)
4543 fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
4544 #endif
4545
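/* Keep converting at BB while its dataflow info is still clean;
   find_if_header returns the block from which to continue after a
   successful conversion, so we resume there instead of rescanning.  */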
4546 FOR_EACH_BB_FN (bb, cfun)
4547 {
4548 basic_block new_bb;
4549 while (!df_get_bb_dirty (bb)
4550 && (new_bb = find_if_header (bb, pass)) != NULL)
4551 bb = new_bb;
4552 }
4553
4554 #ifdef IFCVT_MULTIPLE_DUMPS
4555 if (dump_file && cond_exec_changed_p)
4556 print_rtl_with_bb (dump_file, get_insns (), dump_flags);
4557 #endif
4558 }
4559 while (cond_exec_changed_p);
4560
4561 #ifdef IFCVT_MULTIPLE_DUMPS
4562 if (dump_file)
4563 fprintf (dump_file, "\n\n========== no more changes\n");
4564 #endif
4565
4566 free_dominance_info (CDI_POST_DOMINATORS);
4567
4568 if (dump_file)
4569 fflush (dump_file);
4570
4571 clear_aux_for_blocks ();
4572
4573 /* If we allocated new pseudos, we must resize the array for sched1. */
4574 if (max_regno < max_reg_num ())
4575 max_regno = max_reg_num ();
4576
4577 /* Write the final stats. */
4578 if (dump_file && num_possible_if_blocks > 0)
4579 {
4580 fprintf (dump_file,
4581 "\n%d possible IF blocks searched.\n",
4582 num_possible_if_blocks);
4583 fprintf (dump_file,
4584 "%d IF blocks converted.\n",
4585 num_updated_if_blocks);
4586 fprintf (dump_file,
4587 "%d true changes made.\n\n\n",
4588 num_true_changes);
4589 }
4590
4591 if (optimize == 1)
4592 df_remove_problem (df_live);
4593
4594 #ifdef ENABLE_CHECKING
4595 verify_flow_info ();
4596 #endif
4597 }
4598 \f
4599 /* If-conversion and CFG cleanup. */
4600 static unsigned int
4601 rest_of_handle_if_conversion (void)
4602 {
4603 if (flag_if_conversion)
4604 {
4605 if (dump_file)
4606 {
4607 dump_reg_info (dump_file);
4608 dump_flow_info (dump_file, dump_flags);
4609 }
4610 cleanup_cfg (CLEANUP_EXPENSIVE);
4611 if_convert (false);
4612 }
4613
4614 cleanup_cfg (0);
4615 return 0;
4616 }
4617
4618 namespace {
4619
4620 const pass_data pass_data_rtl_ifcvt =
4621 {
4622 RTL_PASS, /* type */
4623 "ce1", /* name */
4624 OPTGROUP_NONE, /* optinfo_flags */
4625 TV_IFCVT, /* tv_id */
4626 0, /* properties_required */
4627 0, /* properties_provided */
4628 0, /* properties_destroyed */
4629 0, /* todo_flags_start */
4630 TODO_df_finish, /* todo_flags_finish */
4631 };
4632
4633 class pass_rtl_ifcvt : public rtl_opt_pass
4634 {
4635 public:
4636 pass_rtl_ifcvt (gcc::context *ctxt)
4637 : rtl_opt_pass (pass_data_rtl_ifcvt, ctxt)
4638 {}
4639
4640 /* opt_pass methods: */
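/* The dbg_cnt () guard below makes the pass bisectable from the
   command line, e.g. -fdbg-cnt=if_conversion:10 to stop converting
   after ten hits (illustrative count).  */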
4641 virtual bool gate (function *)
4642 {
4643 return (optimize > 0) && dbg_cnt (if_conversion);
4644 }
4645
4646 virtual unsigned int execute (function *)
4647 {
4648 return rest_of_handle_if_conversion ();
4649 }
4650
4651 }; // class pass_rtl_ifcvt
4652
4653 } // anon namespace
4654
4655 rtl_opt_pass *
4656 make_pass_rtl_ifcvt (gcc::context *ctxt)
4657 {
4658 return new pass_rtl_ifcvt (ctxt);
4659 }
4660
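/* This factory is what the pass manager calls; the "ce1" pass is
   scheduled from passes.def via its NEXT_PASS (pass_rtl_ifcvt)
   entry.  */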
4661
4662 /* Rerun if-conversion, as combine may have simplified things enough
4663 to now meet sequence length restrictions. */
4664
4665 namespace {
4666
4667 const pass_data pass_data_if_after_combine =
4668 {
4669 RTL_PASS, /* type */
4670 "ce2", /* name */
4671 OPTGROUP_NONE, /* optinfo_flags */
4672 TV_IFCVT, /* tv_id */
4673 0, /* properties_required */
4674 0, /* properties_provided */
4675 0, /* properties_destroyed */
4676 0, /* todo_flags_start */
4677 TODO_df_finish, /* todo_flags_finish */
4678 };
4679
4680 class pass_if_after_combine : public rtl_opt_pass
4681 {
4682 public:
4683 pass_if_after_combine (gcc::context *ctxt)
4684 : rtl_opt_pass (pass_data_if_after_combine, ctxt)
4685 {}
4686
4687 /* opt_pass methods: */
4688 virtual bool gate (function *)
4689 {
4690 return optimize > 0 && flag_if_conversion
4691 && dbg_cnt (if_after_combine);
4692 }
4693
4694 virtual unsigned int execute (function *)
4695 {
4696 if_convert (true);
4697 return 0;
4698 }
4699
4700 }; // class pass_if_after_combine
4701
4702 } // anon namespace
4703
4704 rtl_opt_pass *
4705 make_pass_if_after_combine (gcc::context *ctxt)
4706 {
4707 return new pass_if_after_combine (ctxt);
4708 }
4709
4710
4711 namespace {
4712
4713 const pass_data pass_data_if_after_reload =
4714 {
4715 RTL_PASS, /* type */
4716 "ce3", /* name */
4717 OPTGROUP_NONE, /* optinfo_flags */
4718 TV_IFCVT2, /* tv_id */
4719 0, /* properties_required */
4720 0, /* properties_provided */
4721 0, /* properties_destroyed */
4722 0, /* todo_flags_start */
4723 TODO_df_finish, /* todo_flags_finish */
4724 };
4725
4726 class pass_if_after_reload : public rtl_opt_pass
4727 {
4728 public:
4729 pass_if_after_reload (gcc::context *ctxt)
4730 : rtl_opt_pass (pass_data_if_after_reload, ctxt)
4731 {}
4732
4733 /* opt_pass methods: */
4734 virtual bool gate (function *)
4735 {
4736 return optimize > 0 && flag_if_conversion2
4737 && dbg_cnt (if_after_reload);
4738 }
4739
4740 virtual unsigned int execute (function *)
4741 {
4742 if_convert (true);
4743 return 0;
4744 }
4745
4746 }; // class pass_if_after_reload
4747
4748 } // anon namespace
4749
4750 rtl_opt_pass *
4751 make_pass_if_after_reload (gcc::context *ctxt)
4752 {
4753 return new pass_if_after_reload (ctxt);
4754 }