/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This is the pathetic remnant of the old fame of the jump-optimization
   pass of the compiler.  Now it contains basically a set of utility
   functions to operate on jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.  For return insns, it contains either a
   RETURN or a SIMPLE_RETURN rtx.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
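
/* For example, for the insns

       (jump_insn 10 ... (set (pc) (label_ref 12)) ...)
       ...
       (code_label 12 ...)

   JUMP_LABEL of insn 10 is code_label 12, and the reference is counted
   in LABEL_NUSES of that label (the insn numbers here are arbitrary).
   Once every such jump is deleted and LABEL_NUSES drops back to zero,
   the label itself is known to be unused.  */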

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "basic-block.h"
#include "expr.h"
#include "except.h"
#include "diagnostic-core.h"
#include "reload.h"
#include "predict.h"
#include "tree-pass.h"
#include "target.h"

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

static void init_label_info (rtx);
static void mark_all_labels (rtx);
static void mark_jump_label_1 (rtx, rtx, bool, bool);
static void mark_jump_label_asm (rtx, rtx);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);
static int returnjump_p_1 (rtx *, void *);
\f
/* Worker for rebuild_jump_labels and rebuild_jump_labels_chain.  */
static void
rebuild_jump_labels_1 (rtx f, bool count_forced)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  if (count_forced)
    for (insn = forced_labels; insn; insn = XEXP (insn, 1))
      if (LABEL_P (XEXP (insn, 0)))
        LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}

/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).  */
void
rebuild_jump_labels (rtx f)
{
  rebuild_jump_labels_1 (f, true);
}

/* This function is like rebuild_jump_labels, but doesn't run over
   forced_labels.  It can be used on insn chains that aren't the
   main function chain.  */
void
rebuild_jump_labels_chain (rtx chain)
{
  rebuild_jump_labels_1 (chain, false);
}
\f
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */
static unsigned int
cleanup_barriers (void)
{
  rtx insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (BARRIER_P (insn))
        {
          rtx prev = prev_nonnote_insn (insn);
          if (!prev)
            continue;

          if (CALL_P (prev))
            {
              /* Make sure we do not split a call and its corresponding
                 CALL_ARG_LOCATION note.  */
              rtx next = NEXT_INSN (prev);

              if (NOTE_P (next)
                  && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
                prev = next;
            }

          if (BARRIER_P (prev))
            delete_insn (insn);
          else if (prev != PREV_INSN (insn))
            reorder_insns_nobb (insn, insn, prev);
        }
    }
  return 0;
}
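
/* As an illustration, an insn stream of the shape

       insn, note, barrier, barrier

   becomes

       insn, barrier, note

   after this pass: the first BARRIER is moved up next to the last real
   insn, and the now-duplicate BARRIER that follows it is deleted.  */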

namespace {

const pass_data pass_data_cleanup_barriers =
{
  RTL_PASS, /* type */
  "barriers", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cleanup_barriers : public rtl_opt_pass
{
public:
  pass_cleanup_barriers (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cleanup_barriers, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return cleanup_barriers (); }

}; // class pass_cleanup_barriers

} // anon namespace

rtl_opt_pass *
make_pass_cleanup_barriers (gcc::context *ctxt)
{
  return new pass_cleanup_barriers (ctxt);
}

\f
/* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
   for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
   notes whose labels don't occur in the insn any more.  */

static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (LABEL_P (insn))
        LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);

      /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
         sticky and not reset here; that way we won't lose association
         with a label when e.g. the source for a target register
         disappears out of reach for targets that may use jump-target
         registers.  Jump transformations are supposed to transform
         any REG_LABEL_TARGET notes.  The target label reference in a
         branch may disappear from the branch (and from the
         instruction before it) for other reasons, like register
         allocation.  */

      if (INSN_P (insn))
        {
          rtx note, next;

          for (note = REG_NOTES (insn); note; note = next)
            {
              next = XEXP (note, 1);
              if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
                  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
                remove_note (insn, note);
            }
        }
    }
}

/* A subroutine of mark_all_labels.  Trivially propagate a simple label
   load into a jump_insn that uses it.  */

static void
maybe_propagate_label_ref (rtx jump_insn, rtx prev_nonjump_insn)
{
  rtx label_note, pc, pc_src;

  pc = pc_set (jump_insn);
  pc_src = pc != NULL ? SET_SRC (pc) : NULL;
  label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

  /* If the previous non-jump insn sets something to a label,
     something that this jump insn uses, make that label the primary
     target of this insn if we don't yet have any.  That previous
     insn must be a single_set and not refer to more than one label.
     The jump insn must not refer to other labels as jump targets
     and must be a plain (set (pc) ...), maybe in a parallel, and
     may refer to the item being set only directly or as one of the
     arms in an IF_THEN_ELSE.  */

  if (label_note != NULL && pc_src != NULL)
    {
      rtx label_set = single_set (prev_nonjump_insn);
      rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;

      if (label_set != NULL
          /* The source must be the direct LABEL_REF, not a
             PLUS, UNSPEC, IF_THEN_ELSE etc.  */
          && GET_CODE (SET_SRC (label_set)) == LABEL_REF
          && (rtx_equal_p (label_dest, pc_src)
              || (GET_CODE (pc_src) == IF_THEN_ELSE
                  && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
                      || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
        {
          /* The CODE_LABEL referred to in the note must be the
             CODE_LABEL in the LABEL_REF of the "set".  We can
             conveniently use it for the marker function, which
             requires a LABEL_REF wrapping.  */
          gcc_assert (XEXP (label_note, 0) == XEXP (SET_SRC (label_set), 0));

          mark_jump_label_1 (label_set, jump_insn, false, true);

          gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
        }
    }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
        {
          /* In cfglayout mode, we don't bother with trivial next-insn
             propagation of LABEL_REFs into JUMP_LABEL.  This will be
             handled by other optimizers using better algorithms.  */
          FOR_BB_INSNS (bb, insn)
            {
              gcc_assert (! INSN_DELETED_P (insn));
              if (NONDEBUG_INSN_P (insn))
                mark_jump_label (PATTERN (insn), insn, 0);
            }

          /* In cfglayout mode, there may be non-insns between the
             basic blocks.  If those non-insns represent tablejump data,
             they contain label references that we must record.  */
          for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
            if (JUMP_TABLE_DATA_P (insn))
              mark_jump_label (PATTERN (insn), insn, 0);
          for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
            if (JUMP_TABLE_DATA_P (insn))
              mark_jump_label (PATTERN (insn), insn, 0);
        }
    }
  else
    {
      rtx prev_nonjump_insn = NULL;
      for (insn = f; insn; insn = NEXT_INSN (insn))
        {
          if (INSN_DELETED_P (insn))
            ;
          else if (LABEL_P (insn))
            prev_nonjump_insn = NULL;
          else if (JUMP_TABLE_DATA_P (insn))
            mark_jump_label (PATTERN (insn), insn, 0);
          else if (NONDEBUG_INSN_P (insn))
            {
              mark_jump_label (PATTERN (insn), insn, 0);
              if (JUMP_P (insn))
                {
                  if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
                    maybe_propagate_label_ref (insn, prev_nonjump_insn);
                }
              else
                prev_nonjump_insn = insn;
            }
        }
    }
}
\f
/* Given a comparison (CODE ARG0 ARG1), inside an insn INSN, return the
   code of the reversed comparison if it is possible to do so.  Otherwise
   return UNKNOWN.  UNKNOWN may be returned in case we have a CC_MODE
   compare and we don't know whether its source is a floating point or an
   integer comparison.  The machine description should define the
   REVERSIBLE_CC_MODE and REVERSE_CONDITION macros to help this function
   avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
                                const_rtx arg1, const_rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us with a way to
     reverse the comparison.  Give it priority over everything else to
     allow the machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#else
      return reverse_condition (code);
#endif
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
         point.  Similarly the unsigned comparisons are never used for
         floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* If we already see an unordered comparison, we can be sure we
         are dealing with floating point, so we don't need any more
         tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      const_rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      /* These CONST_CAST's are okay because prev_nonnote_insn just
         returns its argument and we assign it to a const_rtx
         variable.  */
      for (prev = prev_nonnote_insn (CONST_CAST_RTX (insn));
           prev != 0 && !LABEL_P (prev);
           prev = prev_nonnote_insn (CONST_CAST_RTX (prev)))
        {
          const_rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for
                 the i387 model of comparisons, which first moves flag
                 registers around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
            }
          /* If the register is clobbered in some way we don't
             understand, give up.  */
          if (set)
            return UNKNOWN;
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (CONST_INT_P (arg0)
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
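
/* Some concrete cases of the above: reversing (eq x y) yields NE and
   reversing (ltu x y) yields GEU in any mode, since equality tests and
   unsigned comparisons are safe even for IEEE floats.  By contrast,
   (gt x y) in a float mode that honors NaNs cannot be reversed to
   (le x y) -- both are false when an operand is a NaN -- so the
   function returns UNKNOWN in that case.  (The correct IEEE reversal
   would be UNLE, which reverse_condition_maybe_unordered produces when
   the caller knows unordered comparisons are usable.)  */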

/* A wrapper around the previous function to take COMPARISON as an rtx
   expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (const_rtx comparison, const_rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
                                         XEXP (comparison, 0),
                                         XEXP (comparison, 1), insn);
}

/* Return a comparison with the reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
reversed_comparison (const_rtx exp, enum machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}

\f
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling NaNs in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}

/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}
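
/* The mappings above follow from IEEE semantics: an ordered comparison
   such as GT is false whenever either operand is a NaN, so its negation
   must be true in that case.  E.g. GT maps to UNLE, since "not (a > b)"
   holds when a < b, when a == b, and when a and b are unordered --
   exactly UNLE (unordered, less than, or equal).  */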

/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      gcc_unreachable ();
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      gcc_unreachable ();
    }
}
\f
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to
     reverse comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
        return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
          || code2 == ORDERED)
        return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
        return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
        return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
        return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
        return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
        return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
        return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
          || code2 == UNGE || code2 == UNGT)
        return 1;
      break;

    default:
      break;
    }

  return 0;
}
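
/* For instance, comparison_dominates_p (LT, LE) is 1, since a < b
   implies a <= b, but comparison_dominates_p (LE, LT) is 0: a <= b
   does not imply a < b.  LT also dominates ORDERED, because a
   non-signaling LT can only be true when its operands are ordered.  */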
\f
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (const_rtx insn)
{
  return (JUMP_P (insn)
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (const_rtx insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
            && ((GET_CODE (XEXP (x, 2)) == PC
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
                     || ANY_RETURN_P (XEXP (x, 1))))
                || (GET_CODE (XEXP (x, 1)) == PC
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
                        || ANY_RETURN_P (XEXP (x, 2))))));
}
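
/* In RTL terms, the two shapes accepted above are the direct jump

       (set (pc) (label_ref L))

   and the conditional branch

       (set (pc) (if_then_else (COND) (label_ref L) (pc)))

   or its mirror image with the label in the ELSE arm; a RETURN or
   SIMPLE_RETURN may stand in for the LABEL_REF.  A compare-and-branch
   bundled into a PARALLEL is rejected; see condjump_in_parallel_p and
   any_condjump_p below.  L and COND are placeholders here.  */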

/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (const_rtx insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
          || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
          || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
    return 1;
  return 0;
}

/* Return the SET that sets PC, if INSN has one; otherwise NULL.  */

rtx
pc_set (const_rtx insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}
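
/* So for a jump whose body is, for illustration,

       (parallel [(set (pc) (if_then_else ...))
                  (clobber (reg:CC 17))])

   pc_set returns the inner SET, while it returns NULL_RTX when the
   pattern (or the first element of the PARALLEL) does not set the pc.
   The clobbered register number above is made up for the example.  */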

/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (const_rtx insn)
{
  const_rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}

/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects, so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (const_rtx insn)
{
  const_rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
          || (a == PC
              && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
}

/* Return the label of a conditional jump.  */

rtx
condjump_label (const_rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

/* Worker for returnjump_p: return true if *LOC is a return-like rtx
   (RETURN, SIMPLE_RETURN, EH_RETURN, or a SET marked with
   SET_IS_RETURN_P).  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  if (x == NULL)
    return false;

  switch (GET_CODE (x))
    {
    case RETURN:
    case SIMPLE_RETURN:
    case EH_RETURN:
      return true;

    case SET:
      return SET_IS_RETURN_P (x);

    default:
      return false;
    }
}

/* Return TRUE if INSN is a return jump.  */

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}

/* Worker for eh_returnjump_p: return true if *LOC is an EH_RETURN
   rtx.  */

static int
eh_returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  return *loc && GET_CODE (*loc) == EH_RETURN;
}

/* Return true if INSN is a jump that performs an exception-handler
   return.  */

int
eh_returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), eh_returnjump_p_1, NULL);
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (const_rtx insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

/* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
   NULL or a return.  */
bool
jump_to_label_p (rtx insn)
{
  return (JUMP_P (insn)
          && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
}

#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          if (GET_CODE (XVECEXP (x, 0, i)) == SET
              && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
            sets_cc0 = 1;
          else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
            other_things = 1;
        }
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif
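
/* On a cc0 target, a typical compare followed by a branch looks like

       (insn ... (set (cc0) (compare (reg:SI 1) (const_int 0))))
       (jump_insn ... (set (pc) (if_then_else (ne (cc0) (const_int 0))
                                              (label_ref L) (pc))))

   For the first insn, sets_cc0_p returns 1 and only_sets_cc0_p is
   nonzero; had the SET been wrapped in a PARALLEL together with a
   second SET of some other register, sets_cc0_p would return -1.
   The register number and label above are arbitrary.  */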
\f
/* Find all CODE_LABELs referred to in X, and increment their use
   counts.  If INSN is a JUMP_INSN and there is at least one
   CODE_LABEL referenced in INSN as a jump target, then store the last
   one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
   for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
   notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
   a JUMP_INSN, and there is at least one CODE_LABEL referenced in
   INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
   For returnjumps, the JUMP_LABEL will also be set as appropriate.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  rtx asmop = extract_asm_operands (x);
  if (asmop)
    mark_jump_label_asm (asmop, insn);
  else
    mark_jump_label_1 (x, insn, in_mem != 0,
                       (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
}
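
/* To make the distinction concrete: running this over

       (jump_insn ... (set (pc) (if_then_else ... (label_ref L1) (pc))))

   sets JUMP_LABEL of the insn to L1, whereas an ordinary insn such as

       (insn ... (set (reg R) (label_ref L2)))

   gets a REG_LABEL_OPERAND note for L2 instead; in both cases the
   label's LABEL_NUSES count is incremented.  L1, L2 and R are
   placeholder names for this example.  */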

/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.  */

static void
mark_jump_label_1 (rtx x, rtx insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CLOBBER:
    case CALL:
      return;

    case RETURN:
    case SIMPLE_RETURN:
      if (is_target)
        {
          gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
          JUMP_LABEL (insn) = x;
        }
      return;

    case MEM:
      in_mem = true;
      break;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
        mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
                         XVECEXP (x, 0, i), 0);
      return;

    case SYMBOL_REF:
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
         non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
        break;
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
        rtx label = XEXP (x, 0);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (NOTE_P (label)
            && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
          break;

        gcc_assert (LABEL_P (label));

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        XEXP (x, 0) = label;
        if (! insn || ! INSN_DELETED_P (insn))
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (is_target
                /* Do not change a previous setting of JUMP_LABEL.  If the
                   JUMP_LABEL slot is occupied by a different label,
                   create a note for this label.  */
                && (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
              JUMP_LABEL (insn) = label;
            else
              {
                enum reg_note kind
                  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

                /* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
                   for LABEL unless there already is one.  All uses of
                   a label, except for the primary target of a jump,
                   must have such a note.  */
                if (! find_reg_note (insn, kind, label))
                  add_reg_note (insn, kind, label);
              }
          }
        return;
      }

      /* Do walk the labels in a vector, but not the first operand of an
         ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
        {
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL_RTX, in_mem,
                               is_target);
        }
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
                               is_target);
        }
    }
}

/* Worker function for mark_jump_label.  Handle asm insns specially.
   In particular, output operands need not be considered so we can
   avoid re-scanning the replicated asm_operand.  Also, the asm_labels
   need to be considered targets.  */

static void
mark_jump_label_asm (rtx asmop, rtx insn)
{
  int i;

  for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);

  for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
    mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
}
\f
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Use of this function is deprecated.  Use delete_insn instead, and a
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If this is a call, then we have to remove the var tracking note
     for the call arguments.  */

  if (CALL_P (insn)
      || (NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE
          && CALL_P (XVECEXP (PATTERN (insn), 0, 0))))
    {
      rtx p;

      for (p = next && INSN_DELETED_P (next) ? NEXT_INSN (next) : next;
           p && NOTE_P (p);
           p = NEXT_INSN (p))
        if (NOTE_KIND (p) == NOTE_INSN_CALL_ARG_LOCATION)
          {
            remove_insn (p);
            break;
          }
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (jump_to_label_p (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
        /* This can delete NEXT or PREV,
           either directly if NEXT is JUMP_LABEL (INSN),
           or indirectly through more levels of jumps.  */
        delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
          delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
           || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && LABEL_P (XEXP (note, 0)))
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
        {
          code = GET_CODE (next);
          if (code == NOTE)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          /* Keep the (use (insn))s created by dbr_schedule, which needs
             them in order to track liveness relative to a previous
             barrier.  */
          else if (INSN_P (next)
                   && GET_CODE (PATTERN (next)) == USE
                   && INSN_P (XEXP (PATTERN (next), 0)))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);
  return next;
}
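
/* As an example of the cascade: deleting the only jump to a label
   drops that label's LABEL_NUSES to zero, so the label is deleted in
   turn, and if a BARRIER precedes the label, the now-unreachable insns
   after it are deleted as well, up to the next label that is still
   used.  */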
\f
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
        {
          INSN_DELETED_P (insn) = 1;

          /* Patch this insn out of the chain.  */
          /* We don't do this all at once, because we
             must preserve all NOTEs.  */
          if (prev)
            NEXT_INSN (prev) = next;

          if (next)
            PREV_INSN (next) = prev;
        }

      if (insn == to)
        break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
\f
/* A helper function for redirect_exp_1; examines its input X and returns
   either a LABEL_REF wrapped around X if X is a label, X itself if it is
   already a RETURN or SIMPLE_RETURN, or a RETURN if X was NULL.  */
static rtx
redirect_target (rtx x)
{
  if (x == NULL_RTX)
    return ret_rtx;
  if (!ANY_RETURN_P (x))
    return gen_rtx_LABEL_REF (Pmode, x);
  return x;
}

/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if ((code == LABEL_REF && XEXP (x, 0) == olabel)
      || x == olabel)
    {
      x = redirect_target (nlabel);
      if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
        x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && SET_DEST (x) == pc_rtx
      && ANY_RETURN_P (nlabel)
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, nlabel, 1);
      return;
    }

  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not eventual label comparisons.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}

/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc, asmop;

  gcc_assert (nlabel != NULL_RTX);
  asmop = extract_asm_operands (PATTERN (jump));
  if (asmop)
    {
      if (nlabel == NULL)
        return 0;
      gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
      loc = &ASM_OPERANDS_LABEL (asmop, 0);
    }
  else if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   Normally, NLABEL will be a label, but it may also be a RETURN rtx;
   in that case we are to turn the jump into a (possibly conditional)
   return insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur when trying to produce return insns).  */

int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (!nlabel)
    {
      /* If there is no label, we are asked to redirect to the EXIT block.
         Before the epilogue is emitted, return/simple_return cannot be
         created, so we return 0 immediately.  After the epilogue is
         emitted, we always expect a label, either a non-null label or a
         return/simple_return RTX.  */

      if (!epilogue_completed)
        return 0;
      gcc_unreachable ();
    }

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}
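
/* A typical use, sketched with placeholder variables JUMP and
   NEW_LABEL standing for a jump insn and code label the caller
   already has:

       if (! redirect_jump (jump, new_label, 1))
         give_up ();

   Passing 1 for DELETE_UNUSED allows the old target label, and any
   code reachable only through it, to be removed once its reference
   count drops to zero.  Callers that batch several changes instead
   call redirect_jump_1 repeatedly and then apply_change_group.
   give_up above is a stand-in for whatever the caller does on
   failure.  */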

/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete OLABEL and the insns related to it
   if its reference count has dropped to zero.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
                 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* Negative DELETE_UNUSED used to signal behavior regarding moving the
     FUNCTION_END note.  Just sanity-check that no caller still worries
     about this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (!ANY_RETURN_P (nlabel))
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (ANY_RETURN_P (nlabel)
          || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
        remove_note (jump, note);
      else
        {
          redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
          confirm_change_group ();
        }
    }

  /* Handle the case where we had a conditional crossing jump to a return
     label and are now changing it into a direct conditional return.
     The jump is no longer crossing in that case.  */
  if (ANY_RETURN_P (nlabel))
    CROSSING_JUMP_P (jump) = 0;

  if (!ANY_RETURN_P (olabel)
      && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}

/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
         be done if this is not an integer comparison, is to reverse
         the comparison code.  Otherwise, swap the THEN-part and ELSE-part
         of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
        {
          validate_change (insn, &XEXP (x, 0),
                           gen_rtx_fmt_ee (reversed_code,
                                           GET_MODE (comp), XEXP (comp, 0),
                                           XEXP (comp, 1)),
                           1);
          return 1;
        }

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  if (x == NULL)
    return 0;
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use
     is in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}
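
/* For example, inverting

       (set (pc) (if_then_else (eq (reg) (const_int 0))
                               (label_ref OLD) (pc)))

   with NLABEL = NEW produces

       (set (pc) (if_then_else (ne (reg) (const_int 0))
                               (label_ref NEW) (pc)))

   and invert_br_probabilities adjusts the branch probability note to
   match.  OLD and NEW are placeholder labels.  */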

\f
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
                        && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;
      struct subreg_info info;

      if (GET_MODE (x) != GET_MODE (y))
        return 0;

      /* If we haven't done any renumbering, don't
         make any assumptions.  */
      if (reg_renumber == 0)
        return rtx_equal_p (x, y);

      if (code == SUBREG)
        {
          reg_x = REGNO (SUBREG_REG (x));
          byte_x = SUBREG_BYTE (x);

          if (reg_renumber[reg_x] >= 0)
            {
              subreg_get_info (reg_renumber[reg_x],
                               GET_MODE (SUBREG_REG (x)), byte_x,
                               GET_MODE (x), &info);
              if (!info.representable_p)
                return 0;
              reg_x = info.offset;
              byte_x = 0;
            }
        }
      else
        {
          reg_x = REGNO (x);
          if (reg_renumber[reg_x] >= 0)
            reg_x = reg_renumber[reg_x];
        }

      if (GET_CODE (y) == SUBREG)
        {
          reg_y = REGNO (SUBREG_REG (y));
          byte_y = SUBREG_BYTE (y);

          if (reg_renumber[reg_y] >= 0)
            {
              subreg_get_info (reg_renumber[reg_y],
                               GET_MODE (SUBREG_REG (y)), byte_y,
                               GET_MODE (y), &info);
              if (!info.representable_p)
                return 0;
              reg_y = info.offset;
              byte_y = 0;
            }
        }
      else
        {
          reg_y = REGNO (y);
          if (reg_renumber[reg_y] >= 0)
            reg_y = reg_renumber[reg_y];
        }

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    CASE_CONST_UNIQUE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
        return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
         in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
              == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address spaces are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            {
              if (((code == ASM_OPERANDS && i == 6)
                   || (code == ASM_INPUT && i == 1)))
                break;
              return 0;
            }
          break;

        case 't':
          if (XTREE (x, i) != XTREE (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'u':
          if (XEXP (x, i) != XEXP (y, i))
            return 0;
          /* Fall through.  */
        case '0':
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
\f
/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER
          && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
        return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
          && base < FIRST_PSEUDO_REGISTER)
        {
          struct subreg_info info;

          subreg_get_info (lra_in_progress
                           ? (unsigned) base : REGNO (SUBREG_REG (x)),
                           GET_MODE (SUBREG_REG (x)),
                           SUBREG_BYTE (x), GET_MODE (x), &info);

          if (info.representable_p)
            return base + info.offset;
        }
    }
  return -1;
}
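
/* For instance, if pseudo 100 was assigned hard register 3, then
   true_regnum of (reg:SI 100) is 3; for an unassigned pseudo it is
   simply the pseudo's own number; and for (subreg:SI (reg:DI 3) 4) it
   is the hard register holding the high word -- 4 on a target whose
   SImode words occupy consecutive registers -- provided the subreg is
   representable.  All register numbers here are illustrative.  */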

/* Return the regno of the register REG, handling subregs too.  */
unsigned int
reg_or_subregno (const_rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}