jump.c (mark_jump_label): Treat SEQUENCE specially.
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23 /* This is the pathetic remnant of the once-famous jump-optimization pass
24 of the compiler. Now it contains basically a set of utility functions to
25 operate on jumps.
26
27 Each CODE_LABEL has a count of the times it is used
28 stored in the LABEL_NUSES internal field, and each JUMP_INSN
29 has one label that it refers to stored in the
30 JUMP_LABEL internal field. With this we can detect labels that
31 become unused because of the deletion of all the jumps that
32 formerly used them. The JUMP_LABEL info is sometimes looked
33 at by later passes.
34
35 The subroutines redirect_jump and invert_jump are used
36 from other passes as well. */
37
38 #include "config.h"
39 #include "system.h"
40 #include "coretypes.h"
41 #include "tm.h"
42 #include "rtl.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "hard-reg-set.h"
46 #include "regs.h"
47 #include "insn-config.h"
48 #include "insn-attr.h"
49 #include "recog.h"
50 #include "function.h"
51 #include "expr.h"
52 #include "real.h"
53 #include "except.h"
54 #include "diagnostic.h"
55 #include "toplev.h"
56 #include "reload.h"
57 #include "predict.h"
58 #include "timevar.h"
59 #include "tree-pass.h"
60 #include "target.h"
61
62 /* Optimize jump y; x: ... y: jumpif... x?
63 Don't know if it is worth bothering with. */
64 /* Optimize two cases of conditional jump to conditional jump?
65 This can never delete any instruction or make anything dead,
66 or even change what is live at any point.
67 So perhaps let combiner do it. */
68
69 static void init_label_info (rtx);
70 static void mark_all_labels (rtx);
71 static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
72 static int invert_exp_1 (rtx, rtx);
73 static int returnjump_p_1 (rtx *, void *);
74 \f
75 /* Alternate entry into the jump optimizer. This entry point only rebuilds
76 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
77 instructions. */
78 void
79 rebuild_jump_labels (rtx f)
80 {
81 rtx insn;
82
83 timevar_push (TV_REBUILD_JUMP);
84 init_label_info (f);
85 mark_all_labels (f);
86
87 /* Keep track of labels used from static data; we don't track them
88 closely enough to delete them here, so make sure their reference
89 count doesn't drop to zero. */
90
91 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
92 if (LABEL_P (XEXP (insn, 0)))
93 LABEL_NUSES (XEXP (insn, 0))++;
94 timevar_pop (TV_REBUILD_JUMP);
95 }
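
/* Usage note (illustrative, not part of the original file): a pass that
   rewrites the insn stream wholesale can call
   rebuild_jump_labels (get_insns ()) to recompute LABEL_NUSES and
   JUMP_LABEL from scratch before relying on them again.  */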
96 \f
97 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
98 non-fallthru insn. This is not generally true, as multiple barriers
99 may have crept in, or the BARRIER may be separated from the last
100 real insn by one or more NOTEs.
101
102 This simple pass moves barriers and removes duplicates so that the
103 old code is happy.
104 */
105 unsigned int
106 cleanup_barriers (void)
107 {
108 rtx insn, next, prev;
109 for (insn = get_insns (); insn; insn = next)
110 {
111 next = NEXT_INSN (insn);
112 if (BARRIER_P (insn))
113 {
114 prev = prev_nonnote_insn (insn);
115 if (BARRIER_P (prev))
116 delete_insn (insn);
117 else if (prev != PREV_INSN (insn))
118 reorder_insns (insn, insn, prev);
119 }
120 }
121 return 0;
122 }
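
/* Worked example (illustrative; insn numbers are made up): given

     (jump_insn 10 ...)
     (note 11 ... NOTE_INSN_DELETED)
     (barrier 12)
     (barrier 13)

   the loop above first moves barrier 12 up next to jump_insn 10, then
   sees that barrier 13 now follows another barrier (notes are skipped)
   and deletes it, leaving

     (jump_insn 10 ...)
     (barrier 12)
     (note 11 ... NOTE_INSN_DELETED)  */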
123
124 struct tree_opt_pass pass_cleanup_barriers =
125 {
126 "barriers", /* name */
127 NULL, /* gate */
128 cleanup_barriers, /* execute */
129 NULL, /* sub */
130 NULL, /* next */
131 0, /* static_pass_number */
132 0, /* tv_id */
133 0, /* properties_required */
134 0, /* properties_provided */
135 0, /* properties_destroyed */
136 0, /* todo_flags_start */
137 TODO_dump_func, /* todo_flags_finish */
138 0 /* letter */
139 };
140
141 \f
142 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
143 notes whose labels don't occur in the insn any more. */
145 static void
146 init_label_info (rtx f)
147 {
148 rtx insn;
149
150 for (insn = f; insn; insn = NEXT_INSN (insn))
151 if (LABEL_P (insn))
152 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
153 else if (JUMP_P (insn))
154 JUMP_LABEL (insn) = 0;
155 else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
156 {
157 rtx note, next;
158
159 for (note = REG_NOTES (insn); note; note = next)
160 {
161 next = XEXP (note, 1);
162 if (REG_NOTE_KIND (note) == REG_LABEL
163 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
164 remove_note (insn, note);
165 }
166 }
167 }
168
169 /* Mark the label each jump jumps to.
170 Combine consecutive labels, and count uses of labels. */
171
172 static void
173 mark_all_labels (rtx f)
174 {
175 rtx insn;
176
177 for (insn = f; insn; insn = NEXT_INSN (insn))
178 if (INSN_P (insn))
179 {
180 mark_jump_label (PATTERN (insn), insn, 0);
181 if (! INSN_DELETED_P (insn) && JUMP_P (insn))
182 {
183 /* When we know the LABEL_REF contained in a REG used in
184 an indirect jump, we'll have a REG_LABEL note so that
185 flow can tell where it's going. */
186 if (JUMP_LABEL (insn) == 0)
187 {
188 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
189 if (label_note)
190 {
191 /* Put a LABEL_REF around the REG_LABEL note, so
192 that we can canonicalize it. */
193 rtx label_ref = gen_rtx_LABEL_REF (Pmode,
194 XEXP (label_note, 0));
195
196 mark_jump_label (label_ref, insn, 0);
197 XEXP (label_note, 0) = XEXP (label_ref, 0);
198 JUMP_LABEL (insn) = XEXP (label_note, 0);
199 }
200 }
201 }
202 }
203
204 /* If we are in cfglayout mode, there may be non-insns between the
205 basic blocks. If those non-insns represent tablejump data, they
206 contain label references that we must record. */
207 if (current_ir_type () == IR_RTL_CFGLAYOUT)
208 {
209 basic_block bb;
210 rtx insn;
211 FOR_EACH_BB (bb)
212 {
213 for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
214 if (INSN_P (insn))
215 {
216 gcc_assert (JUMP_TABLE_DATA_P (insn));
217 mark_jump_label (PATTERN (insn), insn, 0);
218 }
219
220 for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
221 if (INSN_P (insn))
222 {
223 gcc_assert (JUMP_TABLE_DATA_P (insn));
224 mark_jump_label (PATTERN (insn), insn, 0);
225 }
226 }
227 }
228 }
229 \f
230 /* Move all block-beg and block-end notes between START and END out
231 before START. START and END may be such notes. Return the values of the
232 new starting and ending insns, which may be different if the original ones
233 were such notes. Return true if there were only such notes and no real
234 instructions. */
235
236 bool
237 squeeze_notes (rtx* startp, rtx* endp)
238 {
239 rtx start = *startp;
240 rtx end = *endp;
241
242 rtx insn;
243 rtx next;
244 rtx last = NULL;
245 rtx past_end = NEXT_INSN (end);
246
247 for (insn = start; insn != past_end; insn = next)
248 {
249 next = NEXT_INSN (insn);
250 if (NOTE_P (insn)
251 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
252 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
253 {
254 /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass. */
255 gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
256 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
257
258 if (insn == start)
259 start = next;
260 else
261 {
262 rtx prev = PREV_INSN (insn);
263 PREV_INSN (insn) = PREV_INSN (start);
264 NEXT_INSN (insn) = start;
265 NEXT_INSN (PREV_INSN (insn)) = insn;
266 PREV_INSN (NEXT_INSN (insn)) = insn;
267 NEXT_INSN (prev) = next;
268 PREV_INSN (next) = prev;
269 }
270 }
271 else
272 last = insn;
273 }
274
275 /* There were no real instructions. */
276 if (start == past_end)
277 return true;
278
279 end = last;
280
281 *startp = start;
282 *endp = end;
283 return false;
284 }
285 \f
286 /* Given a comparison (CODE ARG0 ARG1) inside insn INSN, return the code of
287 the reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
288 UNKNOWN may be returned if we have a CC_MODE compare and we don't
289 know whether its source is a floating point or an integer comparison. The
290 machine description should define the REVERSIBLE_CC_MODE and REVERSE_CONDITION
291 macros to help this function avoid overhead in these cases. */
292 enum rtx_code
293 reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
294 {
295 enum machine_mode mode;
296
297 /* If this is not actually a comparison, we can't reverse it. */
298 if (GET_RTX_CLASS (code) != RTX_COMPARE
299 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
300 return UNKNOWN;
301
302 mode = GET_MODE (arg0);
303 if (mode == VOIDmode)
304 mode = GET_MODE (arg1);
305
306 /* First see if the machine description supplies us a way to reverse the
307 comparison. Give it priority over everything else to allow the
308 machine description to do tricks. */
309 if (GET_MODE_CLASS (mode) == MODE_CC
310 && REVERSIBLE_CC_MODE (mode))
311 {
312 #ifdef REVERSE_CONDITION
313 return REVERSE_CONDITION (code, mode);
314 #endif
315 return reverse_condition (code);
316 }
317
318 /* Try a few special cases based on the comparison code. */
319 switch (code)
320 {
321 case GEU:
322 case GTU:
323 case LEU:
324 case LTU:
325 case NE:
326 case EQ:
327 /* It is always safe to reverse EQ and NE, even for floating
328 point. Similarly, the unsigned comparisons are never used for
329 floating point, so we can reverse them in the default way. */
330 return reverse_condition (code);
331 case ORDERED:
332 case UNORDERED:
333 case LTGT:
334 case UNEQ:
335 /* If we already see an unordered comparison, we can be sure we are
336 dealing with floating point, so we don't need any more tests. */
337 return reverse_condition_maybe_unordered (code);
338 case UNLT:
339 case UNLE:
340 case UNGT:
341 case UNGE:
342 /* We don't have a safe way to reverse these yet. */
343 return UNKNOWN;
344 default:
345 break;
346 }
347
348 if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
349 {
350 rtx prev;
351 /* Try to search for the comparison to determine the real mode.
352 This code is expensive, but with a sane machine description it
353 will never be used, since REVERSIBLE_CC_MODE will return true
354 in all cases. */
355 if (! insn)
356 return UNKNOWN;
357
358 for (prev = prev_nonnote_insn (insn);
359 prev != 0 && !LABEL_P (prev);
360 prev = prev_nonnote_insn (prev))
361 {
362 rtx set = set_of (arg0, prev);
363 if (set && GET_CODE (set) == SET
364 && rtx_equal_p (SET_DEST (set), arg0))
365 {
366 rtx src = SET_SRC (set);
367
368 if (GET_CODE (src) == COMPARE)
369 {
370 rtx comparison = src;
371 arg0 = XEXP (src, 0);
372 mode = GET_MODE (arg0);
373 if (mode == VOIDmode)
374 mode = GET_MODE (XEXP (comparison, 1));
375 break;
376 }
377 /* We can get past reg-reg moves. This may be useful for models
378 of i387 comparisons that first move flag registers around. */
379 if (REG_P (src))
380 {
381 arg0 = src;
382 continue;
383 }
384 }
385 /* If the register is clobbered in some way we don't
386 understand, give up. */
387 if (set)
388 return UNKNOWN;
389 }
390 }
391
392 /* Test for an integer condition, or a floating-point comparison
393 in which NaNs can be ignored. */
394 if (GET_CODE (arg0) == CONST_INT
395 || (GET_MODE (arg0) != VOIDmode
396 && GET_MODE_CLASS (mode) != MODE_CC
397 && !HONOR_NANS (mode)))
398 return reverse_condition (code);
399
400 return UNKNOWN;
401 }
402
403 /* A wrapper around the previous function to take COMPARISON as an rtx
404 expression. This simplifies many callers. */
405 enum rtx_code
406 reversed_comparison_code (rtx comparison, rtx insn)
407 {
408 if (!COMPARISON_P (comparison))
409 return UNKNOWN;
410 return reversed_comparison_code_parts (GET_CODE (comparison),
411 XEXP (comparison, 0),
412 XEXP (comparison, 1), insn);
413 }
414
415 /* Return a comparison with the reversed code of EXP.
416 Return NULL_RTX if we fail to do the reversal. */
417 rtx
418 reversed_comparison (rtx exp, enum machine_mode mode)
419 {
420 enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
421 if (reversed_code == UNKNOWN)
422 return NULL_RTX;
423 else
424 return simplify_gen_relational (reversed_code, mode, VOIDmode,
425 XEXP (exp, 0), XEXP (exp, 1));
426 }
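
/* A minimal usage sketch (illustrative; the helper is hypothetical and
   kept under #if 0 so it does not affect the build): a pass that wants
   to invert the sense of a conditional branch first checks that the
   comparison is reversible at all.  */
#if 0
static bool
branch_reversible_p (rtx jump)
{
  rtx set = pc_set (jump);

  if (!set || GET_CODE (SET_SRC (set)) != IF_THEN_ELSE)
    return false;
  /* UNKNOWN means e.g. a CC-mode compare whose defining instruction
     could not be found.  */
  return reversed_comparison_code (XEXP (SET_SRC (set), 0), jump) != UNKNOWN;
}
#endif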
427
428 \f
429 /* Given an rtx-code for a comparison, return the code for the negated
430 comparison. If no such code exists, return UNKNOWN.
431
432 WATCH OUT! reverse_condition is not safe to use on a jump that might
433 be acting on the results of an IEEE floating point comparison, because
434 of the special treatment of non-signaling nans in comparisons.
435 Use reversed_comparison_code instead. */
436
437 enum rtx_code
438 reverse_condition (enum rtx_code code)
439 {
440 switch (code)
441 {
442 case EQ:
443 return NE;
444 case NE:
445 return EQ;
446 case GT:
447 return LE;
448 case GE:
449 return LT;
450 case LT:
451 return GE;
452 case LE:
453 return GT;
454 case GTU:
455 return LEU;
456 case GEU:
457 return LTU;
458 case LTU:
459 return GEU;
460 case LEU:
461 return GTU;
462 case UNORDERED:
463 return ORDERED;
464 case ORDERED:
465 return UNORDERED;
466
467 case UNLT:
468 case UNLE:
469 case UNGT:
470 case UNGE:
471 case UNEQ:
472 case LTGT:
473 return UNKNOWN;
474
475 default:
476 gcc_unreachable ();
477 }
478 }
479
480 /* Similar, but we're allowed to generate unordered comparisons, which
481 makes it safe for IEEE floating-point. Of course, we have to recognize
482 that the target will support them too... */
483
484 enum rtx_code
485 reverse_condition_maybe_unordered (enum rtx_code code)
486 {
487 switch (code)
488 {
489 case EQ:
490 return NE;
491 case NE:
492 return EQ;
493 case GT:
494 return UNLE;
495 case GE:
496 return UNLT;
497 case LT:
498 return UNGE;
499 case LE:
500 return UNGT;
501 case LTGT:
502 return UNEQ;
503 case UNORDERED:
504 return ORDERED;
505 case ORDERED:
506 return UNORDERED;
507 case UNLT:
508 return GE;
509 case UNLE:
510 return GT;
511 case UNGT:
512 return LE;
513 case UNGE:
514 return LT;
515 case UNEQ:
516 return LTGT;
517
518 default:
519 gcc_unreachable ();
520 }
521 }
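
/* Worked example (illustrative): under IEEE semantics, !(a > b) is not
   (a <= b), because both tests are false when either operand is a NaN.
   The safe reversal of GT is therefore UNLE ("unordered or less than or
   equal"), which is what this function returns; the plain
   reverse_condition above would yield LE, which mishandles NaNs.  */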
522
523 /* Similar, but return the code when two operands of a comparison are swapped.
524 This IS safe for IEEE floating-point. */
525
526 enum rtx_code
527 swap_condition (enum rtx_code code)
528 {
529 switch (code)
530 {
531 case EQ:
532 case NE:
533 case UNORDERED:
534 case ORDERED:
535 case UNEQ:
536 case LTGT:
537 return code;
538
539 case GT:
540 return LT;
541 case GE:
542 return LE;
543 case LT:
544 return GT;
545 case LE:
546 return GE;
547 case GTU:
548 return LTU;
549 case GEU:
550 return LEU;
551 case LTU:
552 return GTU;
553 case LEU:
554 return GEU;
555 case UNLT:
556 return UNGT;
557 case UNLE:
558 return UNGE;
559 case UNGT:
560 return UNLT;
561 case UNGE:
562 return UNLE;
563
564 default:
565 gcc_unreachable ();
566 }
567 }
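
/* Example (illustrative): (gt:SI a b) describes the same test as
   (lt:SI b a), so swap_condition (GT) == LT.  Note the contrast with
   reverse_condition, which negates the test rather than swapping the
   operands; swapping is safe for IEEE floating point while reversing
   is not.  */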
568
569 /* Given a comparison CODE, return the corresponding unsigned comparison.
570 If CODE is an equality comparison or already an unsigned comparison,
571 CODE is returned. */
572
573 enum rtx_code
574 unsigned_condition (enum rtx_code code)
575 {
576 switch (code)
577 {
578 case EQ:
579 case NE:
580 case GTU:
581 case GEU:
582 case LTU:
583 case LEU:
584 return code;
585
586 case GT:
587 return GTU;
588 case GE:
589 return GEU;
590 case LT:
591 return LTU;
592 case LE:
593 return LEU;
594
595 default:
596 gcc_unreachable ();
597 }
598 }
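
/* Example (illustrative): when both operands are known to be
   nonnegative, a signed test is equivalent to its unsigned counterpart,
   e.g. unsigned_condition (LT) == LTU; equality tests and codes that
   are already unsigned come back unchanged.  */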
599
600 /* Similarly, return the signed version of a comparison. */
601
602 enum rtx_code
603 signed_condition (enum rtx_code code)
604 {
605 switch (code)
606 {
607 case EQ:
608 case NE:
609 case GT:
610 case GE:
611 case LT:
612 case LE:
613 return code;
614
615 case GTU:
616 return GT;
617 case GEU:
618 return GE;
619 case LTU:
620 return LT;
621 case LEU:
622 return LE;
623
624 default:
625 gcc_unreachable ();
626 }
627 }
628 \f
629 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
630 truth of CODE1 implies the truth of CODE2. */
631
632 int
633 comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
634 {
635 /* UNKNOWN comparison codes can happen as a result of trying to reverse
636 comparison codes.
637 They can't match anything, so we have to reject them here. */
638 if (code1 == UNKNOWN || code2 == UNKNOWN)
639 return 0;
640
641 if (code1 == code2)
642 return 1;
643
644 switch (code1)
645 {
646 case UNEQ:
647 if (code2 == UNLE || code2 == UNGE)
648 return 1;
649 break;
650
651 case EQ:
652 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
653 || code2 == ORDERED)
654 return 1;
655 break;
656
657 case UNLT:
658 if (code2 == UNLE || code2 == NE)
659 return 1;
660 break;
661
662 case LT:
663 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
664 return 1;
665 break;
666
667 case UNGT:
668 if (code2 == UNGE || code2 == NE)
669 return 1;
670 break;
671
672 case GT:
673 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
674 return 1;
675 break;
676
677 case GE:
678 case LE:
679 if (code2 == ORDERED)
680 return 1;
681 break;
682
683 case LTGT:
684 if (code2 == NE || code2 == ORDERED)
685 return 1;
686 break;
687
688 case LTU:
689 if (code2 == LEU || code2 == NE)
690 return 1;
691 break;
692
693 case GTU:
694 if (code2 == GEU || code2 == NE)
695 return 1;
696 break;
697
698 case UNORDERED:
699 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
700 || code2 == UNGE || code2 == UNGT)
701 return 1;
702 break;
703
704 default:
705 break;
706 }
707
708 return 0;
709 }
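
/* Example (illustrative): comparison_dominates_p (EQ, GE) is 1, since
   a == b implies a >= b, while comparison_dominates_p (GE, EQ) is 0.
   A pass can use this to prove that a conditional branch on CODE2 is
   redundant when it is only reached with CODE1 known to hold.  */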
710 \f
711 /* Return 1 if INSN is an unconditional jump and nothing else. */
712
713 int
714 simplejump_p (rtx insn)
715 {
716 return (JUMP_P (insn)
717 && GET_CODE (PATTERN (insn)) == SET
718 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
719 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
720 }
721
722 /* Return nonzero if INSN is a (possibly) conditional jump
723 and nothing more.
724
725 Use of this function is deprecated, since we need to support combined
726 branch and compare insns. Use any_condjump_p instead whenever possible. */
727
728 int
729 condjump_p (rtx insn)
730 {
731 rtx x = PATTERN (insn);
732
733 if (GET_CODE (x) != SET
734 || GET_CODE (SET_DEST (x)) != PC)
735 return 0;
736
737 x = SET_SRC (x);
738 if (GET_CODE (x) == LABEL_REF)
739 return 1;
740 else
741 return (GET_CODE (x) == IF_THEN_ELSE
742 && ((GET_CODE (XEXP (x, 2)) == PC
743 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
744 || GET_CODE (XEXP (x, 1)) == RETURN))
745 || (GET_CODE (XEXP (x, 1)) == PC
746 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
747 || GET_CODE (XEXP (x, 2)) == RETURN))));
748 }
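
/* For reference (illustrative RTL, not from the original sources), the
   two PATTERN shapes accepted above are

     (set (pc) (label_ref L))                          ; simplejump

     (set (pc) (if_then_else (lt (reg:CC 17) (const_int 0))
                             (label_ref L)
                             (pc)))                    ; condjump

   where L and register 17 are made up.  A compare-and-branch bundled
   in a PARALLEL is rejected here, which is why any_condjump_p below is
   preferred.  */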
749
750 /* Return nonzero if INSN is a (possibly) conditional jump inside a
751 PARALLEL.
752
753 Use of this function is deprecated, since we need to support combined
754 branch and compare insns. Use any_condjump_p instead whenever possible. */
755
756 int
757 condjump_in_parallel_p (rtx insn)
758 {
759 rtx x = PATTERN (insn);
760
761 if (GET_CODE (x) != PARALLEL)
762 return 0;
763 else
764 x = XVECEXP (x, 0, 0);
765
766 if (GET_CODE (x) != SET)
767 return 0;
768 if (GET_CODE (SET_DEST (x)) != PC)
769 return 0;
770 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
771 return 1;
772 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
773 return 0;
774 if (XEXP (SET_SRC (x), 2) == pc_rtx
775 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
776 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
777 return 1;
778 if (XEXP (SET_SRC (x), 1) == pc_rtx
779 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
780 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
781 return 1;
782 return 0;
783 }
784
785 /* Return the SET of PC in INSN, if any, otherwise NULL. */
786
787 rtx
788 pc_set (rtx insn)
789 {
790 rtx pat;
791 if (!JUMP_P (insn))
792 return NULL_RTX;
793 pat = PATTERN (insn);
794
795 /* The set is allowed to appear either as the insn pattern or
796 the first set in a PARALLEL. */
797 if (GET_CODE (pat) == PARALLEL)
798 pat = XVECEXP (pat, 0, 0);
799 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
800 return pat;
801
802 return NULL_RTX;
803 }
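
/* Example (illustrative): for a branch bundled with a clobber,

     (parallel [(set (pc) (if_then_else ...))
                (clobber (reg:CC 17))])

   pc_set returns the inner SET, while for a bare (set (pc) ...) it
   returns the pattern itself.  Register 17 is made up.  */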
804
805 /* Return true when insn is an unconditional direct jump,
806 possibly bundled inside a PARALLEL. */
807
808 int
809 any_uncondjump_p (rtx insn)
810 {
811 rtx x = pc_set (insn);
812 if (!x)
813 return 0;
814 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
815 return 0;
816 if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
817 return 0;
818 return 1;
819 }
820
821 /* Return true when insn is a conditional jump. This function works for
822 instructions containing PC sets in PARALLELs. The instruction may have
823 various other effects, so before removing the jump you must verify
824 onlyjump_p.
825
826 Note that unlike condjump_p it returns false for unconditional jumps. */
827
828 int
829 any_condjump_p (rtx insn)
830 {
831 rtx x = pc_set (insn);
832 enum rtx_code a, b;
833
834 if (!x)
835 return 0;
836 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
837 return 0;
838
839 a = GET_CODE (XEXP (SET_SRC (x), 1));
840 b = GET_CODE (XEXP (SET_SRC (x), 2));
841
842 return ((b == PC && (a == LABEL_REF || a == RETURN))
843 || (a == PC && (b == LABEL_REF || b == RETURN)));
844 }
845
846 /* Return the label of a conditional jump. */
847
848 rtx
849 condjump_label (rtx insn)
850 {
851 rtx x = pc_set (insn);
852
853 if (!x)
854 return NULL_RTX;
855 x = SET_SRC (x);
856 if (GET_CODE (x) == LABEL_REF)
857 return x;
858 if (GET_CODE (x) != IF_THEN_ELSE)
859 return NULL_RTX;
860 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
861 return XEXP (x, 1);
862 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
863 return XEXP (x, 2);
864 return NULL_RTX;
865 }
866
867 /* Return true if INSN is a (possibly conditional) return insn. */
868
869 static int
870 returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
871 {
872 rtx x = *loc;
873
874 return x && (GET_CODE (x) == RETURN
875 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
876 }
877
878 int
879 returnjump_p (rtx insn)
880 {
881 if (!JUMP_P (insn))
882 return 0;
883 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
884 }
885
886 /* Return true if INSN is a jump that only transfers control and
887 nothing more. */
888
889 int
890 onlyjump_p (rtx insn)
891 {
892 rtx set;
893
894 if (!JUMP_P (insn))
895 return 0;
896
897 set = single_set (insn);
898 if (set == NULL)
899 return 0;
900 if (GET_CODE (SET_DEST (set)) != PC)
901 return 0;
902 if (side_effects_p (SET_SRC (set)))
903 return 0;
904
905 return 1;
906 }
907
908 #ifdef HAVE_cc0
909
910 /* Return nonzero if X is an RTX that only sets the condition codes
911 and has no side effects. */
912
913 int
914 only_sets_cc0_p (rtx x)
915 {
916 if (! x)
917 return 0;
918
919 if (INSN_P (x))
920 x = PATTERN (x);
921
922 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
923 }
924
925 /* Return 1 if X is an RTX that does nothing but set the condition codes
926 and CLOBBER or USE registers.
927 Return -1 if X does explicitly set the condition codes,
928 but also does other things. */
929
930 int
931 sets_cc0_p (rtx x)
932 {
933 if (! x)
934 return 0;
935
936 if (INSN_P (x))
937 x = PATTERN (x);
938
939 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
940 return 1;
941 if (GET_CODE (x) == PARALLEL)
942 {
943 int i;
944 int sets_cc0 = 0;
945 int other_things = 0;
946 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
947 {
948 if (GET_CODE (XVECEXP (x, 0, i)) == SET
949 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
950 sets_cc0 = 1;
951 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
952 other_things = 1;
953 }
954 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
955 }
956 return 0;
957 }
958 #endif
959 \f
960 /* Find all CODE_LABELs referred to in X, and increment their use counts.
961 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
962 in INSN, then store one of them in JUMP_LABEL (INSN).
963 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
964 referenced in INSN, add a REG_LABEL note containing that label to INSN.
965 Also, when there are consecutive labels, canonicalize on the last of them.
966
967 Note that two labels separated by a loop-beginning note
968 must be kept distinct if we have not yet done loop-optimization,
969 because the gap between them is where loop-optimize
970 will want to move invariant code to.
971 IN_MEM is nonzero if X occurs inside a MEM. */
972
973 void
974 mark_jump_label (rtx x, rtx insn, int in_mem)
975 {
976 RTX_CODE code = GET_CODE (x);
977 int i;
978 const char *fmt;
979
980 switch (code)
981 {
982 case PC:
983 case CC0:
984 case REG:
985 case CONST_INT:
986 case CONST_DOUBLE:
987 case CLOBBER:
988 case CALL:
989 return;
990
991 case MEM:
992 in_mem = 1;
993 break;
994
995 case SEQUENCE:
996 for (i = 0; i < XVECLEN (x, 0); i++)
997 mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
998 XVECEXP (x, 0, i), 0);
999 return;
1000
1001 case SYMBOL_REF:
1002 if (!in_mem)
1003 return;
1004
1005 /* If this is a constant-pool reference, see if it is a label. */
1006 if (CONSTANT_POOL_ADDRESS_P (x))
1007 mark_jump_label (get_pool_constant (x), insn, in_mem);
1008 break;
1009
1010 case LABEL_REF:
1011 {
1012 rtx label = XEXP (x, 0);
1013
1014 /* Ignore remaining references to unreachable labels that
1015 have been deleted. */
1016 if (NOTE_P (label)
1017 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1018 break;
1019
1020 gcc_assert (LABEL_P (label));
1021
1022 /* Ignore references to labels of containing functions. */
1023 if (LABEL_REF_NONLOCAL_P (x))
1024 break;
1025
1026 XEXP (x, 0) = label;
1027 if (! insn || ! INSN_DELETED_P (insn))
1028 ++LABEL_NUSES (label);
1029
1030 if (insn)
1031 {
1032 if (JUMP_P (insn))
1033 JUMP_LABEL (insn) = label;
1034 else
1035 {
1036 /* Add a REG_LABEL note for LABEL unless there already
1037 is one. All uses of a label, except for labels
1038 that are the targets of jumps, must have a
1039 REG_LABEL note. */
1040 if (! find_reg_note (insn, REG_LABEL, label))
1041 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1042 REG_NOTES (insn));
1043 }
1044 }
1045 return;
1046 }
1047
1048 /* Do walk the labels in a vector, but not the first operand of an
1049 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1050 case ADDR_VEC:
1051 case ADDR_DIFF_VEC:
1052 if (! INSN_DELETED_P (insn))
1053 {
1054 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1055
1056 for (i = 0; i < XVECLEN (x, eltnum); i++)
1057 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1058 }
1059 return;
1060
1061 default:
1062 break;
1063 }
1064
1065 fmt = GET_RTX_FORMAT (code);
1066 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1067 {
1068 if (fmt[i] == 'e')
1069 mark_jump_label (XEXP (x, i), insn, in_mem);
1070 else if (fmt[i] == 'E')
1071 {
1072 int j;
1073 for (j = 0; j < XVECLEN (x, i); j++)
1074 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1075 }
1076 }
1077 }
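
/* Example (illustrative; all numbers are made up): for a non-jump insn
   that captures a label address, e.g.

     (insn 42 ... (set (reg:SI 100) (label_ref 99)) ...)

   mark_jump_label increments LABEL_NUSES of label 99 and attaches a
   (REG_LABEL 99) note, whereas for a JUMP_INSN it stores the label in
   JUMP_LABEL instead.  */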
1078
1079 \f
1080 /* Delete insn INSN from the chain of insns and update label ref counts
1081 and delete insns now unreachable.
1082
1083 Returns the first insn after INSN that was not deleted.
1084
1085 Usage of this function is deprecated. Use delete_insn instead, and a
1086 subsequent cleanup_cfg pass to delete unreachable code if needed. */
1087
1088 rtx
1089 delete_related_insns (rtx insn)
1090 {
1091 int was_code_label = (LABEL_P (insn));
1092 rtx note;
1093 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
1094
1095 while (next && INSN_DELETED_P (next))
1096 next = NEXT_INSN (next);
1097
1098 /* This insn is already deleted => return first following nondeleted. */
1099 if (INSN_DELETED_P (insn))
1100 return next;
1101
1102 delete_insn (insn);
1103
1104 /* If instruction is followed by a barrier,
1105 delete the barrier too. */
1106
1107 if (next != 0 && BARRIER_P (next))
1108 delete_insn (next);
1109
1110 /* If deleting a jump, decrement the count of the label,
1111 and delete the label if it is now unused. */
1112
1113 if (JUMP_P (insn) && JUMP_LABEL (insn))
1114 {
1115 rtx lab = JUMP_LABEL (insn), lab_next;
1116
1117 if (LABEL_NUSES (lab) == 0)
1118 {
1119 /* This can delete NEXT or PREV,
1120 either directly if NEXT is JUMP_LABEL (INSN),
1121 or indirectly through more levels of jumps. */
1122 delete_related_insns (lab);
1123
1124 /* I feel a little doubtful about this loop,
1125 but I see no clean and sure alternative way
1126 to find the first insn after INSN that is not now deleted.
1127 I hope this works. */
1128 while (next && INSN_DELETED_P (next))
1129 next = NEXT_INSN (next);
1130 return next;
1131 }
1132 else if (tablejump_p (insn, NULL, &lab_next))
1133 {
1134 /* If we're deleting the tablejump, delete the dispatch table.
1135 We may not be able to kill the label immediately preceding
1136 just yet, as it might be referenced in code leading up to
1137 the tablejump. */
1138 delete_related_insns (lab_next);
1139 }
1140 }
1141
1142 /* Likewise if we're deleting a dispatch table. */
1143
1144 if (JUMP_P (insn)
1145 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1146 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1147 {
1148 rtx pat = PATTERN (insn);
1149 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1150 int len = XVECLEN (pat, diff_vec_p);
1151
1152 for (i = 0; i < len; i++)
1153 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1154 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1155 while (next && INSN_DELETED_P (next))
1156 next = NEXT_INSN (next);
1157 return next;
1158 }
1159
1160 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1161 if (NONJUMP_INSN_P (insn) || CALL_P (insn))
1162 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1163 if (REG_NOTE_KIND (note) == REG_LABEL
1164 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1165 && LABEL_P (XEXP (note, 0)))
1166 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1167 delete_related_insns (XEXP (note, 0));
1168
1169 while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
1170 prev = PREV_INSN (prev);
1171
1172 /* If INSN was a label and a dispatch table follows it,
1173 delete the dispatch table. The tablejump must have gone already.
1174 It isn't useful to fall through into a table. */
1175
1176 if (was_code_label
1177 && NEXT_INSN (insn) != 0
1178 && JUMP_P (NEXT_INSN (insn))
1179 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1180 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1181 next = delete_related_insns (NEXT_INSN (insn));
1182
1183 /* If INSN was a label, delete insns following it if now unreachable. */
1184
1185 if (was_code_label && prev && BARRIER_P (prev))
1186 {
1187 enum rtx_code code;
1188 while (next)
1189 {
1190 code = GET_CODE (next);
1191 if (code == NOTE)
1192 next = NEXT_INSN (next);
1193 /* Keep going past other deleted labels to delete what follows. */
1194 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1195 next = NEXT_INSN (next);
1196 else if (code == BARRIER || INSN_P (next))
1197 /* Note: if this deletes a jump, it can cause more
1198 deletion of unreachable code, after a different label.
1199 As long as the value from this recursive call is correct,
1200 this invocation functions correctly. */
1201 next = delete_related_insns (next);
1202 else
1203 break;
1204 }
1205 }
1206
1207 return next;
1208 }
1209 \f
1210 /* Delete a range of insns from FROM to TO, inclusive.
1211 This is for the sake of peephole optimization, so assume
1212 that whatever these insns do will still be done by a new
1213 peephole insn that will replace them. */
1214
1215 void
1216 delete_for_peephole (rtx from, rtx to)
1217 {
1218 rtx insn = from;
1219
1220 while (1)
1221 {
1222 rtx next = NEXT_INSN (insn);
1223 rtx prev = PREV_INSN (insn);
1224
1225 if (!NOTE_P (insn))
1226 {
1227 INSN_DELETED_P (insn) = 1;
1228
1229 /* Patch this insn out of the chain. */
1230 /* We don't do this all at once, because we
1231 must preserve all NOTEs. */
1232 if (prev)
1233 NEXT_INSN (prev) = next;
1234
1235 if (next)
1236 PREV_INSN (next) = prev;
1237 }
1238
1239 if (insn == to)
1240 break;
1241 insn = next;
1242 }
1243
1244 /* Note that if TO is an unconditional jump
1245 we *do not* delete the BARRIER that follows,
1246 since the peephole that replaces this sequence
1247 is also an unconditional jump in that case. */
1248 }
1249 \f
1250 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1251 NLABEL as a return. Accrue modifications into the change group. */
1252
1253 static void
1254 redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
1255 {
1256 rtx x = *loc;
1257 RTX_CODE code = GET_CODE (x);
1258 int i;
1259 const char *fmt;
1260
1261 if (code == LABEL_REF)
1262 {
1263 if (XEXP (x, 0) == olabel)
1264 {
1265 rtx n;
1266 if (nlabel)
1267 n = gen_rtx_LABEL_REF (Pmode, nlabel);
1268 else
1269 n = gen_rtx_RETURN (VOIDmode);
1270
1271 validate_change (insn, loc, n, 1);
1272 return;
1273 }
1274 }
1275 else if (code == RETURN && olabel == 0)
1276 {
1277 if (nlabel)
1278 x = gen_rtx_LABEL_REF (Pmode, nlabel);
1279 else
1280 x = gen_rtx_RETURN (VOIDmode);
1281 if (loc == &PATTERN (insn))
1282 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1283 validate_change (insn, loc, x, 1);
1284 return;
1285 }
1286
1287 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1288 && GET_CODE (SET_SRC (x)) == LABEL_REF
1289 && XEXP (SET_SRC (x), 0) == olabel)
1290 {
1291 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
1292 return;
1293 }
1294
1295 fmt = GET_RTX_FORMAT (code);
1296 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1297 {
1298 if (fmt[i] == 'e')
1299 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1300 else if (fmt[i] == 'E')
1301 {
1302 int j;
1303 for (j = 0; j < XVECLEN (x, i); j++)
1304 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1305 }
1306 }
1307 }
1308
1309 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
1310 the modifications into the change group. Return false if we did
1311 not see how to do that. */
1312
1313 int
1314 redirect_jump_1 (rtx jump, rtx nlabel)
1315 {
1316 int ochanges = num_validated_changes ();
1317 rtx *loc;
1318
1319 if (GET_CODE (PATTERN (jump)) == PARALLEL)
1320 loc = &XVECEXP (PATTERN (jump), 0, 0);
1321 else
1322 loc = &PATTERN (jump);
1323
1324 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1325 return num_validated_changes () > ochanges;
1326 }
1327
1328 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
1329 jump target label is unused as a result, it and the code following
1330 it may be deleted.
1331
1332 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1333 RETURN insn.
1334
1335 The return value will be 1 if the change was made, 0 if it wasn't
1336 (this can only occur for NLABEL == 0). */
1337
1338 int
1339 redirect_jump (rtx jump, rtx nlabel, int delete_unused)
1340 {
1341 rtx olabel = JUMP_LABEL (jump);
1342
1343 if (nlabel == olabel)
1344 return 1;
1345
1346 if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
1347 return 0;
1348
1349 redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
1350 return 1;
1351 }
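
/* A minimal usage sketch (illustrative; the wrapper is hypothetical and
   kept under #if 0): retarget JUMP to NEW_LABEL, letting the old target
   and any code after it that becomes unreachable be deleted once the
   old label's use count drops to zero.  */
#if 0
static bool
retarget_branch (rtx jump, rtx new_label)
{
  /* The nonzero third argument requests deletion of the old label
     if it ends up unused.  */
  return redirect_jump (jump, new_label, 1) != 0;
}
#endif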
1352
1353 /* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
1354 NLABEL in JUMP.
1355 If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
1356 count has dropped to zero. */
1357 void
1358 redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
1359 int invert)
1360 {
1361 rtx note;
1362
1363 /* A negative DELETE_UNUSED used to be used to signal behavior when
1364 moving the FUNCTION_END note. Just sanity check that no user still
1365 worries about this. */
1366 gcc_assert (delete_unused >= 0);
1367 JUMP_LABEL (jump) = nlabel;
1368 if (nlabel)
1369 ++LABEL_NUSES (nlabel);
1370
1371 /* Update labels in any REG_EQUAL note. */
1372 if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
1373 {
1374 if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
1375 remove_note (jump, note);
1376 else
1377 {
1378 redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
1379 confirm_change_group ();
1380 }
1381 }
1382
1383 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
1384 /* Undefined labels will remain outside the insn stream. */
1385 && INSN_UID (olabel))
1386 delete_related_insns (olabel);
1387 if (invert)
1388 invert_br_probabilities (jump);
1389 }
1390
1391 /* Invert the jump condition X contained in jump insn INSN. Accrue the
1392 modifications into the change group. Return nonzero for success. */
1393 static int
1394 invert_exp_1 (rtx x, rtx insn)
1395 {
1396 RTX_CODE code = GET_CODE (x);
1397
1398 if (code == IF_THEN_ELSE)
1399 {
1400 rtx comp = XEXP (x, 0);
1401 rtx tem;
1402 enum rtx_code reversed_code;
1403
1404 /* We can do this in two ways: The preferable way, which can only
1405 be done if this is not an integer comparison, is to reverse
1406 the comparison code. Otherwise, swap the THEN-part and ELSE-part
1407 of the IF_THEN_ELSE. If we can't do either, fail. */
1408
1409 reversed_code = reversed_comparison_code (comp, insn);
1410
1411 if (reversed_code != UNKNOWN)
1412 {
1413 validate_change (insn, &XEXP (x, 0),
1414 gen_rtx_fmt_ee (reversed_code,
1415 GET_MODE (comp), XEXP (comp, 0),
1416 XEXP (comp, 1)),
1417 1);
1418 return 1;
1419 }
1420
1421 tem = XEXP (x, 1);
1422 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
1423 validate_change (insn, &XEXP (x, 2), tem, 1);
1424 return 1;
1425 }
1426 else
1427 return 0;
1428 }
1429
1430 /* Invert the condition of the jump JUMP, and make it jump to label
1431 NLABEL instead of where it jumps now. Accrue changes into the
1432 change group. Return false if we didn't see how to perform the
1433 inversion and redirection. */
1434
1435 int
1436 invert_jump_1 (rtx jump, rtx nlabel)
1437 {
1438 rtx x = pc_set (jump);
1439 int ochanges;
1440 int ok;
1441
1442 ochanges = num_validated_changes ();
1443 gcc_assert (x);
1444 ok = invert_exp_1 (SET_SRC (x), jump);
1445 gcc_assert (ok);
1446
1447 if (num_validated_changes () == ochanges)
1448 return 0;
1449
1450 /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
1451 in Pmode, so checking this is not merely an optimization. */
1452 return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
1453 }
1454
1455 /* Invert the condition of the jump JUMP, and make it jump to label
1456 NLABEL instead of where it jumps now. Return true if successful. */
1457
1458 int
1459 invert_jump (rtx jump, rtx nlabel, int delete_unused)
1460 {
1461 rtx olabel = JUMP_LABEL (jump);
1462
1463 if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1464 {
1465 redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1466 return 1;
1467 }
1468 cancel_changes (0);
1469 return 0;
1470 }
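
/* Worked example (illustrative; labels and registers are made up):
   inverting

     (set (pc) (if_then_else (eq (reg:CC 17) (const_int 0))
                             (label_ref 30) (pc)))

   with NLABEL = label 31 yields

     (set (pc) (if_then_else (ne (reg:CC 17) (const_int 0))
                             (label_ref 31) (pc)))

   when EQ is reversible here; otherwise the THEN and ELSE arms are
   swapped instead of reversing the condition.  */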
1471
1472 \f
1473 /* Like rtx_equal_p except that it considers two REGs as equal
1474 if they renumber to the same value and considers two commutative
1475 operations to be the same if the order of the operands has been
1476 reversed. */
1477
1478 int
1479 rtx_renumbered_equal_p (rtx x, rtx y)
1480 {
1481 int i;
1482 enum rtx_code code = GET_CODE (x);
1483 const char *fmt;
1484
1485 if (x == y)
1486 return 1;
1487
1488 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
1489 && (REG_P (y) || (GET_CODE (y) == SUBREG
1490 && REG_P (SUBREG_REG (y)))))
1491 {
1492 int reg_x = -1, reg_y = -1;
1493 int byte_x = 0, byte_y = 0;
1494
1495 if (GET_MODE (x) != GET_MODE (y))
1496 return 0;
1497
1498 /* If we haven't done any renumbering, don't
1499 make any assumptions. */
1500 if (reg_renumber == 0)
1501 return rtx_equal_p (x, y);
1502
1503 if (code == SUBREG)
1504 {
1505 reg_x = REGNO (SUBREG_REG (x));
1506 byte_x = SUBREG_BYTE (x);
1507
1508 if (reg_renumber[reg_x] >= 0)
1509 {
1510 reg_x = subreg_regno_offset (reg_renumber[reg_x],
1511 GET_MODE (SUBREG_REG (x)),
1512 byte_x,
1513 GET_MODE (x));
1514 byte_x = 0;
1515 }
1516 }
1517 else
1518 {
1519 reg_x = REGNO (x);
1520 if (reg_renumber[reg_x] >= 0)
1521 reg_x = reg_renumber[reg_x];
1522 }
1523
1524 if (GET_CODE (y) == SUBREG)
1525 {
1526 reg_y = REGNO (SUBREG_REG (y));
1527 byte_y = SUBREG_BYTE (y);
1528
1529 if (reg_renumber[reg_y] >= 0)
1530 {
1531 reg_y = subreg_regno_offset (reg_renumber[reg_y],
1532 GET_MODE (SUBREG_REG (y)),
1533 byte_y,
1534 GET_MODE (y));
1535 byte_y = 0;
1536 }
1537 }
1538 else
1539 {
1540 reg_y = REGNO (y);
1541 if (reg_renumber[reg_y] >= 0)
1542 reg_y = reg_renumber[reg_y];
1543 }
1544
1545 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
1546 }
1547
1548 /* Now we have disposed of all the cases
1549 in which different rtx codes can match. */
1550 if (code != GET_CODE (y))
1551 return 0;
1552
1553 switch (code)
1554 {
1555 case PC:
1556 case CC0:
1557 case ADDR_VEC:
1558 case ADDR_DIFF_VEC:
1559 case CONST_INT:
1560 case CONST_DOUBLE:
1561 return 0;
1562
1563 case LABEL_REF:
1564 /* We can't assume nonlocal labels have their following insns yet. */
1565 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
1566 return XEXP (x, 0) == XEXP (y, 0);
1567
1568 /* Two label-refs are equivalent if they point at labels
1569 in the same position in the instruction stream. */
1570 return (next_real_insn (XEXP (x, 0))
1571 == next_real_insn (XEXP (y, 0)));
1572
1573 case SYMBOL_REF:
1574 return XSTR (x, 0) == XSTR (y, 0);
1575
1576 case CODE_LABEL:
1577 /* If we didn't match EQ equality above, they aren't the same. */
1578 return 0;
1579
1580 default:
1581 break;
1582 }
1583
1584 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1585
1586 if (GET_MODE (x) != GET_MODE (y))
1587 return 0;
1588
1589 /* For commutative operations, the RTXs match if the operands match in any
1590 order. Also handle the simple binary and unary cases without a loop. */
1591 if (targetm.commutative_p (x, UNKNOWN))
1592 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1593 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
1594 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
1595 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
1596 else if (NON_COMMUTATIVE_P (x))
1597 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1598 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
1599 else if (UNARY_P (x))
1600 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
1601
1602 /* Compare the elements. If any pair of corresponding elements
1603 fails to match, return 0 for the whole thing. */
1604
1605 fmt = GET_RTX_FORMAT (code);
1606 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1607 {
1608 int j;
1609 switch (fmt[i])
1610 {
1611 case 'w':
1612 if (XWINT (x, i) != XWINT (y, i))
1613 return 0;
1614 break;
1615
1616 case 'i':
1617 if (XINT (x, i) != XINT (y, i))
1618 return 0;
1619 break;
1620
1621 case 't':
1622 if (XTREE (x, i) != XTREE (y, i))
1623 return 0;
1624 break;
1625
1626 case 's':
1627 if (strcmp (XSTR (x, i), XSTR (y, i)))
1628 return 0;
1629 break;
1630
1631 case 'e':
1632 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
1633 return 0;
1634 break;
1635
1636 case 'u':
1637 if (XEXP (x, i) != XEXP (y, i))
1638 return 0;
1639 /* Fall through. */
1640 case '0':
1641 break;
1642
1643 case 'E':
1644 if (XVECLEN (x, i) != XVECLEN (y, i))
1645 return 0;
1646 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1647 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1648 return 0;
1649 break;
1650
1651 default:
1652 gcc_unreachable ();
1653 }
1654 }
1655 return 1;
1656 }
1657 \f
1658 /* If X is a hard register or equivalent to one or a subregister of one,
1659 return the hard register number. If X is a pseudo register that was not
1660 assigned a hard register, return the pseudo register number. Otherwise,
1661 return -1. Any rtx is valid for X. */
1662
1663 int
1664 true_regnum (rtx x)
1665 {
1666 if (REG_P (x))
1667 {
1668 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
1669 return reg_renumber[REGNO (x)];
1670 return REGNO (x);
1671 }
1672 if (GET_CODE (x) == SUBREG)
1673 {
1674 int base = true_regnum (SUBREG_REG (x));
1675 if (base >= 0
1676 && base < FIRST_PSEUDO_REGISTER
1677 && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
1678 GET_MODE (SUBREG_REG (x)),
1679 SUBREG_BYTE (x), GET_MODE (x)))
1680 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
1681 GET_MODE (SUBREG_REG (x)),
1682 SUBREG_BYTE (x), GET_MODE (x));
1683 }
1684 return -1;
1685 }
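
/* Example (illustrative; register numbers are made up): after reload,
   if pseudo 100 was assigned hard register 3, then
   true_regnum (gen_rtx_REG (SImode, 100)) returns 3; for
   (subreg:QI (reg:SI 3) 0) it returns the hard register that actually
   holds that byte; for a pseudo with no hard register it returns the
   pseudo's own number, and for non-registers it returns -1.  */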
1686
1687 /* Return the regno of the register REG, handling subregs too. */
1688 unsigned int
1689 reg_or_subregno (rtx reg)
1690 {
1691 if (GET_CODE (reg) == SUBREG)
1692 reg = SUBREG_REG (reg);
1693 gcc_assert (REG_P (reg));
1694 return REGNO (reg);
1695 }