/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This is the vestige of the once-mighty jump-optimization pass of the
   compiler.  Now it contains basically a set of utility functions to
   operate on jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
36
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tm_p.h"
43 #include "flags.h"
44 #include "hard-reg-set.h"
45 #include "regs.h"
46 #include "insn-config.h"
47 #include "insn-attr.h"
48 #include "recog.h"
49 #include "function.h"
50 #include "expr.h"
51 #include "real.h"
52 #include "except.h"
53 #include "diagnostic.h"
54 #include "toplev.h"
55 #include "reload.h"
56 #include "predict.h"
57 #include "timevar.h"
58 #include "tree-pass.h"
59 #include "target.h"
60
61 /* Optimize jump y; x: ... y: jumpif... x?
62 Don't know if it is worth bothering with. */
63 /* Optimize two cases of conditional jump to conditional jump?
64 This can never delete any instruction or make anything dead,
65 or even change what is live at any point.
66 So perhaps let combiner do it. */
67
68 static void init_label_info (rtx);
69 static void mark_all_labels (rtx);
70 static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
71 static int invert_exp_1 (rtx, rtx);
72 static int returnjump_p_1 (rtx *, void *);
73 \f
/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
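
/* A typical use (an illustrative note, not from this file): a pass that
   has rewritten large parts of the insn stream can call
   rebuild_jump_labels (get_insns ()) to recompute every JUMP_LABEL
   field and LABEL_NUSES count from scratch.  */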
\f
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */
unsigned int
cleanup_barriers (void)
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (BARRIER_P (insn))
        {
          prev = prev_nonnote_insn (insn);
          if (BARRIER_P (prev))
            delete_insn (insn);
          else if (prev != PREV_INSN (insn))
            reorder_insns (insn, insn, prev);
        }
    }
  return 0;
}
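
/* As an illustrative sketch (hypothetical stream, not from a real dump):
   given "JUMP_INSN; NOTE; BARRIER; BARRIER", the pass deletes the
   duplicate BARRIER and moves the survivor next to the jump, leaving
   "JUMP_INSN; BARRIER; NOTE".  */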

struct tree_opt_pass pass_cleanup_barriers =
{
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

\f
/* Initialize LABEL_NUSES and JUMP_LABEL fields, and delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  */
static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    else if (JUMP_P (insn))
      JUMP_LABEL (insn) = 0;
    else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
      {
        rtx note, next;

        for (note = REG_NOTES (insn); note; note = next)
          {
            next = XEXP (note, 1);
            if (REG_NOTE_KIND (note) == REG_LABEL
                && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
              remove_note (insn, note);
          }
      }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        mark_jump_label (PATTERN (insn), insn, 0);
        if (! INSN_DELETED_P (insn) && JUMP_P (insn))
          {
            /* When we know the LABEL_REF contained in a REG used in
               an indirect jump, we'll have a REG_LABEL note so that
               flow can tell where it's going.  */
            if (JUMP_LABEL (insn) == 0)
              {
                rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
                if (label_note)
                  {
                    /* Put a LABEL_REF around the REG_LABEL note, so
                       that we can canonicalize it.  */
                    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
                                                       XEXP (label_note, 0));

                    mark_jump_label (label_ref, insn, 0);
                    XEXP (label_note, 0) = XEXP (label_ref, 0);
                    JUMP_LABEL (insn) = XEXP (label_note, 0);
                  }
              }
          }
      }

  /* If we are in cfglayout mode, there may be non-insns between the
     basic blocks.  If those non-insns represent tablejump data, they
     contain label references that we must record.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      rtx insn;
      FOR_EACH_BB (bb)
        {
          for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
            if (INSN_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }

          for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
            if (INSN_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }
        }
    }
}
\f
/* Given a comparison (CODE ARG0 ARG1), inside an insn INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return
   UNKNOWN.  UNKNOWN may also be returned when we have a CC_MODE comparison
   and we don't know whether its source is a floating-point or an integer
   comparison.  The machine description should define the REVERSIBLE_CC_MODE
   and REVERSE_CONDITION macros to help this function avoid overhead in
   these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us a way to reverse the
     comparison.  Give it priority over everything else to allow the
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
         point.  Similarly the unsigned comparisons are never used for
         floating point, so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* Once we see an unordered comparison, we know we are dealing
         with floating point, so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      for (prev = prev_nonnote_insn (insn);
           prev != 0 && !LABEL_P (prev);
           prev = prev_nonnote_insn (prev))
        {
          const_rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for
                 the model of i387 comparisons that first move flag
                 registers around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
            }
          /* If the register is clobbered in some way we don't
             understand, give up.  */
          if (set)
            return UNKNOWN;
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
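
/* For example (an illustrative sketch, not from the original file): for
   an integer comparison (gtu:SI r0 r1) this returns LEU, while for a
   NaN-honoring (gt:SF f0 f1) it returns UNKNOWN, because !(a > b) is
   not the same as (a <= b) when either operand is a NaN.  */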

/* A wrapper around the previous function to take COMPARISON as an rtx
   expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (rtx comparison, rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
                                         XEXP (comparison, 0),
                                         XEXP (comparison, 1), insn);
}

/* Return a comparison with the reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
reversed_comparison (rtx exp, enum machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}

\f
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling nans in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}
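
/* As a concrete illustration (not part of the original file): for IEEE
   floats, rewriting (lt x y) as (ge x y) is wrong when either operand
   is a NaN, since both comparisons are then false; the NaN-safe
   reversal of LT is UNGE, which the function below provides.  */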

/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}
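
/* E.g. (lt:SI x y) holds exactly when (gt:SI y x) does, including for
   IEEE floats, so swap_condition (LT) == GT (an illustrative note, not
   part of the original file).  */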

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      gcc_unreachable ();
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      gcc_unreachable ();
    }
}
\f
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to reverse
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
        return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
          || code2 == ORDERED)
        return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
        return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
        return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
        return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
        return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
        return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
        return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
          || code2 == UNGE || code2 == UNGT)
        return 1;
      break;

    default:
      break;
    }

  return 0;
}
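
/* For instance, comparison_dominates_p (GT, NE) is 1 because a > b
   implies a != b, while comparison_dominates_p (LE, LT) is 0 since
   a <= b allows a == b (an illustrative note, not from the original).  */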
\f
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (const_rtx insn)
{
  return (JUMP_P (insn)
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (const_rtx insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
            && ((GET_CODE (XEXP (x, 2)) == PC
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
                     || GET_CODE (XEXP (x, 1)) == RETURN))
                || (GET_CODE (XEXP (x, 1)) == PC
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
                        || GET_CODE (XEXP (x, 2)) == RETURN))));
}

/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (const_rtx insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}

/* Return the SET of PC in INSN, otherwise NULL.  */

rtx
pc_set (const_rtx insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}

/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (const_rtx insn)
{
  const_rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}

/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects, so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (const_rtx insn)
{
  const_rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN))
          || (a == PC && (b == LABEL_REF || b == RETURN)));
}
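
/* A typical pattern accepted here (an illustrative sketch):
     (set (pc) (if_then_else (eq (reg:CC cc) (const_int 0))
                             (label_ref L1)
                             (pc)))
   for which pc_set returns the SET and any_condjump_p returns true.  */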

/* Return the label of a conditional jump.  */

rtx
condjump_label (rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x && (GET_CODE (x) == RETURN
               || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
}

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (const_rtx insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          if (GET_CODE (XVECEXP (x, 0, i)) == SET
              && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
            sets_cc0 = 1;
          else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
            other_things = 1;
        }
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif
\f
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
        mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
                         XVECEXP (x, 0, i), 0);
      return;

    case SYMBOL_REF:
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
        rtx label = XEXP (x, 0);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (NOTE_P (label)
            && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
          break;

        gcc_assert (LABEL_P (label));

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        XEXP (x, 0) = label;
        if (! insn || ! INSN_DELETED_P (insn))
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (JUMP_P (insn))
              JUMP_LABEL (insn) = label;
            else
              {
                /* Add a REG_LABEL note for LABEL unless there already
                   is one.  All uses of a label, except for labels
                   that are the targets of jumps, must have a
                   REG_LABEL note.  */
                if (! find_reg_note (insn, REG_LABEL, label))
                  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
                                                        REG_NOTES (insn));
              }
          }
        return;
      }

      /* Do walk the labels in a vector, but not the first operand of an
         ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
        {
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
        }
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
        }
    }
}

\f
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Use of this function is deprecated.  Use delete_insn instead, followed
   by a subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
        {
          /* This can delete NEXT or PREV,
             either directly if NEXT is JUMP_LABEL (INSN),
             or indirectly through more levels of jumps.  */
          delete_related_insns (lab);

          /* I feel a little doubtful about this loop,
             but I see no clean and sure alternative way
             to find the first insn after INSN that is not now deleted.
             I hope this works.  */
          while (next && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          return next;
        }
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
          delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (NONJUMP_INSN_P (insn) || CALL_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && LABEL_P (XEXP (note, 0)))
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
          || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
        {
          code = GET_CODE (next);
          if (code == NOTE)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  return next;
}
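
/* A minimal usage sketch (hypothetical, not from this file): when a
   conditional jump JUMP has been proven never taken, a pass might do
     next = delete_related_insns (jump);
   which also deletes the target label and any code reachable only
   through it, then continue scanning from NEXT.  */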
\f
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
        {
          INSN_DELETED_P (insn) = 1;

          /* Patch this insn out of the chain.  */
          /* We don't do this all at once, because we
             must preserve all NOTEs.  */
          if (prev)
            NEXT_INSN (prev) = next;

          if (next)
            PREV_INSN (next) = prev;
        }

      if (insn == to)
        break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
\f
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        {
          rtx n;
          if (nlabel)
            n = gen_rtx_LABEL_REF (Pmode, nlabel);
          else
            n = gen_rtx_RETURN (VOIDmode);

          validate_change (insn, loc, n, 1);
          return;
        }
    }
  else if (code == RETURN && olabel == 0)
    {
      if (nlabel)
        x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
        x = gen_rtx_RETURN (VOIDmode);
      if (loc == &PATTERN (insn))
        x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}

/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc;

  if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}
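
/* A minimal sketch of the calling convention (hypothetical names, not
   from this file): assuming JUMP satisfies any_condjump_p and NEW_LABEL
   is a CODE_LABEL already in the insn stream,
     if (redirect_jump (jump, new_label, 1))
       ...the branch now targets NEW_LABEL...
   where the final argument asks for the old label to be deleted once
   its reference count drops to zero.  */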

/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete the insns related to OLABEL if its
   reference count has dropped to zero.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
                 int invert)
{
  rtx note;

  /* A negative DELETE_UNUSED used to signal the behavior for moving the
     FUNCTION_END note.  Just sanity-check that no caller still relies
     on this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
        remove_note (jump, note);
      else
        {
          redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
          confirm_change_group ();
        }
    }

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}

/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
         be done if this is not an integer comparison, is to reverse
         the comparison code.  Otherwise, swap the THEN-part and ELSE-part
         of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
        {
          validate_change (insn, &XEXP (x, 0),
                           gen_rtx_fmt_ee (reversed_code,
                                           GET_MODE (comp), XEXP (comp, 0),
                                           XEXP (comp, 1)),
                           1);
          return 1;
        }

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  gcc_assert (x);
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}
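
/* For example (an illustrative sketch, not from this file): inverting
     (set (pc) (if_then_else (eq (reg:CC cc) (const_int 0))
                             (label_ref L1) (pc)))
   with NLABEL = L2 yields
     (set (pc) (if_then_else (ne (reg:CC cc) (const_int 0))
                             (label_ref L2) (pc)))
   and redirect_jump_2 then flips the branch-probability notes via
   invert_br_probabilities.  */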

\f
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (rtx x, rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
                        && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
        return 0;

      /* If we haven't done any renumbering, don't
         make any assumptions.  */
      if (reg_renumber == 0)
        return rtx_equal_p (x, y);

      if (code == SUBREG)
        {
          reg_x = REGNO (SUBREG_REG (x));
          byte_x = SUBREG_BYTE (x);

          if (reg_renumber[reg_x] >= 0)
            {
              reg_x = subreg_regno_offset (reg_renumber[reg_x],
                                           GET_MODE (SUBREG_REG (x)),
                                           byte_x,
                                           GET_MODE (x));
              byte_x = 0;
            }
        }
      else
        {
          reg_x = REGNO (x);
          if (reg_renumber[reg_x] >= 0)
            reg_x = reg_renumber[reg_x];
        }

      if (GET_CODE (y) == SUBREG)
        {
          reg_y = REGNO (SUBREG_REG (y));
          byte_y = SUBREG_BYTE (y);

          if (reg_renumber[reg_y] >= 0)
            {
              reg_y = subreg_regno_offset (reg_renumber[reg_y],
                                           GET_MODE (SUBREG_REG (y)),
                                           byte_y,
                                           GET_MODE (y));
              byte_y = 0;
            }
        }
      else
        {
          reg_y = REGNO (y);
          if (reg_renumber[reg_y] >= 0)
            reg_y = reg_renumber[reg_y];
        }

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
        return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
         in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
              == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 't':
          if (XTREE (x, i) != XTREE (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'u':
          if (XEXP (x, i) != XEXP (y, i))
            return 0;
          /* Fall through.  */
        case '0':
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
\f
/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
        return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
          && base < FIRST_PSEUDO_REGISTER
          && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
                                            GET_MODE (SUBREG_REG (x)),
                                            SUBREG_BYTE (x), GET_MODE (x)))
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
                                           GET_MODE (SUBREG_REG (x)),
                                           SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}
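
/* For instance (an illustrative note, not from this file): if pseudo 200
   was assigned hard register 3, true_regnum of (reg:SI 200) is 3; for
   (subreg:HI (reg:SI 200) 2) it is 3 plus the subword offset, provided
   that offset is representable.  */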

/* Return the regno of the register REG, handling subregs too.  */
unsigned int
reg_or_subregno (const_rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}