1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This is the pathetic reminder of the old fame of the jump-optimization
23 pass of the compiler.  Now it basically contains a set of utility
24 functions for operating on jumps.
25
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
32 at by later passes.
33
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
36
37 #include "config.h"
38 #include "system.h"
39 #include "rtl.h"
40 #include "tm_p.h"
41 #include "flags.h"
42 #include "hard-reg-set.h"
43 #include "regs.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
46 #include "recog.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "real.h"
50 #include "except.h"
51 #include "toplev.h"
52 #include "reload.h"
53 #include "predict.h"
54
55 /* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57 /* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
61
62 static int init_label_info PARAMS ((rtx));
63 static void mark_all_labels PARAMS ((rtx));
64 static int duplicate_loop_exit_test PARAMS ((rtx));
65 static void delete_computation PARAMS ((rtx));
66 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
67 static int redirect_exp PARAMS ((rtx, rtx, rtx));
68 static void invert_exp_1 PARAMS ((rtx));
69 static int invert_exp PARAMS ((rtx));
70 static int returnjump_p_1 PARAMS ((rtx *, void *));
71 static void delete_prior_computation PARAMS ((rtx, rtx));
72 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
73 \f
74 /* Alternate entry into the jump optimizer. This entry point only rebuilds
75 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
76 instructions. */
77 void
78 rebuild_jump_labels (f)
79 rtx f;
80 {
81 rtx insn;
82 int max_uid = 0;
83
84 max_uid = init_label_info (f) + 1;
85
86 mark_all_labels (f);
87
88 /* Keep track of labels used from static data; we don't track them
89 closely enough to delete them here, so make sure their reference
90 count doesn't drop to zero. */
91
92 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
93 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
94 LABEL_NUSES (XEXP (insn, 0))++;
95
96 /* Keep track of labels used for marking handlers for exception
97 regions; they cannot usually be deleted. */
98
99 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
100 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
101 LABEL_NUSES (XEXP (insn, 0))++;
102 }
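/* For illustration, a minimal usage sketch (not from the original file,
   and assuming the usual insn chain returned by get_insns ()): a caller
   rebuilds the jump-label information for the whole current function with

     rebuild_jump_labels (get_insns ());

   after which JUMP_LABEL is valid for every jump insn and LABEL_NUSES
   holds a use count for every code label.  */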
103 \f
104 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
105 non-fallthru insn. This is not generally true, as multiple barriers
106 may have crept in, or the BARRIER may be separated from the last
107 real insn by one or more NOTEs.
108
109 This simple pass moves barriers and removes duplicates so that the
110 old code is happy.
111 */
112 void
113 cleanup_barriers ()
114 {
115 rtx insn, next, prev;
116 for (insn = get_insns (); insn; insn = next)
117 {
118 next = NEXT_INSN (insn);
119 if (GET_CODE (insn) == BARRIER)
120 {
121 prev = prev_nonnote_insn (insn);
122 if (GET_CODE (prev) == BARRIER)
123 delete_barrier (insn);
124 else if (prev != PREV_INSN (insn))
125 reorder_insns (insn, insn, prev);
126 }
127 }
128 }
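/* For illustration, two minimal sketches of the cleanup above on a
   hypothetical insn stream:

     insn A | NOTE | BARRIER       becomes   insn A | BARRIER | NOTE
     insn A | BARRIER | BARRIER    becomes   insn A | BARRIER

   i.e. each barrier is moved to directly follow the last real insn, and
   a barrier that then duplicates a preceding one is deleted.  */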
129 \f
130 void
131 copy_loop_headers (f)
132 rtx f;
133 {
134 rtx insn, next;
135 /* Find each loop start (NOTE_INSN_LOOP_BEG) and try to duplicate its exit test in front of it. */
136 for (insn = f; insn; insn = next)
137 {
138 rtx temp, temp1;
139
140 next = NEXT_INSN (insn);
141
142 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
143 jump. Try to optimize by duplicating the loop exit test if so.
144 This is only safe immediately after regscan, because it uses
145 the values of regno_first_uid and regno_last_uid. */
146 if (GET_CODE (insn) == NOTE
147 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
148 && (temp1 = next_nonnote_insn (insn)) != 0
149 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
150 {
151 temp = PREV_INSN (insn);
152 if (duplicate_loop_exit_test (insn))
153 {
154 next = NEXT_INSN (temp);
155 }
156 }
157 }
158 }
159
160 void
161 purge_line_number_notes (f)
162 rtx f;
163 {
164 rtx last_note = 0;
165 rtx insn;
166 /* Delete extraneous line number notes.
167 Note that two consecutive notes for different lines are not really
168 extraneous. There should be some indication where that line belonged,
169 even if it became empty. */
170
171 for (insn = f; insn; insn = NEXT_INSN (insn))
172 if (GET_CODE (insn) == NOTE)
173 {
174 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
175 /* Any previous line note was for the prologue; gdb wants a new
176 note after the prologue even if it is for the same line. */
177 last_note = NULL_RTX;
178 else if (NOTE_LINE_NUMBER (insn) >= 0)
179 {
180 /* Delete this note if it is identical to previous note. */
181 if (last_note
182 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
183 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
184 {
185 delete_related_insns (insn);
186 continue;
187 }
188
189 last_note = insn;
190 }
191 }
192 }
193 \f
194 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
195 notes whose labels don't occur in the insn any more. Returns the
196 largest INSN_UID found. */
197 static int
198 init_label_info (f)
199 rtx f;
200 {
201 int largest_uid = 0;
202 rtx insn;
203
204 for (insn = f; insn; insn = NEXT_INSN (insn))
205 {
206 if (GET_CODE (insn) == CODE_LABEL)
207 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
208 else if (GET_CODE (insn) == JUMP_INSN)
209 JUMP_LABEL (insn) = 0;
210 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
211 {
212 rtx note, next;
213
214 for (note = REG_NOTES (insn); note; note = next)
215 {
216 next = XEXP (note, 1);
217 if (REG_NOTE_KIND (note) == REG_LABEL
218 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
219 remove_note (insn, note);
220 }
221 }
222 if (INSN_UID (insn) > largest_uid)
223 largest_uid = INSN_UID (insn);
224 }
225
226 return largest_uid;
227 }
228
229 /* Mark the label each jump jumps to.
230 Combine consecutive labels, and count uses of labels. */
231
232 static void
233 mark_all_labels (f)
234 rtx f;
235 {
236 rtx insn;
237
238 for (insn = f; insn; insn = NEXT_INSN (insn))
239 if (INSN_P (insn))
240 {
241 if (GET_CODE (insn) == CALL_INSN
242 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
243 {
244 mark_all_labels (XEXP (PATTERN (insn), 0));
245 mark_all_labels (XEXP (PATTERN (insn), 1));
246 mark_all_labels (XEXP (PATTERN (insn), 2));
247
248 /* Canonicalize the tail recursion label attached to the
249 CALL_PLACEHOLDER insn. */
250 if (XEXP (PATTERN (insn), 3))
251 {
252 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
253 XEXP (PATTERN (insn), 3));
254 mark_jump_label (label_ref, insn, 0);
255 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
256 }
257
258 continue;
259 }
260
261 mark_jump_label (PATTERN (insn), insn, 0);
262 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
263 {
264 /* When we know the LABEL_REF contained in a REG used in
265 an indirect jump, we'll have a REG_LABEL note so that
266 flow can tell where it's going. */
267 if (JUMP_LABEL (insn) == 0)
268 {
269 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
270 if (label_note)
271 {
272 /* But a LABEL_REF around the REG_LABEL note, so
273 that we can canonicalize it. */
274 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
275 XEXP (label_note, 0));
276
277 mark_jump_label (label_ref, insn, 0);
278 XEXP (label_note, 0) = XEXP (label_ref, 0);
279 JUMP_LABEL (insn) = XEXP (label_note, 0);
280 }
281 }
282 }
283 }
284 }
285
286 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
287 jump. Assume that this unconditional jump is to the exit test code. If
288 the code is sufficiently simple, make a copy of it before LOOP_START,
289 followed by a jump to the exit of the loop.  Then delete the unconditional
290 jump after LOOP_START.
291
292 Return 1 if we made the change, else 0.
293
294 This is only safe immediately after a regscan pass because it uses the
295 values of regno_first_uid and regno_last_uid. */
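/* For illustration, roughly what this transformation does to a
   while-style loop (labels hypothetical):

     before:                        after:
       NOTE_INSN_LOOP_BEG             copy of exit test (jumps past
       jump L_test                      the loop when it fails)
       L_body: loop body              NOTE_INSN_LOOP_BEG
       L_test: exit test              L_body: loop body
       cond-jump L_body               NOTE_INSN_LOOP_VTOP
       NOTE_INSN_LOOP_END             L_test: exit test
                                      cond-jump L_body
                                      NOTE_INSN_LOOP_END

   so a first test that passes falls straight into the body instead of
   jumping to the bottom of the loop.  */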
296
297 static int
298 duplicate_loop_exit_test (loop_start)
299 rtx loop_start;
300 {
301 rtx insn, set, reg, p, link;
302 rtx copy = 0, first_copy = 0;
303 int num_insns = 0;
304 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
305 rtx lastexit;
306 int max_reg = max_reg_num ();
307 rtx *reg_map = 0;
308 rtx loop_pre_header_label;
309
310 /* Scan the exit code. We do not perform this optimization if any insn:
311
312 is a CALL_INSN
313 is a CODE_LABEL
314 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
315 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
316 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
317 is not valid.
318
319 We also do not do this if we find an insn with ASM_OPERANDS. While
320 this restriction should not be necessary, copying an insn with
321 ASM_OPERANDS can confuse asm_noperands in some cases.
322
323 Also, don't do this if the exit code is more than 20 insns. */
324
325 for (insn = exitcode;
326 insn
327 && ! (GET_CODE (insn) == NOTE
328 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
329 insn = NEXT_INSN (insn))
330 {
331 switch (GET_CODE (insn))
332 {
333 case CODE_LABEL:
334 case CALL_INSN:
335 return 0;
336 case NOTE:
337 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
338 a jump immediately after the loop start that branches outside
339 the loop but within an outer loop, near the exit test.
340 If we copied this exit test and created a phony
341 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
342 before the exit test look as if they could be safely moved
343 out of the loop even though they may never actually be executed.
344 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
345
346 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
347 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
348 return 0;
349
350 if (optimize < 2
351 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
352 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
353 /* If we were to duplicate this code, we would not move
354 the BLOCK notes, and so debugging the moved code would
355 be difficult. Thus, we only move the code with -O2 or
356 higher. */
357 return 0;
358
359 break;
360 case JUMP_INSN:
361 case INSN:
362 /* The code below would grossly mishandle REG_WAS_0 notes,
363 so get rid of them here. */
364 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
365 remove_note (insn, p);
366 if (++num_insns > 20
367 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
368 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
369 return 0;
370 break;
371 default:
372 break;
373 }
374 }
375
376 /* Unless INSN is zero, we can do the optimization. */
377 if (insn == 0)
378 return 0;
379
380 lastexit = insn;
381
382 /* See if any insn sets a register only used in the loop exit code and
383 not a user variable. If so, replace it with a new register. */
384 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
385 if (GET_CODE (insn) == INSN
386 && (set = single_set (insn)) != 0
387 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
388 || (GET_CODE (reg) == SUBREG
389 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
390 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
391 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
392 {
393 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
394 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
395 break;
396
397 if (p != lastexit)
398 {
399 /* We can do the replacement. Allocate reg_map if this is the
400 first replacement we found. */
401 if (reg_map == 0)
402 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
403
404 REG_LOOP_TEST_P (reg) = 1;
405
406 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
407 }
408 }
409 loop_pre_header_label = gen_label_rtx ();
410
411 /* Now copy each insn. */
412 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
413 {
414 switch (GET_CODE (insn))
415 {
416 case BARRIER:
417 copy = emit_barrier_before (loop_start);
418 break;
419 case NOTE:
420 /* Only copy line-number notes. */
421 if (NOTE_LINE_NUMBER (insn) >= 0)
422 {
423 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
424 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
425 }
426 break;
427
428 case INSN:
429 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
430 if (reg_map)
431 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
432
433 mark_jump_label (PATTERN (copy), copy, 0);
434
435 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
436 make them. */
437 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
438 if (REG_NOTE_KIND (link) != REG_LABEL)
439 {
440 if (GET_CODE (link) == EXPR_LIST)
441 REG_NOTES (copy)
442 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
443 XEXP (link, 0),
444 REG_NOTES (copy)));
445 else
446 REG_NOTES (copy)
447 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
448 XEXP (link, 0),
449 REG_NOTES (copy)));
450 }
451
452 if (reg_map && REG_NOTES (copy))
453 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
454 break;
455
456 case JUMP_INSN:
457 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
458 loop_start);
459 if (reg_map)
460 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
461 mark_jump_label (PATTERN (copy), copy, 0);
462 if (REG_NOTES (insn))
463 {
464 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
465 if (reg_map)
466 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
467 }
468
469 /* Predict a conditional jump that keeps the loop looping as taken.
470 Other jumps are probably exit conditions, so predict
471 them as not taken. */
472 if (any_condjump_p (copy))
473 {
474 rtx label = JUMP_LABEL (copy);
475 if (label)
476 {
477 /* The jump_insn after loop_start should be followed
478 by barrier and loopback label. */
479 if (prev_nonnote_insn (label)
480 && (prev_nonnote_insn (prev_nonnote_insn (label))
481 == next_nonnote_insn (loop_start)))
482 {
483 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
484 /* To keep pre-header, we need to redirect all loop
485 entrances before the LOOP_BEG note. */
486 redirect_jump (copy, loop_pre_header_label, 0);
487 }
488 else
489 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
490 }
491 }
492 break;
493
494 default:
495 abort ();
496 }
497
498 /* Record the first insn we copied. We need it so that we can
499 scan the copied insns for new pseudo registers. */
500 if (! first_copy)
501 first_copy = copy;
502 }
503
504 /* Now clean up by emitting a jump to the end label and deleting the jump
505 at the start of the loop. */
506 if (! copy || GET_CODE (copy) != BARRIER)
507 {
508 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
509 loop_start);
510
511 /* Record the first insn we copied. We need it so that we can
512 scan the copied insns for new pseudo registers. This may not
513 be strictly necessary since we should have copied at least one
514 insn above. But I am going to be safe. */
515 if (! first_copy)
516 first_copy = copy;
517
518 mark_jump_label (PATTERN (copy), copy, 0);
519 emit_barrier_before (loop_start);
520 }
521
522 emit_label_before (loop_pre_header_label, loop_start);
523
524 /* Now scan from the first insn we copied to the last insn we copied
525 (copy) for new pseudo registers. Do this after the code to jump to
526 the end label since that might create a new pseudo too. */
527 reg_scan_update (first_copy, copy, max_reg);
528
529 /* Mark the exit code as the virtual top of the converted loop. */
530 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
531
532 delete_related_insns (next_nonnote_insn (loop_start));
533
534 /* Clean up. */
535 if (reg_map)
536 free (reg_map);
537
538 return 1;
539 }
540 \f
541 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and loop-end
542 notes between START and END out before START.  START and END may be such
543 notes. Returns the values of the new starting and ending insns, which
544 may be different if the original ones were such notes.
545 Return true if there were only such notes and no real instructions. */
546
547 bool
548 squeeze_notes (startp, endp)
549 rtx* startp;
550 rtx* endp;
551 {
552 rtx start = *startp;
553 rtx end = *endp;
554
555 rtx insn;
556 rtx next;
557 rtx last = NULL;
558 rtx past_end = NEXT_INSN (end);
559
560 for (insn = start; insn != past_end; insn = next)
561 {
562 next = NEXT_INSN (insn);
563 if (GET_CODE (insn) == NOTE
564 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
565 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
566 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
567 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
568 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
569 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
570 {
571 if (insn == start)
572 start = next;
573 else
574 {
575 rtx prev = PREV_INSN (insn);
576 PREV_INSN (insn) = PREV_INSN (start);
577 NEXT_INSN (insn) = start;
578 NEXT_INSN (PREV_INSN (insn)) = insn;
579 PREV_INSN (NEXT_INSN (insn)) = insn;
580 NEXT_INSN (prev) = next;
581 PREV_INSN (next) = prev;
582 }
583 }
584 else
585 last = insn;
586 }
587
588 /* There were no real instructions. */
589 if (start == past_end)
590 return true;
591
592 end = last;
593
594 *startp = start;
595 *endp = end;
596 return false;
597 }
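/* For illustration, a minimal sketch of squeeze_notes on a hypothetical
   insn range (brackets mark the *STARTP..*ENDP range):

     before:  [NOTE_BLOCK_BEG, insn A, NOTE_LOOP_END, insn B]
     after:   NOTE_BLOCK_BEG, NOTE_LOOP_END, [insn A, insn B]

   with *STARTP and *ENDP updated to delimit the remaining real insns,
   and true returned only when the range contained such notes alone.  */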
598 \f
599 /* Return the label before INSN, or put a new label there. */
600
601 rtx
602 get_label_before (insn)
603 rtx insn;
604 {
605 rtx label;
606
607 /* Find an existing label at this point
608 or make a new one if there is none. */
609 label = prev_nonnote_insn (insn);
610
611 if (label == 0 || GET_CODE (label) != CODE_LABEL)
612 {
613 rtx prev = PREV_INSN (insn);
614
615 label = gen_label_rtx ();
616 emit_label_after (label, prev);
617 LABEL_NUSES (label) = 0;
618 }
619 return label;
620 }
621
622 /* Return the label after INSN, or put a new label there. */
623
624 rtx
625 get_label_after (insn)
626 rtx insn;
627 {
628 rtx label;
629
630 /* Find an existing label at this point
631 or make a new one if there is none. */
632 label = next_nonnote_insn (insn);
633
634 if (label == 0 || GET_CODE (label) != CODE_LABEL)
635 {
636 label = gen_label_rtx ();
637 emit_label_after (label, insn);
638 LABEL_NUSES (label) = 0;
639 }
640 return label;
641 }
642 \f
643 /* Given a comparison (CODE ARG0 ARG1) inside an insn, INSN, return the code
644 of the reversed comparison if it is possible to do so.  Otherwise return
645 UNKNOWN.  UNKNOWN may be returned when we have a CC_MODE compare and don't
646 know whether the source is a floating point or an integer comparison.  The
647 machine description should define the REVERSIBLE_CC_MODE and
648 REVERSE_CONDITION macros to help this function avoid overhead in these cases. */
649 enum rtx_code
650 reversed_comparison_code_parts (code, arg0, arg1, insn)
651 rtx insn, arg0, arg1;
652 enum rtx_code code;
653 {
654 enum machine_mode mode;
655
656 /* If this is not actually a comparison, we can't reverse it. */
657 if (GET_RTX_CLASS (code) != '<')
658 return UNKNOWN;
659
660 mode = GET_MODE (arg0);
661 if (mode == VOIDmode)
662 mode = GET_MODE (arg1);
663
664 /* First see if the machine description supplies us with a way to reverse
665 the comparison.  Give it priority over everything else to allow the
666 machine description to do tricks. */
667 #ifdef REVERSIBLE_CC_MODE
668 if (GET_MODE_CLASS (mode) == MODE_CC
669 && REVERSIBLE_CC_MODE (mode))
670 {
671 #ifdef REVERSE_CONDITION
672 return REVERSE_CONDITION (code, mode);
673 #endif
674 return reverse_condition (code);
675 }
676 #endif
677
678 /* Try a few special cases based on the comparison code. */
679 switch (code)
680 {
681 case GEU:
682 case GTU:
683 case LEU:
684 case LTU:
685 case NE:
686 case EQ:
687 /* It is always safe to reverse EQ and NE, even for floating
688 point.  Similarly, the unsigned comparisons are never used for
689 floating point, so we can reverse them in the default way. */
690 return reverse_condition (code);
691 case ORDERED:
692 case UNORDERED:
693 case LTGT:
694 case UNEQ:
695 /* If we already see an unordered comparison, we can be sure we are
696 dealing with floating point, so we don't need any more tests. */
697 return reverse_condition_maybe_unordered (code);
698 case UNLT:
699 case UNLE:
700 case UNGT:
701 case UNGE:
702 /* We don't have a safe way to reverse these yet. */
703 return UNKNOWN;
704 default:
705 break;
706 }
707
708 /* If we give up IEEE compatibility, all comparisons are reversible. */
709 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
710 || flag_unsafe_math_optimizations)
711 return reverse_condition (code);
712
713 if (GET_MODE_CLASS (mode) == MODE_CC
714 #ifdef HAVE_cc0
715 || arg0 == cc0_rtx
716 #endif
717 )
718 {
719 rtx prev;
720 /* Try to search for the comparison to determine the real mode.
721 This code is expensive, but with a sane machine description it
722 will never be used, since REVERSIBLE_CC_MODE will return true
723 in all cases. */
724 if (! insn)
725 return UNKNOWN;
726
727 for (prev = prev_nonnote_insn (insn);
728 prev != 0 && GET_CODE (prev) != CODE_LABEL;
729 prev = prev_nonnote_insn (prev))
730 {
731 rtx set = set_of (arg0, prev);
732 if (set && GET_CODE (set) == SET
733 && rtx_equal_p (SET_DEST (set), arg0))
734 {
735 rtx src = SET_SRC (set);
736
737 if (GET_CODE (src) == COMPARE)
738 {
739 rtx comparison = src;
740 arg0 = XEXP (src, 0);
741 mode = GET_MODE (arg0);
742 if (mode == VOIDmode)
743 mode = GET_MODE (XEXP (comparison, 1));
744 break;
745 }
746 /* We can get past reg-reg moves.  This may be useful for targets
747 like the i387, whose comparisons first move the flag registers around. */
748 if (REG_P (src))
749 {
750 arg0 = src;
751 continue;
752 }
753 }
754 /* If the register is clobbered in some way we cannot understand,
755 give up. */
756 if (set)
757 return UNKNOWN;
758 }
759 }
760
761 /* An integer condition. */
762 if (GET_CODE (arg0) == CONST_INT
763 || (GET_MODE (arg0) != VOIDmode
764 && GET_MODE_CLASS (mode) != MODE_CC
765 && ! FLOAT_MODE_P (mode)))
766 return reverse_condition (code);
767
768 return UNKNOWN;
769 }
770
771 /* A wrapper around the previous function that takes COMPARISON as an
772 rtx expression.  This simplifies many callers. */
773 enum rtx_code
774 reversed_comparison_code (comparison, insn)
775 rtx comparison, insn;
776 {
777 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
778 return UNKNOWN;
779 return reversed_comparison_code_parts (GET_CODE (comparison),
780 XEXP (comparison, 0),
781 XEXP (comparison, 1), insn);
782 }
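/* For illustration, a hedged usage sketch: given a conditional jump
   whose condition is, say, (gt (reg) (const_int 0)), one would expect

     reversed_comparison_code (XEXP (SET_SRC (pc_set (jump)), 0), jump)

   to return LE for an integer comparison, and UNKNOWN when the mode
   cannot be proven reversible (e.g. an IEEE float CCmode compare).  */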
783 \f
784 /* Given an rtx-code for a comparison, return the code for the negated
785 comparison. If no such code exists, return UNKNOWN.
786
787 WATCH OUT! reverse_condition is not safe to use on a jump that might
788 be acting on the results of an IEEE floating point comparison, because
789 of the special treatment of non-signaling nans in comparisons.
790 Use reversed_comparison_code instead. */
791
792 enum rtx_code
793 reverse_condition (code)
794 enum rtx_code code;
795 {
796 switch (code)
797 {
798 case EQ:
799 return NE;
800 case NE:
801 return EQ;
802 case GT:
803 return LE;
804 case GE:
805 return LT;
806 case LT:
807 return GE;
808 case LE:
809 return GT;
810 case GTU:
811 return LEU;
812 case GEU:
813 return LTU;
814 case LTU:
815 return GEU;
816 case LEU:
817 return GTU;
818 case UNORDERED:
819 return ORDERED;
820 case ORDERED:
821 return UNORDERED;
822
823 case UNLT:
824 case UNLE:
825 case UNGT:
826 case UNGE:
827 case UNEQ:
828 case LTGT:
829 return UNKNOWN;
830
831 default:
832 abort ();
833 }
834 }
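/* For illustration, a few input/output pairs for reverse_condition:

     EQ -> NE    GT -> LE    GEU -> LTU    ORDERED -> UNORDERED

   and UNKNOWN for UNLT/UNLE/UNGT/UNGE/UNEQ/LTGT, since those cannot be
   negated without introducing a different unordered code.  */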
835
836 /* Similar, but we're allowed to generate unordered comparisons, which
837 makes it safe for IEEE floating-point. Of course, we have to recognize
838 that the target will support them too... */
839
840 enum rtx_code
841 reverse_condition_maybe_unordered (code)
842 enum rtx_code code;
843 {
844 /* Non-IEEE formats don't have unordered conditions. */
845 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
846 return reverse_condition (code);
847
848 switch (code)
849 {
850 case EQ:
851 return NE;
852 case NE:
853 return EQ;
854 case GT:
855 return UNLE;
856 case GE:
857 return UNLT;
858 case LT:
859 return UNGE;
860 case LE:
861 return UNGT;
862 case LTGT:
863 return UNEQ;
864 case UNORDERED:
865 return ORDERED;
866 case ORDERED:
867 return UNORDERED;
868 case UNLT:
869 return GE;
870 case UNLE:
871 return GT;
872 case UNGT:
873 return LE;
874 case UNGE:
875 return LT;
876 case UNEQ:
877 return LTGT;
878
879 default:
880 abort ();
881 }
882 }
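/* For illustration of why the unordered codes are needed: under IEEE
   semantics with a possible NaN operand,

     !(a > b)  is  (a <= b) || unordered (a, b),  i.e. UNLE,

   which is exactly the GT -> UNLE mapping above; plain LE would be
   wrong when either operand is a NaN.  */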
883
884 /* Similar, but return the code when two operands of a comparison are swapped.
885 This IS safe for IEEE floating-point. */
886
887 enum rtx_code
888 swap_condition (code)
889 enum rtx_code code;
890 {
891 switch (code)
892 {
893 case EQ:
894 case NE:
895 case UNORDERED:
896 case ORDERED:
897 case UNEQ:
898 case LTGT:
899 return code;
900
901 case GT:
902 return LT;
903 case GE:
904 return LE;
905 case LT:
906 return GT;
907 case LE:
908 return GE;
909 case GTU:
910 return LTU;
911 case GEU:
912 return LEU;
913 case LTU:
914 return GTU;
915 case LEU:
916 return GEU;
917 case UNLT:
918 return UNGT;
919 case UNLE:
920 return UNGE;
921 case UNGT:
922 return UNLT;
923 case UNGE:
924 return UNLE;
925
926 default:
927 abort ();
928 }
929 }
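/* For illustration: swapping operands turns (lt a b) into (gt b a),
   so swap_condition (LT) == GT, while symmetric codes such as EQ, NE,
   ORDERED and UNORDERED map to themselves.  Unlike reverse_condition,
   this is safe even for IEEE floating point.  */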
930
931 /* Given a comparison CODE, return the corresponding unsigned comparison.
932 If CODE is an equality comparison or already an unsigned comparison,
933 CODE is returned. */
934
935 enum rtx_code
936 unsigned_condition (code)
937 enum rtx_code code;
938 {
939 switch (code)
940 {
941 case EQ:
942 case NE:
943 case GTU:
944 case GEU:
945 case LTU:
946 case LEU:
947 return code;
948
949 case GT:
950 return GTU;
951 case GE:
952 return GEU;
953 case LT:
954 return LTU;
955 case LE:
956 return LEU;
957
958 default:
959 abort ();
960 }
961 }
962
963 /* Similarly, return the signed version of a comparison. */
964
965 enum rtx_code
966 signed_condition (code)
967 enum rtx_code code;
968 {
969 switch (code)
970 {
971 case EQ:
972 case NE:
973 case GT:
974 case GE:
975 case LT:
976 case LE:
977 return code;
978
979 case GTU:
980 return GT;
981 case GEU:
982 return GE;
983 case LTU:
984 return LT;
985 case LEU:
986 return LE;
987
988 default:
989 abort ();
990 }
991 }
992 \f
993 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
994 truth of CODE1 implies the truth of CODE2. */
995
996 int
997 comparison_dominates_p (code1, code2)
998 enum rtx_code code1, code2;
999 {
1000 /* UNKNOWN comparison codes can happen as a result of trying to reverse
1001 comparison codes.
1002 They can't match anything, so we have to reject them here. */
1003 if (code1 == UNKNOWN || code2 == UNKNOWN)
1004 return 0;
1005
1006 if (code1 == code2)
1007 return 1;
1008
1009 switch (code1)
1010 {
1011 case UNEQ:
1012 if (code2 == UNLE || code2 == UNGE)
1013 return 1;
1014 break;
1015
1016 case EQ:
1017 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1018 || code2 == ORDERED)
1019 return 1;
1020 break;
1021
1022 case UNLT:
1023 if (code2 == UNLE || code2 == NE)
1024 return 1;
1025 break;
1026
1027 case LT:
1028 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1029 return 1;
1030 break;
1031
1032 case UNGT:
1033 if (code2 == UNGE || code2 == NE)
1034 return 1;
1035 break;
1036
1037 case GT:
1038 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1039 return 1;
1040 break;
1041
1042 case GE:
1043 case LE:
1044 if (code2 == ORDERED)
1045 return 1;
1046 break;
1047
1048 case LTGT:
1049 if (code2 == NE || code2 == ORDERED)
1050 return 1;
1051 break;
1052
1053 case LTU:
1054 if (code2 == LEU || code2 == NE)
1055 return 1;
1056 break;
1057
1058 case GTU:
1059 if (code2 == GEU || code2 == NE)
1060 return 1;
1061 break;
1062
1063 case UNORDERED:
1064 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1065 || code2 == UNGE || code2 == UNGT)
1066 return 1;
1067 break;
1068
1069 default:
1070 break;
1071 }
1072
1073 return 0;
1074 }
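/* For illustration: comparison_dominates_p (EQ, LE) is 1, because
   a == b implies a <= b, while comparison_dominates_p (LE, EQ) is 0,
   since a <= b tells us nothing certain about a == b.  */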
1075 \f
1076 /* Return 1 if INSN is an unconditional jump and nothing else. */
1077
1078 int
1079 simplejump_p (insn)
1080 rtx insn;
1081 {
1082 return (GET_CODE (insn) == JUMP_INSN
1083 && GET_CODE (PATTERN (insn)) == SET
1084 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1085 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1086 }
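/* For illustration, the only pattern shape accepted above is the bare

     (set (pc) (label_ref LABEL))

   form; jumps wrapped in a PARALLEL or with other side effects are
   rejected, which is why callers needing those use any_uncondjump_p.  */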
1087
1088 /* Return nonzero if INSN is a (possibly) conditional jump
1089 and nothing more.
1090
1091 Use of this function is deprecated, since we need to support combined
1092 branch and compare insns.  Use any_condjump_p instead whenever possible. */
1093
1094 int
1095 condjump_p (insn)
1096 rtx insn;
1097 {
1098 rtx x = PATTERN (insn);
1099
1100 if (GET_CODE (x) != SET
1101 || GET_CODE (SET_DEST (x)) != PC)
1102 return 0;
1103
1104 x = SET_SRC (x);
1105 if (GET_CODE (x) == LABEL_REF)
1106 return 1;
1107 else
1108 return (GET_CODE (x) == IF_THEN_ELSE
1109 && ((GET_CODE (XEXP (x, 2)) == PC
1110 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1111 || GET_CODE (XEXP (x, 1)) == RETURN))
1112 || (GET_CODE (XEXP (x, 1)) == PC
1113 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1114 || GET_CODE (XEXP (x, 2)) == RETURN))));
1115
1117 }
1118
1119 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1120 PARALLEL.
1121
1122 Use of this function is deprecated, since we need to support combined
1123 branch and compare insns.  Use any_condjump_p instead whenever possible. */
1124
1125 int
1126 condjump_in_parallel_p (insn)
1127 rtx insn;
1128 {
1129 rtx x = PATTERN (insn);
1130
1131 if (GET_CODE (x) != PARALLEL)
1132 return 0;
1133 else
1134 x = XVECEXP (x, 0, 0);
1135
1136 if (GET_CODE (x) != SET)
1137 return 0;
1138 if (GET_CODE (SET_DEST (x)) != PC)
1139 return 0;
1140 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1141 return 1;
1142 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1143 return 0;
1144 if (XEXP (SET_SRC (x), 2) == pc_rtx
1145 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1146 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1147 return 1;
1148 if (XEXP (SET_SRC (x), 1) == pc_rtx
1149 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1150 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1151 return 1;
1152 return 0;
1153 }
1154
1155 /* Return the SET that sets PC in jump insn INSN, otherwise NULL_RTX. */
1156
1157 rtx
1158 pc_set (insn)
1159 rtx insn;
1160 {
1161 rtx pat;
1162 if (GET_CODE (insn) != JUMP_INSN)
1163 return NULL_RTX;
1164 pat = PATTERN (insn);
1165
1166 /* The set is allowed to appear either as the insn pattern or
1167 the first set in a PARALLEL. */
1168 if (GET_CODE (pat) == PARALLEL)
1169 pat = XVECEXP (pat, 0, 0);
1170 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1171 return pat;
1172
1173 return NULL_RTX;
1174 }
1175
1176 /* Return true when insn is an unconditional direct jump,
1177 possibly bundled inside a PARALLEL. */
1178
1179 int
1180 any_uncondjump_p (insn)
1181 rtx insn;
1182 {
1183 rtx x = pc_set (insn);
1184 if (!x)
1185 return 0;
1186 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1187 return 0;
1188 return 1;
1189 }
1190
1191 /* Return true when insn is a conditional jump. This function works for
1192 instructions containing PC sets in PARALLELs. The instruction may have
1193 various other effects, so before removing the jump you must verify
1194 onlyjump_p.
1195
1196 Note that unlike condjump_p it returns false for unconditional jumps. */
1197
1198 int
1199 any_condjump_p (insn)
1200 rtx insn;
1201 {
1202 rtx x = pc_set (insn);
1203 enum rtx_code a, b;
1204
1205 if (!x)
1206 return 0;
1207 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1208 return 0;
1209
1210 a = GET_CODE (XEXP (SET_SRC (x), 1));
1211 b = GET_CODE (XEXP (SET_SRC (x), 2));
1212
1213 return ((b == PC && (a == LABEL_REF || a == RETURN))
1214 || (a == PC && (b == LABEL_REF || b == RETURN)));
1215 }
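/* For illustration, the shapes recognized above look like

     (set (pc) (if_then_else (COND ...) (label_ref LABEL) (pc)))

   or the mirrored form with the label in the else arm, possibly as the
   first element of a PARALLEL; a RETURN may stand in for the label.  */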
1216
1217 /* Return the label of a conditional jump. */
1218
1219 rtx
1220 condjump_label (insn)
1221 rtx insn;
1222 {
1223 rtx x = pc_set (insn);
1224
1225 if (!x)
1226 return NULL_RTX;
1227 x = SET_SRC (x);
1228 if (GET_CODE (x) == LABEL_REF)
1229 return x;
1230 if (GET_CODE (x) != IF_THEN_ELSE)
1231 return NULL_RTX;
1232 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1233 return XEXP (x, 1);
1234 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1235 return XEXP (x, 2);
1236 return NULL_RTX;
1237 }
1238
1239 /* Return true if INSN is a (possibly conditional) return insn. */
1240
1241 static int
1242 returnjump_p_1 (loc, data)
1243 rtx *loc;
1244 void *data ATTRIBUTE_UNUSED;
1245 {
1246 rtx x = *loc;
1247
1248 return x && (GET_CODE (x) == RETURN
1249 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
1250 }
1251
1252 int
1253 returnjump_p (insn)
1254 rtx insn;
1255 {
1256 if (GET_CODE (insn) != JUMP_INSN)
1257 return 0;
1258 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1259 }
1260
1261 /* Return true if INSN is a jump that only transfers control and
1262 nothing more. */
1263
1264 int
1265 onlyjump_p (insn)
1266 rtx insn;
1267 {
1268 rtx set;
1269
1270 if (GET_CODE (insn) != JUMP_INSN)
1271 return 0;
1272
1273 set = single_set (insn);
1274 if (set == NULL)
1275 return 0;
1276 if (GET_CODE (SET_DEST (set)) != PC)
1277 return 0;
1278 if (side_effects_p (SET_SRC (set)))
1279 return 0;
1280
1281 return 1;
1282 }
1283
1284 #ifdef HAVE_cc0
1285
1286 /* Return non-zero if X is an RTX that only sets the condition codes
1287 and has no side effects. */
1288
1289 int
1290 only_sets_cc0_p (x)
1291 rtx x;
1292 {
1293
1294 if (! x)
1295 return 0;
1296
1297 if (INSN_P (x))
1298 x = PATTERN (x);
1299
1300 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1301 }
1302
1303 /* Return 1 if X is an RTX that does nothing but set the condition codes
1304 and CLOBBER or USE registers.
1305 Return -1 if X does explicitly set the condition codes,
1306 but also does other things. */
1307
1308 int
1309 sets_cc0_p (x)
1310 rtx x;
1311 {
1312
1313 if (! x)
1314 return 0;
1315
1316 if (INSN_P (x))
1317 x = PATTERN (x);
1318
1319 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1320 return 1;
1321 if (GET_CODE (x) == PARALLEL)
1322 {
1323 int i;
1324 int sets_cc0 = 0;
1325 int other_things = 0;
1326 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1327 {
1328 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1329 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1330 sets_cc0 = 1;
1331 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1332 other_things = 1;
1333 }
1334 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1335 }
1336 return 0;
1337 }
1338 #endif
1339 \f
1340 /* Follow any unconditional jump at LABEL;
1341 return the ultimate label reached by any such chain of jumps.
1342 If LABEL is not followed by a jump, return LABEL.
1343 If the chain loops or we can't find the end, return LABEL,
1344 since that tells caller to avoid changing the insn.
1345
1346 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1347 a USE or CLOBBER. */
1348
1349 rtx
1350 follow_jumps (label)
1351 rtx label;
1352 {
1353 rtx insn;
1354 rtx next;
1355 rtx value = label;
1356 int depth;
1357
1358 for (depth = 0;
1359 (depth < 10
1360 && (insn = next_active_insn (value)) != 0
1361 && GET_CODE (insn) == JUMP_INSN
1362 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1363 && onlyjump_p (insn))
1364 || GET_CODE (PATTERN (insn)) == RETURN)
1365 && (next = NEXT_INSN (insn))
1366 && GET_CODE (next) == BARRIER);
1367 depth++)
1368 {
1369 /* Don't chain through the insn that jumps into a loop
1370 from outside the loop,
1371 since that would create multiple loop entry jumps
1372 and prevent loop optimization. */
1373 rtx tem;
1374 if (!reload_completed)
1375 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1376 if (GET_CODE (tem) == NOTE
1377 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1378 /* ??? Optional. Disables some optimizations, but makes
1379 gcov output more accurate with -O. */
1380 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1381 return value;
1382
1383 /* If we have found a cycle, make the insn jump to itself. */
1384 if (JUMP_LABEL (insn) == label)
1385 return label;
1386
1387 tem = next_active_insn (JUMP_LABEL (insn));
1388 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1389 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1390 break;
1391
1392 value = JUMP_LABEL (insn);
1393 }
1394 if (depth == 10)
1395 return label;
1396 return value;
1397 }
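/* For illustration, given a hypothetical chain

     L1: jump L2 ... L2: jump L3 ... L3: real code

   follow_jumps (L1) returns L3.  The depth limit of 10 and the cycle
   check guarantee termination even on pathological jump chains.  */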
1398
1399 \f
1400 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1401 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1402 in INSN, then store one of them in JUMP_LABEL (INSN).
1403 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1404 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1405 Also, when there are consecutive labels, canonicalize on the last of them.
1406
1407 Note that two labels separated by a loop-beginning note
1408 must be kept distinct if we have not yet done loop-optimization,
1409 because the gap between them is where loop-optimize
1410 will want to move invariant code to. */
1412
1413 void
1414 mark_jump_label (x, insn, in_mem)
1415 rtx x;
1416 rtx insn;
1417 int in_mem;
1418 {
1419 RTX_CODE code = GET_CODE (x);
1420 int i;
1421 const char *fmt;
1422
1423 switch (code)
1424 {
1425 case PC:
1426 case CC0:
1427 case REG:
1428 case SUBREG:
1429 case CONST_INT:
1430 case CONST_DOUBLE:
1431 case CLOBBER:
1432 case CALL:
1433 return;
1434
1435 case MEM:
1436 in_mem = 1;
1437 break;
1438
1439 case SYMBOL_REF:
1440 if (!in_mem)
1441 return;
1442
1443 /* If this is a constant-pool reference, see if it is a label. */
1444 if (CONSTANT_POOL_ADDRESS_P (x))
1445 mark_jump_label (get_pool_constant (x), insn, in_mem);
1446 break;
1447
1448 case LABEL_REF:
1449 {
1450 rtx label = XEXP (x, 0);
1451
1452 /* Ignore remaining references to unreachable labels that
1453 have been deleted. */
1454 if (GET_CODE (label) == NOTE
1455 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1456 break;
1457
1458 if (GET_CODE (label) != CODE_LABEL)
1459 abort ();
1460
1461 /* Ignore references to labels of containing functions. */
1462 if (LABEL_REF_NONLOCAL_P (x))
1463 break;
1464
1465 XEXP (x, 0) = label;
1466 if (! insn || ! INSN_DELETED_P (insn))
1467 ++LABEL_NUSES (label);
1468
1469 if (insn)
1470 {
1471 if (GET_CODE (insn) == JUMP_INSN)
1472 JUMP_LABEL (insn) = label;
1473 else
1474 {
1475 /* Add a REG_LABEL note for LABEL unless there already
1476 is one. All uses of a label, except for labels
1477 that are the targets of jumps, must have a
1478 REG_LABEL note. */
1479 if (! find_reg_note (insn, REG_LABEL, label))
1480 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1481 REG_NOTES (insn));
1482 }
1483 }
1484 return;
1485 }
1486
1487 /* Do walk the labels in a vector, but not the first operand of an
1488 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1489 case ADDR_VEC:
1490 case ADDR_DIFF_VEC:
1491 if (! INSN_DELETED_P (insn))
1492 {
1493 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1494
1495 for (i = 0; i < XVECLEN (x, eltnum); i++)
1496 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1497 }
1498 return;
1499
1500 default:
1501 break;
1502 }
1503
1504 fmt = GET_RTX_FORMAT (code);
1505 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1506 {
1507 if (fmt[i] == 'e')
1508 mark_jump_label (XEXP (x, i), insn, in_mem);
1509 else if (fmt[i] == 'E')
1510 {
1511 int j;
1512 for (j = 0; j < XVECLEN (x, i); j++)
1513 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1514 }
1515 }
1516 }
1517
1518 /* If all INSN does is set the pc, delete it,
1519 and delete the insn that set the condition codes for it
1520 if that's what the previous thing was. */
1521
1522 void
1523 delete_jump (insn)
1524 rtx insn;
1525 {
1526 rtx set = single_set (insn);
1527
1528 if (set && GET_CODE (SET_DEST (set)) == PC)
1529 delete_computation (insn);
1530 }
1531
1532 /* Verify INSN is a BARRIER and delete it. */
1533
1534 void
1535 delete_barrier (insn)
1536 rtx insn;
1537 {
1538 if (GET_CODE (insn) != BARRIER)
1539 abort ();
1540
1541 delete_insn (insn);
1542 }
1543
1544 /* Recursively delete prior insns that compute the value (used only by INSN
1545 which the caller is deleting) stored in the register mentioned by NOTE
1546 which is a REG_DEAD note associated with INSN. */
1547
1548 static void
1549 delete_prior_computation (note, insn)
1550 rtx note;
1551 rtx insn;
1552 {
1553 rtx our_prev;
1554 rtx reg = XEXP (note, 0);
1555
1556 for (our_prev = prev_nonnote_insn (insn);
1557 our_prev && (GET_CODE (our_prev) == INSN
1558 || GET_CODE (our_prev) == CALL_INSN);
1559 our_prev = prev_nonnote_insn (our_prev))
1560 {
1561 rtx pat = PATTERN (our_prev);
1562
1563 /* If we reach a CALL which is not calling a const function
1564 or the callee pops the arguments, then give up. */
1565 if (GET_CODE (our_prev) == CALL_INSN
1566 && (! CONST_OR_PURE_CALL_P (our_prev)
1567 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1568 break;
1569
1570 /* If we reach a SEQUENCE, it is too complex to try to
1571 do anything with it, so give up. */
1572 if (GET_CODE (pat) == SEQUENCE)
1573 break;
1574
1575 if (GET_CODE (pat) == USE
1576 && GET_CODE (XEXP (pat, 0)) == INSN)
1577 /* reorg creates USEs that look like this. We leave them
1578 alone because reorg needs them for its own purposes. */
1579 break;
1580
1581 if (reg_set_p (reg, pat))
1582 {
1583 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1584 break;
1585
1586 if (GET_CODE (pat) == PARALLEL)
1587 {
1588 /* If we find a SET of something else, we can't
1589 delete the insn. */
1590
1591 int i;
1592
1593 for (i = 0; i < XVECLEN (pat, 0); i++)
1594 {
1595 rtx part = XVECEXP (pat, 0, i);
1596
1597 if (GET_CODE (part) == SET
1598 && SET_DEST (part) != reg)
1599 break;
1600 }
1601
1602 if (i == XVECLEN (pat, 0))
1603 delete_computation (our_prev);
1604 }
1605 else if (GET_CODE (pat) == SET
1606 && GET_CODE (SET_DEST (pat)) == REG)
1607 {
1608 int dest_regno = REGNO (SET_DEST (pat));
1609 int dest_endregno
1610 = (dest_regno
1611 + (dest_regno < FIRST_PSEUDO_REGISTER
1612 ? HARD_REGNO_NREGS (dest_regno,
1613 GET_MODE (SET_DEST (pat))) : 1));
1614 int regno = REGNO (reg);
1615 int endregno
1616 = (regno
1617 + (regno < FIRST_PSEUDO_REGISTER
1618 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
1619
1620 if (dest_regno >= regno
1621 && dest_endregno <= endregno)
1622 delete_computation (our_prev);
1623
1624 /* We may have a multi-word hard register and some, but not
1625 all, of the words of the register are needed in subsequent
1626 insns. Write REG_UNUSED notes for those parts that were not
1627 needed. */
1628 else if (dest_regno <= regno
1629 && dest_endregno >= endregno)
1630 {
1631 int i;
1632
1633 REG_NOTES (our_prev)
1634 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1635 REG_NOTES (our_prev));
1636
1637 for (i = dest_regno; i < dest_endregno; i++)
1638 if (! find_regno_note (our_prev, REG_UNUSED, i))
1639 break;
1640
1641 if (i == dest_endregno)
1642 delete_computation (our_prev);
1643 }
1644 }
1645
1646 break;
1647 }
1648
1649 /* If PAT references the register that dies here, it is an
1650 additional use. Hence any prior SET isn't dead. However, this
1651 insn becomes the new place for the REG_DEAD note. */
1652 if (reg_overlap_mentioned_p (reg, pat))
1653 {
1654 XEXP (note, 1) = REG_NOTES (our_prev);
1655 REG_NOTES (our_prev) = note;
1656 break;
1657 }
1658 }
1659 }
1660
1661 /* Delete INSN and recursively delete insns that compute values used only
1662 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1663 If we are running before flow.c, we need do nothing since flow.c will
1664 delete dead code. We also can't know if the registers being used are
1665 dead or not at this point.
1666
1667 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1668 nothing other than set a register that dies in this insn, we can delete
1669 that insn as well.
1670
1671 On machines with CC0, if CC0 is used in this insn, we may be able to
1672 delete the insn that set it. */
1673
1674 static void
1675 delete_computation (insn)
1676 rtx insn;
1677 {
1678 rtx note, next;
1679
1680 #ifdef HAVE_cc0
1681 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1682 {
1683 rtx prev = prev_nonnote_insn (insn);
1684 /* We assume that at this stage
1685 CC's are always set explicitly
1686 and always immediately before the jump that
1687 will use them. So if the previous insn
1688 exists to set the CC's, delete it
1689 (unless it performs auto-increments, etc.). */
1690 if (prev && GET_CODE (prev) == INSN
1691 && sets_cc0_p (PATTERN (prev)))
1692 {
1693 if (sets_cc0_p (PATTERN (prev)) > 0
1694 && ! side_effects_p (PATTERN (prev)))
1695 delete_computation (prev);
1696 else
1697 /* Otherwise, show that cc0 won't be used. */
1698 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1699 cc0_rtx, REG_NOTES (prev));
1700 }
1701 }
1702 #endif
1703
1704 for (note = REG_NOTES (insn); note; note = next)
1705 {
1706 next = XEXP (note, 1);
1707
1708 if (REG_NOTE_KIND (note) != REG_DEAD
1709 /* Verify that the REG_NOTE is legitimate. */
1710 || GET_CODE (XEXP (note, 0)) != REG)
1711 continue;
1712
1713 delete_prior_computation (note, insn);
1714 }
1715
1716 delete_related_insns (insn);
1717 }
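/* For illustration, a hypothetical deletion cascade: deleting

     (jump_insn ... (reg 100))    carrying a REG_DEAD note for (reg 100)

   also deletes a prior (set (reg 100) ...) when that insn does nothing
   else, and then recurses on the inputs of that set in the same way.  */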
1718 \f
1719 /* Delete insn INSN from the chain of insns and update label ref counts
1720 and delete insns now unreachable.
1721
1722 Returns the first insn after INSN that was not deleted.
1723
1724 Use of this function is deprecated.  Use delete_insn instead, followed
1725 by a cfg_cleanup pass to delete unreachable code if needed. */
1726
1727 rtx
1728 delete_related_insns (insn)
1729 rtx insn;
1730 {
1731 int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1732 rtx note;
1733 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
1734
1735 while (next && INSN_DELETED_P (next))
1736 next = NEXT_INSN (next);
1737
1738 /* This insn is already deleted => return first following nondeleted. */
1739 if (INSN_DELETED_P (insn))
1740 return next;
1741
1742 delete_insn (insn);
1743
1744 /* If instruction is followed by a barrier,
1745 delete the barrier too. */
1746
1747 if (next != 0 && GET_CODE (next) == BARRIER)
1748 delete_insn (next);
1749
1750 /* If deleting a jump, decrement the count of the label,
1751 and delete the label if it is now unused. */
1752
1753 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1754 {
1755 rtx lab = JUMP_LABEL (insn), lab_next;
1756
1757 if (LABEL_NUSES (lab) == 0)
1758 {
1759 /* This can delete NEXT or PREV,
1760 either directly if NEXT is JUMP_LABEL (INSN),
1761 or indirectly through more levels of jumps. */
1762 delete_related_insns (lab);
1763
1764 /* I feel a little doubtful about this loop,
1765 but I see no clean and sure alternative way
1766 to find the first insn after INSN that is not now deleted.
1767 I hope this works. */
1768 while (next && INSN_DELETED_P (next))
1769 next = NEXT_INSN (next);
1770 return next;
1771 }
1772 else if ((lab_next = next_nonnote_insn (lab)) != NULL
1773 && GET_CODE (lab_next) == JUMP_INSN
1774 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
1775 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
1776 {
1777 /* If we're deleting the tablejump, delete the dispatch table.
1778 We may not be able to kill the label immediately preceding
1779 just yet, as it might be referenced in code leading up to
1780 the tablejump. */
1781 delete_related_insns (lab_next);
1782 }
1783 }
1784
1785 /* Likewise if we're deleting a dispatch table. */
1786
1787 if (GET_CODE (insn) == JUMP_INSN
1788 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1789 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1790 {
1791 rtx pat = PATTERN (insn);
1792 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1793 int len = XVECLEN (pat, diff_vec_p);
1794
1795 for (i = 0; i < len; i++)
1796 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1797 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1798 while (next && INSN_DELETED_P (next))
1799 next = NEXT_INSN (next);
1800 return next;
1801 }
1802
1803 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1804 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1805 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1806 if (REG_NOTE_KIND (note) == REG_LABEL
1807 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1808 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1809 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1810 delete_related_insns (XEXP (note, 0));
1811
1812 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1813 prev = PREV_INSN (prev);
1814
1815 /* If INSN was a label and a dispatch table follows it,
1816 delete the dispatch table. The tablejump must have gone already.
1817 It isn't useful to fall through into a table. */
1818
1819 if (was_code_label
1820 && NEXT_INSN (insn) != 0
1821 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1822 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1823 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1824 next = delete_related_insns (NEXT_INSN (insn));
1825
1826 /* If INSN was a label, delete insns following it if now unreachable. */
1827
1828 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1829 {
1830 RTX_CODE code;
1831 while (next != 0
1832 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1833 || code == NOTE || code == BARRIER
1834 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1835 {
1836 if (code == NOTE
1837 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1838 next = NEXT_INSN (next);
1839 /* Keep going past other deleted labels to delete what follows. */
1840 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1841 next = NEXT_INSN (next);
1842 else
1843 /* Note: if this deletes a jump, it can cause more
1844 deletion of unreachable code, after a different label.
1845 As long as the value from this recursive call is correct,
1846 this invocation functions correctly. */
1847 next = delete_related_insns (next);
1848 }
1849 }
1850
1851 return next;
1852 }
1853
1854 /* Advance from INSN until reaching something not deleted;
1855 then return that.  May return INSN itself. */
1856
1857 rtx
1858 next_nondeleted_insn (insn)
1859 rtx insn;
1860 {
1861 while (INSN_DELETED_P (insn))
1862 insn = NEXT_INSN (insn);
1863 return insn;
1864 }
1865 \f
1866 /* Delete a range of insns from FROM to TO, inclusive.
1867 This is for the sake of peephole optimization, so assume
1868 that whatever these insns do will still be done by a new
1869 peephole insn that will replace them. */
1870
1871 void
1872 delete_for_peephole (from, to)
1873 rtx from, to;
1874 {
1875 rtx insn = from;
1876
1877 while (1)
1878 {
1879 rtx next = NEXT_INSN (insn);
1880 rtx prev = PREV_INSN (insn);
1881
1882 if (GET_CODE (insn) != NOTE)
1883 {
1884 INSN_DELETED_P (insn) = 1;
1885
1886 /* Patch this insn out of the chain. */
1887 /* We don't do this all at once, because we
1888 must preserve all NOTEs. */
1889 if (prev)
1890 NEXT_INSN (prev) = next;
1891
1892 if (next)
1893 PREV_INSN (next) = prev;
1894 }
1895
1896 if (insn == to)
1897 break;
1898 insn = next;
1899 }
1900
1901 /* Note that if TO is an unconditional jump
1902 we *do not* delete the BARRIER that follows,
1903 since the peephole that replaces this sequence
1904 is also an unconditional jump in that case. */
1905 }
1906 \f
1907 /* We have determined that INSN is never reached, and are about to
1908 delete it. Print a warning if the user asked for one.
1909
1910 To try to make this warning more useful, this should only be called
1911 once per basic block not reached, and it only warns when the basic
1912 block contains more than one line from the current function, and
1913 contains at least one operation. CSE and inlining can duplicate insns,
1914 so it's possible to get spurious warnings from this. */
1915
1916 void
1917 never_reached_warning (avoided_insn)
1918 rtx avoided_insn;
1919 {
1920 rtx insn;
1921 rtx a_line_note = NULL;
1922 int two_avoided_lines = 0;
1923 int contains_insn = 0;
1924
1925 if (! warn_notreached)
1926 return;
1927
1928 /* Scan forwards, looking at LINE_NUMBER notes, until
1929 we hit a LABEL or we run out of insns. */
1930
1931 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
1932 {
1933 if (GET_CODE (insn) == CODE_LABEL)
1934 break;
1935 else if (GET_CODE (insn) == NOTE /* A line number note? */
1936 && NOTE_LINE_NUMBER (insn) >= 0)
1937 {
1938 if (a_line_note == NULL)
1939 a_line_note = insn;
1940 else
1941 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1942 != NOTE_LINE_NUMBER (insn));
1943 }
1944 else if (INSN_P (insn))
1945 contains_insn = 1;
1946 }
1947 if (two_avoided_lines && contains_insn)
1948 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
1949 NOTE_LINE_NUMBER (a_line_note),
1950 "will never be executed");
1951 }
1952 \f
1953 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1954 NLABEL as a return. Accrue modifications into the change group. */
1955
1956 static void
1957 redirect_exp_1 (loc, olabel, nlabel, insn)
1958 rtx *loc;
1959 rtx olabel, nlabel;
1960 rtx insn;
1961 {
1962 rtx x = *loc;
1963 RTX_CODE code = GET_CODE (x);
1964 int i;
1965 const char *fmt;
1966
1967 if (code == LABEL_REF)
1968 {
1969 if (XEXP (x, 0) == olabel)
1970 {
1971 rtx n;
1972 if (nlabel)
1973 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1974 else
1975 n = gen_rtx_RETURN (VOIDmode);
1976
1977 validate_change (insn, loc, n, 1);
1978 return;
1979 }
1980 }
1981 else if (code == RETURN && olabel == 0)
1982 {
1983 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1984 if (loc == &PATTERN (insn))
1985 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1986 validate_change (insn, loc, x, 1);
1987 return;
1988 }
1989
1990 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1991 && GET_CODE (SET_SRC (x)) == LABEL_REF
1992 && XEXP (SET_SRC (x), 0) == olabel)
1993 {
1994 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
1995 return;
1996 }
1997
1998 fmt = GET_RTX_FORMAT (code);
1999 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2000 {
2001 if (fmt[i] == 'e')
2002 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
2003 else if (fmt[i] == 'E')
2004 {
2005 int j;
2006 for (j = 0; j < XVECLEN (x, i); j++)
2007 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
2008 }
2009 }
2010 }
2011
2012 /* Similar, but apply the change group and report success or failure. */
2013
2014 static int
2015 redirect_exp (olabel, nlabel, insn)
2016 rtx olabel, nlabel;
2017 rtx insn;
2018 {
2019 rtx *loc;
2020
2021 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2022 loc = &XVECEXP (PATTERN (insn), 0, 0);
2023 else
2024 loc = &PATTERN (insn);
2025
2026 redirect_exp_1 (loc, olabel, nlabel, insn);
2027 if (num_validated_changes () == 0)
2028 return 0;
2029
2030 return apply_change_group ();
2031 }
2032
2033 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2034 the modifications into the change group. Return false if we did
2035 not see how to do that. */
2036
2037 int
2038 redirect_jump_1 (jump, nlabel)
2039 rtx jump, nlabel;
2040 {
2041 int ochanges = num_validated_changes ();
2042 rtx *loc;
2043
2044 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2045 loc = &XVECEXP (PATTERN (jump), 0, 0);
2046 else
2047 loc = &PATTERN (jump);
2048
2049 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2050 return num_validated_changes () > ochanges;
2051 }
2052
2053 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2054 jump target label is unused as a result, it and the code following
2055 it may be deleted.
2056
2057 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2058 RETURN insn.
2059
2060 The return value will be 1 if the change was made, 0 if it wasn't
2061 (this can only occur for NLABEL == 0). */
2062
2063 int
2064 redirect_jump (jump, nlabel, delete_unused)
2065 rtx jump, nlabel;
2066 int delete_unused;
2067 {
2068 rtx olabel = JUMP_LABEL (jump);
2069
2070 if (nlabel == olabel)
2071 return 1;
2072
2073 if (! redirect_exp (olabel, nlabel, jump))
2074 return 0;
2075
2076 JUMP_LABEL (jump) = nlabel;
2077 if (nlabel)
2078 ++LABEL_NUSES (nlabel);
2079
2080 /* If we're eliding the jump over exception cleanups at the end of a
2081 function, move the function end note so that -Wreturn-type works. */
2082 if (olabel && nlabel
2083 && NEXT_INSN (olabel)
2084 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2085 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2086 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
2087
2088 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
2089 delete_related_insns (olabel);
2090
2091 return 1;
2092 }
2093
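/* A hypothetical caller-side sketch; JUMP and NEW_LABEL stand for an
   existing JUMP_INSN and CODE_LABEL.  The block is disabled, so nothing
   here is compiled.  */
#if 0
  if (redirect_jump (jump, new_label, 1))
    {
      /* JUMP now targets NEW_LABEL: JUMP_LABEL and LABEL_NUSES have been
	 updated, and the old label was deleted if it became unused.  */
    }
#endif
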
2094 /* Invert the jump condition of the conditional jump contained in
2095    jump insn INSN.  Accrue the modifications into the change group.  */
2096
2097 static void
2098 invert_exp_1 (insn)
2099 rtx insn;
2100 {
2101 RTX_CODE code;
2102 rtx x = pc_set (insn);
2103
2104 if (!x)
2105 abort ();
2106 x = SET_SRC (x);
2107
2108 code = GET_CODE (x);
2109
2110 if (code == IF_THEN_ELSE)
2111 {
2112 rtx comp = XEXP (x, 0);
2113 rtx tem;
2114 enum rtx_code reversed_code;
2115
2116       /* We can do this in two ways:  The preferable way is to reverse the
2117 	 comparison code, which is possible unless the reversed code is
2118 	 unknown (as for some IEEE floating-point comparisons).  Otherwise,
2119 	 swap the THEN-part and ELSE-part.  If we can do neither, fail.  */
2120
2121 reversed_code = reversed_comparison_code (comp, insn);
2122
2123 if (reversed_code != UNKNOWN)
2124 {
2125 validate_change (insn, &XEXP (x, 0),
2126 gen_rtx_fmt_ee (reversed_code,
2127 GET_MODE (comp), XEXP (comp, 0),
2128 XEXP (comp, 1)),
2129 1);
2130 return;
2131 }
2132
2133 tem = XEXP (x, 1);
2134 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2135 validate_change (insn, &XEXP (x, 2), tem, 1);
2136 }
2137 else
2138 abort ();
2139 }
2140
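/* For illustration (operands are hypothetical), inverting

     (set (pc) (if_then_else (eq (reg:SI 100) (const_int 0))
                             (label_ref L1) (pc)))

   preferably yields

     (set (pc) (if_then_else (ne (reg:SI 100) (const_int 0))
                             (label_ref L1) (pc)))

   but when the reversed comparison code is UNKNOWN, the THEN and ELSE
   arms are swapped instead:

     (set (pc) (if_then_else (eq (reg:SI 100) (const_int 0))
                             (pc) (label_ref L1)))  */
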
2141 /* Invert the jump condition of the conditional jump insn INSN.
2142
2143 Return 1 if we can do so, 0 if we cannot find a way to do so that
2144 matches a pattern. */
2145
2146 static int
2147 invert_exp (insn)
2148 rtx insn;
2149 {
2150 invert_exp_1 (insn);
2151 if (num_validated_changes () == 0)
2152 return 0;
2153
2154 return apply_change_group ();
2155 }
2156
2157 /* Invert the condition of the jump JUMP, and make it jump to label
2158 NLABEL instead of where it jumps now. Accrue changes into the
2159 change group. Return false if we didn't see how to perform the
2160 inversion and redirection. */
2161
2162 int
2163 invert_jump_1 (jump, nlabel)
2164 rtx jump, nlabel;
2165 {
2166 int ochanges;
2167
2168 ochanges = num_validated_changes ();
2169 invert_exp_1 (jump);
2170 if (num_validated_changes () == ochanges)
2171 return 0;
2172
2173 return redirect_jump_1 (jump, nlabel);
2174 }
2175
2176 /* Invert the condition of the jump JUMP, and make it jump to label
2177 NLABEL instead of where it jumps now. Return true if successful. */
2178
2179 int
2180 invert_jump (jump, nlabel, delete_unused)
2181 rtx jump, nlabel;
2182 int delete_unused;
2183 {
2184 /* We have to either invert the condition and change the label or
2185 do neither. Either operation could fail. We first try to invert
2186 the jump. If that succeeds, we try changing the label. If that fails,
2187 we invert the jump back to what it was. */
2188
2189 if (! invert_exp (jump))
2190 return 0;
2191
2192 if (redirect_jump (jump, nlabel, delete_unused))
2193 {
2194 invert_br_probabilities (jump);
2195
2196 return 1;
2197 }
2198
2199 if (! invert_exp (jump))
2200 /* This should just be putting it back the way it was. */
2201 abort ();
2202
2203 return 0;
2204 }
2205
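/* A hypothetical usage sketch (not compiled): invert a conditional jump
   and retarget it in one step.  */
#if 0
  if (invert_jump (jump, new_label, 0))
    {
      /* The condition was reversed, JUMP was redirected to NEW_LABEL, and
	 its branch probability notes were flipped; on failure JUMP is
	 left exactly as it was.  */
    }
#endif
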
2206 \f
2207 /* Like rtx_equal_p except that it considers two REGs as equal
2208 if they renumber to the same value and considers two commutative
2209 operations to be the same if the order of the operands has been
2210 reversed.
2211
2212 ??? Addition is not commutative on the PA due to the weird implicit
2213 space register selection rules for memory addresses. Therefore, we
2214 don't consider a + b == b + a.
2215
2216 We could/should make this test a little tighter. Possibly only
2217 disabling it on the PA via some backend macro or only disabling this
2218 case when the PLUS is inside a MEM. */
2219
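/* For example (register numbers are hypothetical): if reg_renumber
   assigned hard register 3 to both pseudo 100 and pseudo 101, then
   (reg:SI 100) and (reg:SI 101) are considered equal here even though
   rtx_equal_p would reject them.  Commutativity also applies, so
   (and:SI (reg:SI 100) (reg:SI 102)) matches
   (and:SI (reg:SI 102) (reg:SI 101)) under the same renumbering.  */
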
2220 int
2221 rtx_renumbered_equal_p (x, y)
2222 rtx x, y;
2223 {
2224 int i;
2225 RTX_CODE code = GET_CODE (x);
2226 const char *fmt;
2227
2228 if (x == y)
2229 return 1;
2230
2231 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2232 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2233 && GET_CODE (SUBREG_REG (y)) == REG)))
2234 {
2235 int reg_x = -1, reg_y = -1;
2236 int byte_x = 0, byte_y = 0;
2237
2238 if (GET_MODE (x) != GET_MODE (y))
2239 return 0;
2240
2241 /* If we haven't done any renumbering, don't
2242 make any assumptions. */
2243 if (reg_renumber == 0)
2244 return rtx_equal_p (x, y);
2245
2246 if (code == SUBREG)
2247 {
2248 reg_x = REGNO (SUBREG_REG (x));
2249 byte_x = SUBREG_BYTE (x);
2250
2251 if (reg_renumber[reg_x] >= 0)
2252 {
2253 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2254 GET_MODE (SUBREG_REG (x)),
2255 byte_x,
2256 GET_MODE (x));
2257 byte_x = 0;
2258 }
2259 }
2260 else
2261 {
2262 reg_x = REGNO (x);
2263 if (reg_renumber[reg_x] >= 0)
2264 reg_x = reg_renumber[reg_x];
2265 }
2266
2267 if (GET_CODE (y) == SUBREG)
2268 {
2269 reg_y = REGNO (SUBREG_REG (y));
2270 byte_y = SUBREG_BYTE (y);
2271
2272 if (reg_renumber[reg_y] >= 0)
2273 {
2274 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2275 GET_MODE (SUBREG_REG (y)),
2276 byte_y,
2277 GET_MODE (y));
2278 byte_y = 0;
2279 }
2280 }
2281 else
2282 {
2283 reg_y = REGNO (y);
2284 if (reg_renumber[reg_y] >= 0)
2285 reg_y = reg_renumber[reg_y];
2286 }
2287
2288 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2289 }
2290
2291 /* Now we have disposed of all the cases
2292 in which different rtx codes can match. */
2293 if (code != GET_CODE (y))
2294 return 0;
2295
2296 switch (code)
2297 {
2298 case PC:
2299 case CC0:
2300 case ADDR_VEC:
2301 case ADDR_DIFF_VEC:
2302 return 0;
2303
2304 case CONST_INT:
2305 return INTVAL (x) == INTVAL (y);
2306
2307 case LABEL_REF:
2308 /* We can't assume nonlocal labels have their following insns yet. */
2309 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2310 return XEXP (x, 0) == XEXP (y, 0);
2311
2312 /* Two label-refs are equivalent if they point at labels
2313 in the same position in the instruction stream. */
2314 return (next_real_insn (XEXP (x, 0))
2315 == next_real_insn (XEXP (y, 0)));
2316
2317 case SYMBOL_REF:
2318 return XSTR (x, 0) == XSTR (y, 0);
2319
2320 case CODE_LABEL:
2321 /* If we didn't match EQ equality above, they aren't the same. */
2322 return 0;
2323
2324 default:
2325 break;
2326 }
2327
2328 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2329
2330 if (GET_MODE (x) != GET_MODE (y))
2331 return 0;
2332
2333   /* For commutative operations, the RTXs match if the operands match in
2334      either order.  Also handle the simple binary and unary cases without a loop.
2335 
2336      ??? Don't consider PLUS a commutative operator; see comments above.  */
2337 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2338 && code != PLUS)
2339 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2340 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2341 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2342 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2343 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2344 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2345 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2346 else if (GET_RTX_CLASS (code) == '1')
2347 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2348
2349   /* Compare the elements.  If any pair of corresponding elements
2350      fails to match, return 0 for the whole thing.  */
2351
2352 fmt = GET_RTX_FORMAT (code);
2353 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2354 {
2355 int j;
2356 switch (fmt[i])
2357 {
2358 case 'w':
2359 if (XWINT (x, i) != XWINT (y, i))
2360 return 0;
2361 break;
2362
2363 case 'i':
2364 if (XINT (x, i) != XINT (y, i))
2365 return 0;
2366 break;
2367
2368 case 't':
2369 if (XTREE (x, i) != XTREE (y, i))
2370 return 0;
2371 break;
2372
2373 case 's':
2374 if (strcmp (XSTR (x, i), XSTR (y, i)))
2375 return 0;
2376 break;
2377
2378 case 'e':
2379 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2380 return 0;
2381 break;
2382
2383 case 'u':
2384 if (XEXP (x, i) != XEXP (y, i))
2385 return 0;
2386 /* fall through. */
2387 case '0':
2388 break;
2389
2390 case 'E':
2391 if (XVECLEN (x, i) != XVECLEN (y, i))
2392 return 0;
2393 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2394 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2395 return 0;
2396 break;
2397
2398 default:
2399 abort ();
2400 }
2401 }
2402 return 1;
2403 }
2404 \f
2405 /* If X is a hard register or equivalent to one or a subregister of one,
2406 return the hard register number. If X is a pseudo register that was not
2407 assigned a hard register, return the pseudo register number. Otherwise,
2408 return -1. Any rtx is valid for X. */
2409
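/* Illustrative cases (numbers are hypothetical): if pseudo 100 was
   assigned hard register 3, true_regnum of (reg:SI 100) is 3; if it got
   no hard register, the result is simply 100.  For a hard-register
   subreg such as (subreg:SI (reg:DI 4) 4), the result is 4 plus the
   offset computed by subreg_regno_offset -- e.g. 5 on a 32-bit
   little-endian target.  */
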
2410 int
2411 true_regnum (x)
2412 rtx x;
2413 {
2414 if (GET_CODE (x) == REG)
2415 {
2416 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2417 return reg_renumber[REGNO (x)];
2418 return REGNO (x);
2419 }
2420 if (GET_CODE (x) == SUBREG)
2421 {
2422 int base = true_regnum (SUBREG_REG (x));
2423 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2424 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2425 GET_MODE (SUBREG_REG (x)),
2426 SUBREG_BYTE (x), GET_MODE (x));
2427 }
2428 return -1;
2429 }
2430 \f
2431 /* Optimize code of the form:
2432
2433 for (x = a[i]; x; ...)
2434 ...
2435 for (x = a[i]; x; ...)
2436 ...
2437 foo:
2438
2439 Loop optimize will change the above code into
2440
2441 if (x = a[i])
2442 for (;;)
2443 { ...; if (! (x = ...)) break; }
2444 if (x = a[i])
2445 for (;;)
2446 { ...; if (! (x = ...)) break; }
2447 foo:
2448
2449    In general, if the first test fails, the program can branch
2450    directly to `foo' and skip the second try, which is doomed to fail.
2451    We run this after loop optimization and before flow analysis.  */
2452
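/* As a smaller illustration of the threading itself, in code such as

     if (x == 0) goto L1;
     ...
     L1: if (x == 0) goto L2;

   reaching L1 from the first branch guarantees that the second test
   succeeds (provided neither x nor relevant memory changed in between),
   so the first branch can be redirected straight to L2.  */
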
2453 /* When comparing the insn patterns, we track the fact that different
2454    pseudo-register numbers may have been used in each computation.
2455    The following array stores an equivalence -- same_regs[I] == J means
2456    that pseudo register I was used in the first set of tests in a context
2457    where J was used in the second set.  We also count the number of such
2458    pending equivalences.  If that count is still nonzero at the end of a
2459    comparison, the expressions really aren't the same.  */
2460
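/* For example (pseudo numbers are hypothetical): when the first sequence
   uses (reg 101) where the second uses (reg 107), we record
   same_regs[101] == 107 and bump num_same_regs.  If a later pair of SETs
   assigns both registers the same value, the entry is cancelled again;
   only when the count has returned to zero can the two sequences be
   considered equivalent.  */
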
2461 static int *same_regs;
2462
2463 static int num_same_regs;
2464
2465 /* Track any registers modified between the target of the first jump and
2466 the second jump. They never compare equal. */
2467
2468 static char *modified_regs;
2469
2470 /* Record if memory was modified. */
2471
2472 static int modified_mem;
2473
2474 /* Called via note_stores on each insn between the target of the first
2475 branch and the second branch. It marks any changed registers. */
2476
2477 static void
2478 mark_modified_reg (dest, x, data)
2479 rtx dest;
2480 rtx x;
2481 void *data ATTRIBUTE_UNUSED;
2482 {
2483 int regno;
2484 unsigned int i;
2485
2486 if (GET_CODE (dest) == SUBREG)
2487 dest = SUBREG_REG (dest);
2488
2489 if (GET_CODE (dest) == MEM)
2490 modified_mem = 1;
2491
2492 if (GET_CODE (dest) != REG)
2493 return;
2494
2495 regno = REGNO (dest);
2496 if (regno >= FIRST_PSEUDO_REGISTER)
2497 modified_regs[regno] = 1;
2498   /* Don't consider a hard condition code register as modified if it
2499      is only being set; thread_jumps will check whether it is set to
2500      the same value.  */
2501 else if (GET_MODE_CLASS (GET_MODE (dest)) != MODE_CC
2502 || GET_CODE (x) != SET
2503 || ! rtx_equal_p (dest, SET_DEST (x))
2504 || HARD_REGNO_NREGS (regno, GET_MODE (dest)) != 1)
2505 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
2506 modified_regs[regno + i] = 1;
2507 }
2508
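/* For instance, when note_stores walks a pattern such as
   (set (reg:SI 65) (plus:SI (reg:SI 65) (const_int 4))), this callback
   receives DEST = (reg:SI 65) and marks modified_regs[65]; a store
   through (mem ...) sets modified_mem instead.  Register 65 here is a
   hypothetical pseudo.  */
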
2509 /* F is the first insn in the chain of insns.  MAX_REG is one more than
   the largest register number in use, and FLAG_BEFORE_LOOP is nonzero if
   this pass runs before loop optimization.  */
2510
2511 void
2512 thread_jumps (f, max_reg, flag_before_loop)
2513 rtx f;
2514 int max_reg;
2515 int flag_before_loop;
2516 {
2517 /* Basic algorithm is to find a conditional branch,
2518 the label it may branch to, and the branch after
2519 that label. If the two branches test the same condition,
2520 walk back from both branch paths until the insn patterns
2521 differ, or code labels are hit. If we make it back to
2522 the target of the first branch, then we know that the first branch
2523 will either always succeed or always fail depending on the relative
2524 senses of the two branches. So adjust the first branch accordingly
2525 in this case. */
2526
2527 rtx label, b1, b2, t1, t2;
2528 enum rtx_code code1, code2;
2529 rtx b1op0, b1op1, b2op0, b2op1;
2530 int changed = 1;
2531 int i;
2532 int *all_reset;
2533 enum rtx_code reversed_code1, reversed_code2;
2534
2535 /* Allocate register tables and quick-reset table. */
2536 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
2537 same_regs = (int *) xmalloc (max_reg * sizeof (int));
2538 all_reset = (int *) xmalloc (max_reg * sizeof (int));
2539 for (i = 0; i < max_reg; i++)
2540 all_reset[i] = -1;
2541
2542 while (changed)
2543 {
2544 changed = 0;
2545
2546 for (b1 = f; b1; b1 = NEXT_INSN (b1))
2547 {
2548 rtx set;
2549 rtx set2;
2550
2551 /* Get to a candidate branch insn. */
2552 if (GET_CODE (b1) != JUMP_INSN
2553 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
2554 continue;
2555
2556 memset (modified_regs, 0, max_reg * sizeof (char));
2557 modified_mem = 0;
2558
2559 memcpy (same_regs, all_reset, max_reg * sizeof (int));
2560 num_same_regs = 0;
2561
2562 label = JUMP_LABEL (b1);
2563
2564 /* Look for a branch after the target. Record any registers and
2565 memory modified between the target and the branch. Stop when we
2566 get to a label since we can't know what was changed there. */
2567 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
2568 {
2569 if (GET_CODE (b2) == CODE_LABEL)
2570 break;
2571
2572 else if (GET_CODE (b2) == JUMP_INSN)
2573 {
2574 /* If this is an unconditional jump and is the only use of
2575 its target label, we can follow it. */
2576 if (any_uncondjump_p (b2)
2577 && onlyjump_p (b2)
2578 && JUMP_LABEL (b2) != 0
2579 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
2580 {
2581 b2 = JUMP_LABEL (b2);
2582 continue;
2583 }
2584 else
2585 break;
2586 }
2587
2588 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
2589 continue;
2590
2591 if (GET_CODE (b2) == CALL_INSN)
2592 {
2593 modified_mem = 1;
2594 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2595 if (call_used_regs[i] && ! fixed_regs[i]
2596 && i != STACK_POINTER_REGNUM
2597 && i != FRAME_POINTER_REGNUM
2598 && i != HARD_FRAME_POINTER_REGNUM
2599 && i != ARG_POINTER_REGNUM)
2600 modified_regs[i] = 1;
2601 }
2602
2603 note_stores (PATTERN (b2), mark_modified_reg, NULL);
2604 }
2605
2606 /* Check the next candidate branch insn from the label
2607 of the first. */
2608 if (b2 == 0
2609 || GET_CODE (b2) != JUMP_INSN
2610 || b2 == b1
2611 || !any_condjump_p (b2)
2612 || !onlyjump_p (b2))
2613 continue;
2614 set = pc_set (b1);
2615 set2 = pc_set (b2);
2616
2617 /* Get the comparison codes and operands, reversing the
2618 codes if appropriate. If we don't have comparison codes,
2619 we can't do anything. */
2620 b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
2621 b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
2622 code1 = GET_CODE (XEXP (SET_SRC (set), 0));
2623 reversed_code1 = code1;
2624 if (XEXP (SET_SRC (set), 1) == pc_rtx)
2625 code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
2626 else
2627 reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
2628
2629 b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
2630 b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
2631 code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
2632 reversed_code2 = code2;
2633 if (XEXP (SET_SRC (set2), 1) == pc_rtx)
2634 code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
2635 else
2636 reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
2637
2638 	  /* If they test the same things, and knowing whether B1 branches
2639 	     tells us whether or not B2 branches, check whether we
2640 	     can thread the branch.  */
2641 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
2642 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
2643 && (comparison_dominates_p (code1, code2)
2644 || comparison_dominates_p (code1, reversed_code2)))
2645
2646 {
2647 t1 = prev_nonnote_insn (b1);
2648 t2 = prev_nonnote_insn (b2);
2649
2650 while (t1 != 0 && t2 != 0)
2651 {
2652 if (t2 == label)
2653 {
2654 /* We have reached the target of the first branch.
2655 If there are no pending register equivalents,
2656 we know that this branch will either always
2657 succeed (if the senses of the two branches are
2658 the same) or always fail (if not). */
2659 rtx new_label;
2660
2661 if (num_same_regs != 0)
2662 break;
2663
2664 if (comparison_dominates_p (code1, code2))
2665 new_label = JUMP_LABEL (b2);
2666 else
2667 new_label = get_label_after (b2);
2668
2669 if (JUMP_LABEL (b1) != new_label)
2670 {
2671 rtx prev = PREV_INSN (new_label);
2672
2673 if (flag_before_loop
2674 && GET_CODE (prev) == NOTE
2675 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
2676 {
2677 /* Don't thread to the loop label. If a loop
2678 label is reused, loop optimization will
2679 be disabled for that loop. */
2680 new_label = gen_label_rtx ();
2681 emit_label_after (new_label, PREV_INSN (prev));
2682 }
2683 changed |= redirect_jump (b1, new_label, 1);
2684 }
2685 break;
2686 }
2687
2688 /* If either of these is not a normal insn (it might be
2689 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
2690 have already been skipped above.) Similarly, fail
2691 if the insns are different. */
2692 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
2693 || recog_memoized (t1) != recog_memoized (t2)
2694 || ! rtx_equal_for_thread_p (PATTERN (t1),
2695 PATTERN (t2), t2))
2696 break;
2697
2698 t1 = prev_nonnote_insn (t1);
2699 t2 = prev_nonnote_insn (t2);
2700 }
2701 }
2702 }
2703 }
2704
2705 /* Clean up. */
2706 free (modified_regs);
2707 free (same_regs);
2708 free (all_reset);
2709 }
2710 \f
2711 /* This is like rtx_equal_p except that it knows about our handling of
2712    possibly equivalent registers and knows to consider volatile and
2713    modified objects as not equal.
2714 
2715    YINSN is the insn containing Y.  */
2716
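/* A sketch of the intended behavior (operands are hypothetical): two
   MEMs such as (mem:SI (reg:SI 100)) compare equal only while
   modified_mem is clear, neither MEM is volatile, and the address
   register itself is unmodified; likewise two distinct pseudos may
   still compare equal through the same_regs machinery described
   above.  */
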
2717 int
2718 rtx_equal_for_thread_p (x, y, yinsn)
2719 rtx x, y;
2720 rtx yinsn;
2721 {
2722 int i;
2723 int j;
2724 enum rtx_code code;
2725 const char *fmt;
2726
2727 code = GET_CODE (x);
2728 /* Rtx's of different codes cannot be equal. */
2729 if (code != GET_CODE (y))
2730 return 0;
2731
2732 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
2733 (REG:SI x) and (REG:HI x) are NOT equivalent. */
2734
2735 if (GET_MODE (x) != GET_MODE (y))
2736 return 0;
2737
2738   /* For IEEE floating point, consider everything unequal unless unsafe
2739      math optimizations are enabled.  This is pessimistic, but this pass
2740      would only rarely do anything for FP anyway.  */
2741 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
2742 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
2743 return 0;
2744
2745   /* For commutative operations, the RTXs match if the operands match in
2746      either order.  Also handle the simple binary and unary cases without a loop.  */
2747 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2748 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
2749 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
2750 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
2751 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
2752 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2753 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
2754 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
2755 else if (GET_RTX_CLASS (code) == '1')
2756 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
2757
2758   /* Handle special cases first.  */
2759 switch (code)
2760 {
2761 case REG:
2762 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
2763 return 1;
2764
2765       /* If neither is a user variable nor a hard register, check for
2766 	 possible equivalence.  */
2767 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
2768 || REGNO (x) < FIRST_PSEUDO_REGISTER
2769 || REGNO (y) < FIRST_PSEUDO_REGISTER)
2770 return 0;
2771
2772 if (same_regs[REGNO (x)] == -1)
2773 {
2774 same_regs[REGNO (x)] = REGNO (y);
2775 num_same_regs++;
2776
2777 /* If this is the first time we are seeing a register on the `Y'
2778 side, see if it is the last use. If not, we can't thread the
2779 jump, so mark it as not equivalent. */
2780 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
2781 return 0;
2782
2783 return 1;
2784 }
2785 else
2786 return (same_regs[REGNO (x)] == (int) REGNO (y));
2787
2788 break;
2789
2790 case MEM:
2791 /* If memory modified or either volatile, not equivalent.
2792 Else, check address. */
2793 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2794 return 0;
2795
2796 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
2797
2798 case ASM_INPUT:
2799 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2800 return 0;
2801
2802 break;
2803
2804 case SET:
2805       /* Cancel a pending `same_regs' entry if we are setting equivalenced
2806 	 registers.  Then compare the SET sources.  */
2807 if (GET_CODE (SET_DEST (x)) == REG
2808 && GET_CODE (SET_DEST (y)) == REG)
2809 {
2810 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
2811 {
2812 same_regs[REGNO (SET_DEST (x))] = -1;
2813 num_same_regs--;
2814 }
2815 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
2816 return 0;
2817 }
2818 else
2819 {
2820 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
2821 return 0;
2822 }
2823
2824 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
2825
2826 case LABEL_REF:
2827 return XEXP (x, 0) == XEXP (y, 0);
2828
2829 case SYMBOL_REF:
2830 return XSTR (x, 0) == XSTR (y, 0);
2831
2832 default:
2833 break;
2834 }
2835
2836 if (x == y)
2837 return 1;
2838
2839 fmt = GET_RTX_FORMAT (code);
2840 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2841 {
2842 switch (fmt[i])
2843 {
2844 case 'w':
2845 if (XWINT (x, i) != XWINT (y, i))
2846 return 0;
2847 break;
2848
2849 case 'n':
2850 case 'i':
2851 if (XINT (x, i) != XINT (y, i))
2852 return 0;
2853 break;
2854
2855 case 'V':
2856 case 'E':
2857 /* Two vectors must have the same length. */
2858 if (XVECLEN (x, i) != XVECLEN (y, i))
2859 return 0;
2860
2861 /* And the corresponding elements must match. */
2862 for (j = 0; j < XVECLEN (x, i); j++)
2863 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
2864 XVECEXP (y, i, j), yinsn) == 0)
2865 return 0;
2866 break;
2867
2868 case 'e':
2869 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
2870 return 0;
2871 break;
2872
2873 case 'S':
2874 case 's':
2875 if (strcmp (XSTR (x, i), XSTR (y, i)))
2876 return 0;
2877 break;
2878
2879 case 'u':
2880 /* These are just backpointers, so they don't matter. */
2881 break;
2882
2883 case '0':
2884 case 't':
2885 break;
2886
2887 /* It is believed that rtx's at this level will never
2888 contain anything but integers and other rtx's,
2889 except for within LABEL_REFs and SYMBOL_REFs. */
2890 default:
2891 abort ();
2892 }
2893 }
2894 return 1;
2895 }