1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
25
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
30
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
38
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
43
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
46
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
50
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
53
54 #include "config.h"
55 #include "system.h"
56 #include "rtl.h"
57 #include "tm_p.h"
58 #include "flags.h"
59 #include "hard-reg-set.h"
60 #include "regs.h"
61 #include "insn-config.h"
62 #include "insn-attr.h"
63 #include "recog.h"
64 #include "function.h"
65 #include "expr.h"
66 #include "real.h"
67 #include "except.h"
68 #include "toplev.h"
69 #include "reload.h"
70
71 /* ??? Eventually must record somehow the labels used by jumps
72 from nested functions. */
73 /* Pre-record the next or previous real insn for each label?
74 No, this pass is very fast anyway. */
75 /* Condense consecutive labels?
76 This would make life analysis faster, maybe. */
77 /* Optimize jump y; x: ... y: jumpif... x?
78 Don't know if it is worth bothering with. */
79 /* Optimize two cases of conditional jump to conditional jump?
80 This can never delete any instruction or make anything dead,
81 or even change what is live at any point.
82 So perhaps let combiner do it. */
83
84 /* Vector indexed by uid.
85 For each CODE_LABEL, index by its uid to get the first unconditional jump
86 that jumps to the label.
87 For each JUMP_INSN, index by its uid to get the next unconditional jump
88 that jumps to the same label.
89 Element 0 is the start of a chain of all return insns.
90 (It is safe to use element 0 because insn uid 0 is not used.) */
91
92 static rtx *jump_chain;
93
94 /* Maximum index in jump_chain. */
95
96 static int max_jump_chain;
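/* Editor's illustration, a sketch rather than part of the pass: with the
   two arrays above, the chain of unconditional jumps reaching a label is
   walked like this, mirroring the loops over jump_chain later in this
   file:

       rtx j;
       for (j = jump_chain[INSN_UID (label)];
            j != 0;
            j = jump_chain[INSN_UID (j)])
         ... process jump J ...
*/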
97
98 /* Indicates whether death notes are significant in cross jump analysis.
99 Normally they are not significant, because if A and B jump to C
100 and R dies in A, it must die in B. But this might not be true after
101 stack register conversion, and we must compare death notes in that
102 case. */
103
104 static int cross_jump_death_matters = 0;
105
106 static int init_label_info PARAMS ((rtx));
107 static void delete_barrier_successors PARAMS ((rtx));
108 static void mark_all_labels PARAMS ((rtx, int));
109 static rtx delete_unreferenced_labels PARAMS ((rtx));
110 static void delete_noop_moves PARAMS ((rtx));
111 static int duplicate_loop_exit_test PARAMS ((rtx));
112 static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
113 static void do_cross_jump PARAMS ((rtx, rtx, rtx));
114 static int jump_back_p PARAMS ((rtx, rtx));
115 static int tension_vector_labels PARAMS ((rtx, int));
116 static void delete_computation PARAMS ((rtx));
117 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
118 static int redirect_exp PARAMS ((rtx, rtx, rtx));
119 static void invert_exp_1 PARAMS ((rtx));
120 static int invert_exp PARAMS ((rtx));
121 static void delete_from_jump_chain PARAMS ((rtx));
122 static int delete_labelref_insn PARAMS ((rtx, rtx, int));
123 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
124 static void redirect_tablejump PARAMS ((rtx, rtx));
125 static void jump_optimize_1 PARAMS ((rtx, int, int, int, int, int));
126 static int returnjump_p_1 PARAMS ((rtx *, void *));
127 static void delete_prior_computation PARAMS ((rtx, rtx));
128 \f
129 /* Main external entry point into the jump optimizer. See comments before
130 jump_optimize_1 for descriptions of the arguments. */
131 void
132 jump_optimize (f, cross_jump, noop_moves, after_regscan)
133 rtx f;
134 int cross_jump;
135 int noop_moves;
136 int after_regscan;
137 {
138 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
139 }
140
141 /* Alternate entry into the jump optimizer. This entry point only rebuilds
142 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
143 instructions. */
144 void
145 rebuild_jump_labels (f)
146 rtx f;
147 {
148 jump_optimize_1 (f, 0, 0, 0, 1, 0);
149 }
150
151 /* Alternate entry into the jump optimizer. Do only trivial optimizations. */
152
153 void
154 jump_optimize_minimal (f)
155 rtx f;
156 {
157 jump_optimize_1 (f, 0, 0, 0, 0, 1);
158 }
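/* Editor's note, summarizing the entry points above by the flags they
   pass to jump_optimize_1:

       jump_optimize           cross_jump, noop_moves, after_regscan
       rebuild_jump_labels     mark_labels_only = 1
       jump_optimize_minimal   minimal = 1  */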
159 \f
160 /* Delete no-op jumps and optimize jumps to jumps
161 and jumps around jumps.
162 Delete unused labels and unreachable code.
163
164 If CROSS_JUMP is 1, detect matching code
165 before a jump and its destination and unify them.
166 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
167
168 If NOOP_MOVES is nonzero, delete no-op move insns.
169
170 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
171 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
172
173 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
174 and JUMP_LABEL field for jumping insns.
175
176 If `optimize' is zero, don't change any code,
177 just determine whether control drops off the end of the function.
178 This case occurs when we have -W and not -O.
179 It works because `delete_insn' checks the value of `optimize'
180 and refrains from actually deleting when that is 0.
181
182 If MINIMAL is nonzero, then we only perform trivial optimizations:
183
184 * Removal of unreachable code after BARRIERs.
185 * Removal of unreferenced CODE_LABELs.
186 * Removal of a jump to the next instruction.
187 * Removal of a conditional jump followed by an unconditional jump
188 to the same target as the conditional jump.
189 * Simplify a conditional jump around an unconditional jump.
190 * Simplify a jump to a jump.
191 * Delete extraneous line number notes.
192 */
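/* Editor's illustration of the simplest case above, a jump to the next
   instruction (a sketch, not literal RTL from any target):

       (jump_insn (set (pc) (label_ref L)))
       (barrier)
       (code_label L)

   The jump, and the barrier that follows an unconditional jump, are
   deleted, since falling through to L is equivalent.  */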
193
194 static void
195 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
196 mark_labels_only, minimal)
197 rtx f;
198 int cross_jump;
199 int noop_moves;
200 int after_regscan;
201 int mark_labels_only;
202 int minimal;
203 {
204 register rtx insn, next;
205 int changed;
206 int old_max_reg;
207 int first = 1;
208 int max_uid = 0;
209 rtx last_insn;
210 #ifdef HAVE_trap
211 enum rtx_code reversed_code;
212 #endif
213
214 cross_jump_death_matters = (cross_jump == 2);
215 max_uid = init_label_info (f) + 1;
216
217 /* Leave some extra room for labels and duplicate exit test insns
218 we make. */
219 max_jump_chain = max_uid * 14 / 10;
220 jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));
221
222 mark_all_labels (f, cross_jump);
223
224 /* Keep track of labels used from static data; we don't track them
225 closely enough to delete them here, so make sure their reference
226 count doesn't drop to zero. */
227
228 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
229 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
230 LABEL_NUSES (XEXP (insn, 0))++;
231
232 /* Keep track of labels used for marking handlers for exception
233 regions; they cannot usually be deleted. */
234
235 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
236 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
237 LABEL_NUSES (XEXP (insn, 0))++;
238
239 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
240 notes and recompute LABEL_NUSES. */
241 if (mark_labels_only)
242 goto end;
243
244 delete_barrier_successors (f);
245
246 last_insn = delete_unreferenced_labels (f);
247
248 if (noop_moves)
249 delete_noop_moves (f);
250
251 /* Now iterate optimizing jumps until nothing changes over one pass. */
252 changed = 1;
253 old_max_reg = max_reg_num ();
254 while (changed)
255 {
256 changed = 0;
257
258 for (insn = f; insn; insn = next)
259 {
260 rtx reallabelprev;
261 rtx temp, temp1, temp2 = NULL_RTX;
262 rtx temp4 ATTRIBUTE_UNUSED;
263 rtx nlabel;
264 int this_is_any_uncondjump;
265 int this_is_any_condjump;
266 int this_is_onlyjump;
267
268 next = NEXT_INSN (insn);
269
270 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
271 jump. Try to optimize by duplicating the loop exit test if so.
272 This is only safe immediately after regscan, because it uses
273 the values of regno_first_uid and regno_last_uid. */
274 if (after_regscan && GET_CODE (insn) == NOTE
275 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
276 && (temp1 = next_nonnote_insn (insn)) != 0
277 && any_uncondjump_p (temp1)
278 && onlyjump_p (temp1))
279 {
280 temp = PREV_INSN (insn);
281 if (duplicate_loop_exit_test (insn))
282 {
283 changed = 1;
284 next = NEXT_INSN (temp);
285 continue;
286 }
287 }
288
289 if (GET_CODE (insn) != JUMP_INSN)
290 continue;
291
292 this_is_any_condjump = any_condjump_p (insn);
293 this_is_any_uncondjump = any_uncondjump_p (insn);
294 this_is_onlyjump = onlyjump_p (insn);
295
296 /* Tension the labels in dispatch tables. */
297
298 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
299 changed |= tension_vector_labels (PATTERN (insn), 0);
300 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
301 changed |= tension_vector_labels (PATTERN (insn), 1);
302
303 /* See if this jump goes to another jump and redirect if so. */
304 nlabel = follow_jumps (JUMP_LABEL (insn));
305 if (nlabel != JUMP_LABEL (insn))
306 changed |= redirect_jump (insn, nlabel, 1);
307
308 if (! optimize || minimal)
309 continue;
310
311 /* If a dispatch table always goes to the same place,
312 get rid of it and replace the insn that uses it. */
313
314 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
315 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
316 {
317 int i;
318 rtx pat = PATTERN (insn);
319 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
320 int len = XVECLEN (pat, diff_vec_p);
321 rtx dispatch = prev_real_insn (insn);
322 rtx set;
323
324 for (i = 0; i < len; i++)
325 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
326 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
327 break;
328
329 if (i == len
330 && dispatch != 0
331 && GET_CODE (dispatch) == JUMP_INSN
332 && JUMP_LABEL (dispatch) != 0
333 /* Don't mess with a casesi insn.
334 XXX according to the comment before computed_jump_p(),
335 all casesi insns should be a parallel of the jump
336 and a USE of a LABEL_REF. */
337 && ! ((set = single_set (dispatch)) != NULL
338 && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
339 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
340 {
341 redirect_tablejump (dispatch,
342 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
343 changed = 1;
344 }
345 }
346
347 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
348
349 /* Detect jump to following insn. */
350 if (reallabelprev == insn
351 && (this_is_any_condjump || this_is_any_uncondjump)
352 && this_is_onlyjump)
353 {
354 next = next_real_insn (JUMP_LABEL (insn));
355 delete_jump (insn);
356
357 /* Remove the "inactive" but "real" insns (i.e. uses and
358 clobbers) in between here and there. */
359 temp = insn;
360 while ((temp = next_real_insn (temp)) != next)
361 delete_insn (temp);
362
363 changed = 1;
364 continue;
365 }
366
367 /* Detect a conditional jump going to the same place
368 as an immediately following unconditional jump. */
369 else if (this_is_any_condjump && this_is_onlyjump
370 && (temp = next_active_insn (insn)) != 0
371 && simplejump_p (temp)
372 && (next_active_insn (JUMP_LABEL (insn))
373 == next_active_insn (JUMP_LABEL (temp))))
374 {
375 /* Don't mess up test coverage analysis. */
376 temp2 = temp;
377 if (flag_test_coverage && !reload_completed)
378 for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
379 if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
380 break;
381
382 if (temp2 == temp)
383 {
384 /* Ensure that we jump to the later of the two labels.
385 Consider:
386
387 if (test) goto L2;
388 goto L1;
389 ...
390 L1:
391 (clobber return-reg)
392 L2:
393 (use return-reg)
394
395 If we leave the goto L1, we'll incorrectly leave
396 return-reg dead for TEST true. */
397
398 temp2 = next_active_insn (JUMP_LABEL (insn));
399 if (!temp2)
400 temp2 = get_last_insn ();
401 if (GET_CODE (temp2) != CODE_LABEL)
402 temp2 = prev_label (temp2);
403 if (temp2 != JUMP_LABEL (temp))
404 redirect_jump (temp, temp2, 1);
405
406 delete_jump (insn);
407 changed = 1;
408 continue;
409 }
410 }
411
412 /* Detect a conditional jump jumping over an unconditional jump. */
413
414 else if (this_is_any_condjump
415 && reallabelprev != 0
416 && GET_CODE (reallabelprev) == JUMP_INSN
417 && prev_active_insn (reallabelprev) == insn
418 && no_labels_between_p (insn, reallabelprev)
419 && any_uncondjump_p (reallabelprev)
420 && onlyjump_p (reallabelprev))
421 {
422 /* When we invert the unconditional jump, we will be
423 decrementing the usage count of its old label.
424 Make sure that we don't delete it now because that
425 might cause the following code to be deleted. */
426 rtx prev_uses = prev_nonnote_insn (reallabelprev);
427 rtx prev_label = JUMP_LABEL (insn);
428
429 if (prev_label)
430 ++LABEL_NUSES (prev_label);
431
432 if (invert_jump (insn, JUMP_LABEL (reallabelprev), 1))
433 {
434 /* It is very likely that if there are USE insns before
435 this jump, they hold REG_DEAD notes. These REG_DEAD
436 notes are no longer valid due to this optimization,
437 and will cause the life analysis done by following
438 passes (notably delayed-branch scheduling) to think
439 that these registers are dead when they are not.
440
441 To prevent this trouble, we just remove the USE insns
442 from the insn chain. */
443
444 while (prev_uses && GET_CODE (prev_uses) == INSN
445 && GET_CODE (PATTERN (prev_uses)) == USE)
446 {
447 rtx useless = prev_uses;
448 prev_uses = prev_nonnote_insn (prev_uses);
449 delete_insn (useless);
450 }
451
452 delete_insn (reallabelprev);
453 changed = 1;
454 }
455
456 /* We can now safely delete the label if it is unreferenced
457 since the delete_insn above has deleted the BARRIER. */
458 if (prev_label && --LABEL_NUSES (prev_label) == 0)
459 delete_insn (prev_label);
460
461 next = NEXT_INSN (insn);
462 }
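/* Editor's illustration of the transformation just performed (a sketch):

       if (cc) goto L;               if (!cc) goto L2;
       goto L2;               =>
     L: ...                        L: ...

   invert_jump retargets the conditional branch to L2, after which the
   unconditional jump is deleted.  */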
463
464 /* If we have an unconditional jump preceded by a USE, try to put
465 the USE before the target and jump there. This simplifies many
466 of the optimizations below since we don't have to worry about
467 dealing with these USE insns. We only do this if the label
468 being branched to already has the identical USE or if code
469 never falls through to that label. */
470
471 else if (this_is_any_uncondjump
472 && (temp = prev_nonnote_insn (insn)) != 0
473 && GET_CODE (temp) == INSN
474 && GET_CODE (PATTERN (temp)) == USE
475 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
476 && (GET_CODE (temp1) == BARRIER
477 || (GET_CODE (temp1) == INSN
478 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
479 /* Don't do this optimization if we have a loop containing
480 only the USE instruction, and the loop start label has
481 a usage count of 1. This is because we will redo this
482 optimization every time through the outer loop, and jump
483 opt will never exit. */
484 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
485 && temp2 == JUMP_LABEL (insn)
486 && LABEL_NUSES (temp2) == 1))
487 {
488 if (GET_CODE (temp1) == BARRIER)
489 {
490 emit_insn_after (PATTERN (temp), temp1);
491 temp1 = NEXT_INSN (temp1);
492 }
493
494 delete_insn (temp);
495 redirect_jump (insn, get_label_before (temp1), 1);
496 reallabelprev = prev_real_insn (temp1);
497 changed = 1;
498 next = NEXT_INSN (insn);
499 }
500
501 #ifdef HAVE_trap
502 /* Detect a conditional jump jumping over an unconditional trap. */
503 if (HAVE_trap
504 && this_is_any_condjump && this_is_onlyjump
505 && reallabelprev != 0
506 && GET_CODE (reallabelprev) == INSN
507 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
508 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
509 && prev_active_insn (reallabelprev) == insn
510 && no_labels_between_p (insn, reallabelprev)
511 && (temp2 = get_condition (insn, &temp4))
512 && ((reversed_code = reversed_comparison_code (temp2, insn))
513 != UNKNOWN))
514 {
515 rtx new = gen_cond_trap (reversed_code,
516 XEXP (temp2, 0), XEXP (temp2, 1),
517 TRAP_CODE (PATTERN (reallabelprev)));
518
519 if (new)
520 {
521 emit_insn_before (new, temp4);
522 delete_insn (reallabelprev);
523 delete_jump (insn);
524 changed = 1;
525 continue;
526 }
527 }
528 /* Detect a jump jumping to an unconditional trap. */
529 else if (HAVE_trap && this_is_onlyjump
530 && (temp = next_active_insn (JUMP_LABEL (insn)))
531 && GET_CODE (temp) == INSN
532 && GET_CODE (PATTERN (temp)) == TRAP_IF
533 && (this_is_any_uncondjump
534 || (this_is_any_condjump
535 && (temp2 = get_condition (insn, &temp4)))))
536 {
537 rtx tc = TRAP_CONDITION (PATTERN (temp));
538
539 if (tc == const_true_rtx
540 || (! this_is_any_uncondjump && rtx_equal_p (temp2, tc)))
541 {
542 rtx new;
543 /* Replace an unconditional jump to a trap with a trap. */
544 if (this_is_any_uncondjump)
545 {
546 emit_barrier_after (emit_insn_before (gen_trap (), insn));
547 delete_jump (insn);
548 changed = 1;
549 continue;
550 }
551 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
552 XEXP (temp2, 1),
553 TRAP_CODE (PATTERN (temp)));
554 if (new)
555 {
556 emit_insn_before (new, temp4);
557 delete_jump (insn);
558 changed = 1;
559 continue;
560 }
561 }
562 /* If the trap condition and jump condition are mutually
563 exclusive, redirect the jump to the following insn. */
564 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
565 && this_is_any_condjump
566 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
567 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
568 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
569 && redirect_jump (insn, get_label_after (temp), 1))
570 {
571 changed = 1;
572 continue;
573 }
574 }
575 #endif
576 else
577 {
578 /* Now that the jump has been tensioned,
579 try cross jumping: check for identical code
580 before the jump and before its target label. */
581
582 /* First, cross jumping of conditional jumps: */
583
584 if (cross_jump && condjump_p (insn))
585 {
586 rtx newjpos, newlpos;
587 rtx x = prev_real_insn (JUMP_LABEL (insn));
588
589 /* A conditional jump may be crossjumped
590 only if the place it jumps to follows
591 an opposing jump that comes back here. */
592
593 if (x != 0 && ! jump_back_p (x, insn))
594 /* We have no opposing jump;
595 cannot cross jump this insn. */
596 x = 0;
597
598 newjpos = 0;
599 /* TARGET is nonzero if it is ok to cross jump
600 to code before TARGET. If so, see if it matches. */
601 if (x != 0)
602 find_cross_jump (insn, x, 2,
603 &newjpos, &newlpos);
604
605 if (newjpos != 0)
606 {
607 do_cross_jump (insn, newjpos, newlpos);
608 /* Make the old conditional jump
609 into an unconditional one. */
610 PATTERN (insn) = gen_jump (JUMP_LABEL (insn));
611 INSN_CODE (insn) = -1;
612 emit_barrier_after (insn);
613 /* Add to jump_chain unless this is a new label
614 whose UID is too large. */
615 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
616 {
617 jump_chain[INSN_UID (insn)]
618 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
619 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
620 }
621 changed = 1;
622 next = insn;
623 }
624 }
625
626 /* Cross jumping of unconditional jumps:
627 a few differences. */
628
629 if (cross_jump && simplejump_p (insn))
630 {
631 rtx newjpos, newlpos;
632 rtx target;
633
634 newjpos = 0;
635
636 /* TARGET is nonzero if it is ok to cross jump
637 to code before TARGET. If so, see if it matches. */
638 find_cross_jump (insn, JUMP_LABEL (insn), 1,
639 &newjpos, &newlpos);
640
641 /* If we cannot cross jump to code before the label,
642 see if we can cross jump to another jump to
643 the same label. */
644 /* Try each other jump to this label. */
645 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
646 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
647 target != 0 && newjpos == 0;
648 target = jump_chain[INSN_UID (target)])
649 if (target != insn
650 && JUMP_LABEL (target) == JUMP_LABEL (insn)
651 /* Ignore TARGET if it's deleted. */
652 && ! INSN_DELETED_P (target))
653 find_cross_jump (insn, target, 2,
654 &newjpos, &newlpos);
655
656 if (newjpos != 0)
657 {
658 do_cross_jump (insn, newjpos, newlpos);
659 changed = 1;
660 next = insn;
661 }
662 }
663
664 /* This code was dead in the previous jump.c! */
665 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
666 {
667 /* Return insns all "jump to the same place"
668 so we can cross-jump between any two of them. */
669
670 rtx newjpos, newlpos, target;
671
672 newjpos = 0;
673
674 /* If we cannot cross jump to code before the label,
675 see if we can cross jump to another jump to
676 the same label. */
677 /* Try each other jump to this label. */
678 for (target = jump_chain[0];
679 target != 0 && newjpos == 0;
680 target = jump_chain[INSN_UID (target)])
681 if (target != insn
682 && ! INSN_DELETED_P (target)
683 && GET_CODE (PATTERN (target)) == RETURN)
684 find_cross_jump (insn, target, 2,
685 &newjpos, &newlpos);
686
687 if (newjpos != 0)
688 {
689 do_cross_jump (insn, newjpos, newlpos);
690 changed = 1;
691 next = insn;
692 }
693 }
694 }
695 }
696
697 first = 0;
698 }
699
700 /* Delete extraneous line number notes.
701 Note that two consecutive notes for different lines are not really
702 extraneous. There should be some indication where that line belonged,
703 even if it became empty. */
704
705 {
706 rtx last_note = 0;
707
708 for (insn = f; insn; insn = NEXT_INSN (insn))
709 if (GET_CODE (insn) == NOTE)
710 {
711 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
712 /* Any previous line note was for the prologue; gdb wants a new
713 note after the prologue even if it is for the same line. */
714 last_note = NULL_RTX;
715 else if (NOTE_LINE_NUMBER (insn) >= 0)
716 {
717 /* Delete this note if it is identical to previous note. */
718 if (last_note
719 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
720 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
721 {
722 delete_insn (insn);
723 continue;
724 }
725
726 last_note = insn;
727 }
728 }
729 }
730
731 end:
732 /* Clean up. */
733 free (jump_chain);
734 jump_chain = 0;
735 }
736 \f
737 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
738 notes whose labels don't occur in the insn any more. Returns the
739 largest INSN_UID found. */
740 static int
741 init_label_info (f)
742 rtx f;
743 {
744 int largest_uid = 0;
745 rtx insn;
746
747 for (insn = f; insn; insn = NEXT_INSN (insn))
748 {
749 if (GET_CODE (insn) == CODE_LABEL)
750 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
751 else if (GET_CODE (insn) == JUMP_INSN)
752 JUMP_LABEL (insn) = 0;
753 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
754 {
755 rtx note, next;
756
757 for (note = REG_NOTES (insn); note; note = next)
758 {
759 next = XEXP (note, 1);
760 if (REG_NOTE_KIND (note) == REG_LABEL
761 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
762 remove_note (insn, note);
763 }
764 }
765 if (INSN_UID (insn) > largest_uid)
766 largest_uid = INSN_UID (insn);
767 }
768
769 return largest_uid;
770 }
771
772 /* Delete insns following barriers, up to next label.
773
774 Also delete no-op jumps created by gcse. */
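/* Editor's illustration (a sketch): given

       (barrier)
       (insn ...)          <- unreachable, deleted
       (jump_insn ...)     <- unreachable, deleted
       (code_label L)      <- deletion stops here

   nothing between a barrier and the next label can be reached, so it is
   removed.  */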
775
776 static void
777 delete_barrier_successors (f)
778 rtx f;
779 {
780 rtx insn;
781 rtx set;
782
783 for (insn = f; insn;)
784 {
785 if (GET_CODE (insn) == BARRIER)
786 {
787 insn = NEXT_INSN (insn);
788
789 never_reached_warning (insn);
790
791 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
792 {
793 if (GET_CODE (insn) == JUMP_INSN)
794 {
795 /* Detect when we're deleting a tablejump; get rid of
796 the jump table as well. */
797 rtx next1 = next_nonnote_insn (insn);
798 rtx next2 = next1 ? next_nonnote_insn (next1) : 0;
799 if (next2 && GET_CODE (next1) == CODE_LABEL
800 && GET_CODE (next2) == JUMP_INSN
801 && (GET_CODE (PATTERN (next2)) == ADDR_VEC
802 || GET_CODE (PATTERN (next2)) == ADDR_DIFF_VEC))
803 {
804 delete_insn (insn);
805 insn = next2;
806 }
807 else
808 insn = delete_insn (insn);
809 }
810 else if (GET_CODE (insn) == NOTE
811 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
812 insn = NEXT_INSN (insn);
813 else
814 insn = delete_insn (insn);
815 }
816 /* INSN is now the code_label. */
817 }
818
819 /* Also remove (set (pc) (pc)) insns which can be created by
820 gcse. We eliminate such insns now to avoid having them
821 cause problems later. */
822 else if (GET_CODE (insn) == JUMP_INSN
823 && (set = pc_set (insn)) != NULL
824 && SET_SRC (set) == pc_rtx
825 && SET_DEST (set) == pc_rtx
826 && onlyjump_p (insn))
827 insn = delete_insn (insn);
828
829 else
830 insn = NEXT_INSN (insn);
831 }
832 }
833
834 /* Mark the label each jump jumps to.
835 Combine consecutive labels, and count uses of labels.
836
837 For each label, make a chain (using `jump_chain')
838 of all the *unconditional* jumps that jump to it;
839 also make a chain of all returns.
840
841 CROSS_JUMP indicates whether we are doing cross jumping
842 and, if we are, whether we will be paying attention to
843 death notes or not. */
844
845 static void
846 mark_all_labels (f, cross_jump)
847 rtx f;
848 int cross_jump;
849 {
850 rtx insn;
851
852 for (insn = f; insn; insn = NEXT_INSN (insn))
853 if (INSN_P (insn))
854 {
855 if (GET_CODE (insn) == CALL_INSN
856 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
857 {
858 mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
859 mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
860 mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);
861
862 /* Canonicalize the tail recursion label attached to the
863 CALL_PLACEHOLDER insn. */
864 if (XEXP (PATTERN (insn), 3))
865 {
866 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
867 XEXP (PATTERN (insn), 3));
868 mark_jump_label (label_ref, insn, cross_jump, 0);
869 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
870 }
871
872 continue;
873 }
874
875 mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
876 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
877 {
878 /* When we know the LABEL_REF contained in a REG used in
879 an indirect jump, we'll have a REG_LABEL note so that
880 flow can tell where it's going. */
881 if (JUMP_LABEL (insn) == 0)
882 {
883 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
884 if (label_note)
885 {
886 /* Put a LABEL_REF around the REG_LABEL note, so
887 that we can canonicalize it. */
888 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
889 XEXP (label_note, 0));
890
891 mark_jump_label (label_ref, insn, cross_jump, 0);
892 XEXP (label_note, 0) = XEXP (label_ref, 0);
893 JUMP_LABEL (insn) = XEXP (label_note, 0);
894 }
895 }
896 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
897 {
898 jump_chain[INSN_UID (insn)]
899 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
900 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
901 }
902 if (GET_CODE (PATTERN (insn)) == RETURN)
903 {
904 jump_chain[INSN_UID (insn)] = jump_chain[0];
905 jump_chain[0] = insn;
906 }
907 }
908 }
909 }
910
911 /* Delete all labels that are no longer referenced.
912 Also find and return the last insn. */
913
914 static rtx
915 delete_unreferenced_labels (f)
916 rtx f;
917 {
918 rtx final = NULL_RTX;
919 rtx insn;
920
921 for (insn = f; insn;)
922 {
923 if (GET_CODE (insn) == CODE_LABEL
924 && LABEL_NUSES (insn) == 0
925 && LABEL_ALTERNATE_NAME (insn) == NULL)
926 insn = delete_insn (insn);
927 else
928 {
929 final = insn;
930 insn = NEXT_INSN (insn);
931 }
932 }
933
934 return final;
935 }
936
937 /* Delete various simple forms of moves which have no necessary
938 side effect. */
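/* Editor's illustration of the simplest case (a sketch): after a
   parameter fails to get a register, a copy can degenerate to

       (insn (set (reg 58) (reg 58)))

   which set_noop_p recognizes; delete_computation then removes it along
   with any computations feeding only that insn.  */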
939
940 static void
941 delete_noop_moves (f)
942 rtx f;
943 {
944 rtx insn, next;
945
946 for (insn = f; insn;)
947 {
948 next = NEXT_INSN (insn);
949
950 if (GET_CODE (insn) == INSN)
951 {
952 register rtx body = PATTERN (insn);
953
954 /* Detect and delete no-op move instructions
955 resulting from not allocating a parameter in a register. */
956
957 if (GET_CODE (body) == SET && set_noop_p (body))
958 delete_computation (insn);
959
960 /* Detect and delete no-op move instructions
961 resulting from smart or fortuitous register allocation. */
962
963 else if (GET_CODE (body) == SET)
964 {
965 int sreg = true_regnum (SET_SRC (body));
966 int dreg = true_regnum (SET_DEST (body));
967
968 if (sreg == dreg && sreg >= 0)
969 delete_insn (insn);
970 else if (sreg >= 0 && dreg >= 0)
971 {
972 rtx trial;
973 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
974 sreg, NULL, dreg,
975 GET_MODE (SET_SRC (body)));
976
977 if (tem != 0
978 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
979 {
980 /* DREG may have been the target of a REG_DEAD note in
981 the insn which makes INSN redundant. If so, reorg
982 would still think it is dead. So search for such a
983 note and delete it if we find it. */
984 if (! find_regno_note (insn, REG_UNUSED, dreg))
985 for (trial = prev_nonnote_insn (insn);
986 trial && GET_CODE (trial) != CODE_LABEL;
987 trial = prev_nonnote_insn (trial))
988 if (find_regno_note (trial, REG_DEAD, dreg))
989 {
990 remove_death (dreg, trial);
991 break;
992 }
993
994 /* Deleting insn could lose a death-note for SREG. */
995 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
996 {
997 /* Change this into a USE so that we won't emit
998 code for it, but still can keep the note. */
999 PATTERN (insn)
1000 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
1001 INSN_CODE (insn) = -1;
1002 /* Remove all reg notes but the REG_DEAD one. */
1003 REG_NOTES (insn) = trial;
1004 XEXP (trial, 1) = NULL_RTX;
1005 }
1006 else
1007 delete_insn (insn);
1008 }
1009 }
1010 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
1011 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
1012 NULL, 0, GET_MODE (SET_DEST (body))))
1013 {
1014 /* This handles the case where we have two consecutive
1015 assignments of the same constant to pseudos that didn't
1016 get a hard reg. Each SET from the constant will be
1017 converted into a SET of the spill register and an
1018 output reload will be made following it. This produces
1019 two loads of the same constant into the same spill
1020 register. */
1021
1022 rtx in_insn = insn;
1023
1024 /* Look back for a death note for the first reg.
1025 If there is one, it is no longer accurate. */
1026 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
1027 {
1028 if ((GET_CODE (in_insn) == INSN
1029 || GET_CODE (in_insn) == JUMP_INSN)
1030 && find_regno_note (in_insn, REG_DEAD, dreg))
1031 {
1032 remove_death (dreg, in_insn);
1033 break;
1034 }
1035 in_insn = PREV_INSN (in_insn);
1036 }
1037
1038 /* Delete the second load of the value. */
1039 delete_insn (insn);
1040 }
1041 }
1042 else if (GET_CODE (body) == PARALLEL)
1043 {
1044 /* If each part is a set between two identical registers or
1045 a USE or CLOBBER, delete the insn. */
1046 int i, sreg, dreg;
1047 rtx tem;
1048
1049 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1050 {
1051 tem = XVECEXP (body, 0, i);
1052 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
1053 continue;
1054
1055 if (GET_CODE (tem) != SET
1056 || (sreg = true_regnum (SET_SRC (tem))) < 0
1057 || (dreg = true_regnum (SET_DEST (tem))) < 0
1058 || dreg != sreg)
1059 break;
1060 }
1061
1062 if (i < 0)
1063 delete_insn (insn);
1064 }
1065 }
1066 insn = next;
1067 }
1068 }
1069
1070 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
1071 jump. Assume that this unconditional jump is to the exit test code. If
1072 the code is sufficiently simple, make a copy of it before INSN,
1073 followed by a jump to the exit of the loop. Then delete the unconditional
1074 jump after INSN.
1075
1076 Return 1 if we made the change, else 0.
1077
1078 This is only safe immediately after a regscan pass because it uses the
1079 values of regno_first_uid and regno_last_uid. */
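/* Editor's illustration of the transformation (a sketch).  Before:

       NOTE_INSN_LOOP_BEG
       goto test;
     body: ...
     test: if (cond) goto body;
     exit: ...

   After, the exit test runs once before the loop is entered:

       if (cond) goto body;      <- copied exit test
       goto exit;                <- new jump past the loop
       NOTE_INSN_LOOP_BEG
     body: ...
     test: if (cond) goto body;
     exit: ...
*/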
1080
1081 static int
1082 duplicate_loop_exit_test (loop_start)
1083 rtx loop_start;
1084 {
1085 rtx insn, set, reg, p, link;
1086 rtx copy = 0, first_copy = 0;
1087 int num_insns = 0;
1088 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
1089 rtx lastexit;
1090 int max_reg = max_reg_num ();
1091 rtx *reg_map = 0;
1092
1093 /* Scan the exit code. We do not perform this optimization if any insn:
1094
1095 is a CALL_INSN
1096 is a CODE_LABEL
1097 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
1098 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
1099 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
1100 is not valid.
1101
1102 We also do not do this if we find an insn with ASM_OPERANDS. While
1103 this restriction should not be necessary, copying an insn with
1104 ASM_OPERANDS can confuse asm_noperands in some cases.
1105
1106 Also, don't do this if the exit code is more than 20 insns. */
1107
1108 for (insn = exitcode;
1109 insn
1110 && ! (GET_CODE (insn) == NOTE
1111 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
1112 insn = NEXT_INSN (insn))
1113 {
1114 switch (GET_CODE (insn))
1115 {
1116 case CODE_LABEL:
1117 case CALL_INSN:
1118 return 0;
1119 case NOTE:
1120 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
1121 a jump immediately after the loop start that branches outside
1122 the loop but within an outer loop, near the exit test.
1123 If we copied this exit test and created a phony
1124 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
1125 before the exit test look like they could safely be moved
1126 out of the loop even though they may never actually be executed.
1127 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
1128
1129 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1130 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
1131 return 0;
1132
1133 if (optimize < 2
1134 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1135 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1136 /* If we were to duplicate this code, we would not move
1137 the BLOCK notes, and so debugging the moved code would
1138 be difficult. Thus, we only move the code with -O2 or
1139 higher. */
1140 return 0;
1141
1142 break;
1143 case JUMP_INSN:
1144 case INSN:
1145 /* The code below would grossly mishandle REG_WAS_0 notes,
1146 so get rid of them here. */
1147 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
1148 remove_note (insn, p);
1149 if (++num_insns > 20
1150 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1151 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
1152 return 0;
1153 break;
1154 default:
1155 break;
1156 }
1157 }
1158
1159 /* Unless INSN is zero, we can do the optimization. */
1160 if (insn == 0)
1161 return 0;
1162
1163 lastexit = insn;
1164
1165 /* See if any insn sets a register only used in the loop exit code and
1166 not a user variable. If so, replace it with a new register. */
1167 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1168 if (GET_CODE (insn) == INSN
1169 && (set = single_set (insn)) != 0
1170 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
1171 || (GET_CODE (reg) == SUBREG
1172 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
1173 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1174 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
1175 {
1176 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
1177 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
1178 break;
1179
1180 if (p != lastexit)
1181 {
1182 /* We can do the replacement. Allocate reg_map if this is the
1183 first replacement we found. */
1184 if (reg_map == 0)
1185 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
1186
1187 REG_LOOP_TEST_P (reg) = 1;
1188
1189 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
1190 }
1191 }
1192
1193 /* Now copy each insn. */
1194 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1195 {
1196 switch (GET_CODE (insn))
1197 {
1198 case BARRIER:
1199 copy = emit_barrier_before (loop_start);
1200 break;
1201 case NOTE:
1202 /* Only copy line-number notes. */
1203 if (NOTE_LINE_NUMBER (insn) >= 0)
1204 {
1205 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
1206 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
1207 }
1208 break;
1209
1210 case INSN:
1211 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
1212 if (reg_map)
1213 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1214
1215 mark_jump_label (PATTERN (copy), copy, 0, 0);
1216
1217 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
1218 make them. */
1219 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1220 if (REG_NOTE_KIND (link) != REG_LABEL)
1221 {
1222 if (GET_CODE (link) == EXPR_LIST)
1223 REG_NOTES (copy)
1224 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
1225 XEXP (link, 0),
1226 REG_NOTES (copy)));
1227 else
1228 REG_NOTES (copy)
1229 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
1230 XEXP (link, 0),
1231 REG_NOTES (copy)));
1232 }
1233
1234 if (reg_map && REG_NOTES (copy))
1235 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1236 break;
1237
1238 case JUMP_INSN:
1239 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
1240 loop_start);
1241 if (reg_map)
1242 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1243 mark_jump_label (PATTERN (copy), copy, 0, 0);
1244 if (REG_NOTES (insn))
1245 {
1246 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
1247 if (reg_map)
1248 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1249 }
1250
1251 /* If this is a simple jump, add it to the jump chain. */
1252
1253 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
1254 && simplejump_p (copy))
1255 {
1256 jump_chain[INSN_UID (copy)]
1257 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1258 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1259 }
1260 break;
1261
1262 default:
1263 abort ();
1264 }
1265
1266 /* Record the first insn we copied. We need it so that we can
1267 scan the copied insns for new pseudo registers. */
1268 if (! first_copy)
1269 first_copy = copy;
1270 }
1271
1272 /* Now clean up by emitting a jump to the end label and deleting the jump
1273 at the start of the loop. */
1274 if (! copy || GET_CODE (copy) != BARRIER)
1275 {
1276 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
1277 loop_start);
1278
1279 /* Record the first insn we copied. We need it so that we can
1280 scan the copied insns for new pseudo registers. This may not
1281 be strictly necessary since we should have copied at least one
1282 insn above. But I am going to be safe. */
1283 if (! first_copy)
1284 first_copy = copy;
1285
1286 mark_jump_label (PATTERN (copy), copy, 0, 0);
1287 if (INSN_UID (copy) < max_jump_chain
1288 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
1289 {
1290 jump_chain[INSN_UID (copy)]
1291 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1292 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1293 }
1294 emit_barrier_before (loop_start);
1295 }
1296
1297 /* Now scan from the first insn we copied to the last insn we copied
1298 (copy) for new pseudo registers. Do this after the code to jump to
1299 the end label since that might create a new pseudo too. */
1300 reg_scan_update (first_copy, copy, max_reg);
1301
1302 /* Mark the exit code as the virtual top of the converted loop. */
1303 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
1304
1305 delete_insn (next_nonnote_insn (loop_start));
1306
1307 /* Clean up. */
1308 if (reg_map)
1309 free (reg_map);
1310
1311 return 1;
1312 }
1313 \f
1314 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and loop-end
1315 notes between START and END out before START. Assume that END is not
1316 such a note. START may be such a note. Returns the value of the new
1317 starting insn, which may be different if the original start was such a
1318 note. */
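/* Editor's illustration (a sketch): if the range START..END contains

       insn A, NOTE_INSN_LOOP_END, insn B

   the note is moved out in front of START, leaving A and B adjacent
   inside the range.  */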
1319
1320 rtx
1321 squeeze_notes (start, end)
1322 rtx start, end;
1323 {
1324 rtx insn;
1325 rtx next;
1326
1327 for (insn = start; insn != end; insn = next)
1328 {
1329 next = NEXT_INSN (insn);
1330 if (GET_CODE (insn) == NOTE
1331 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
1332 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1333 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1334 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
1335 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
1336 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
1337 {
1338 if (insn == start)
1339 start = next;
1340 else
1341 {
1342 rtx prev = PREV_INSN (insn);
1343 PREV_INSN (insn) = PREV_INSN (start);
1344 NEXT_INSN (insn) = start;
1345 NEXT_INSN (PREV_INSN (insn)) = insn;
1346 PREV_INSN (NEXT_INSN (insn)) = insn;
1347 NEXT_INSN (prev) = next;
1348 PREV_INSN (next) = prev;
1349 }
1350 }
1351 }
1352
1353 return start;
1354 }
1355 \f
1356 /* Compare the instructions before insn E1 with those before E2
1357 to find an opportunity for cross jumping.
1358 (This means detecting identical sequences of insns followed by
1359 jumps to the same place, or followed by a label and a jump
1360 to that label, and replacing one with a jump to the other.)
1361
1362 Assume E1 is a jump that jumps to label E2
1363 (that is not always true but it might as well be).
1364 Find the longest possible equivalent sequences
1365 and store the first insns of those sequences into *F1 and *F2.
1366 Store zero there if no equivalent preceding instructions are found.
1367
1368 We give up if we find a label in stream 1.
1369 Actually we could transfer that label into stream 2. */
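/* Editor's illustration (a sketch): given two identical tails,

       x = f ();               x = f ();
       y = x + 1;              y = x + 1;
       goto L;               L: ...

   the insns before the jump match the insns before L, so *F1 and *F2
   point at the two copies; do_cross_jump can then retarget the jump to
   a label placed before the second copy and delete the first.  */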
1370
1371 static void
1372 find_cross_jump (e1, e2, minimum, f1, f2)
1373 rtx e1, e2;
1374 int minimum;
1375 rtx *f1, *f2;
1376 {
1377 register rtx i1 = e1, i2 = e2;
1378 register rtx p1, p2;
1379 int lose = 0;
1380
1381 rtx last1 = 0, last2 = 0;
1382 rtx afterlast1 = 0, afterlast2 = 0;
1383
1384 *f1 = 0;
1385 *f2 = 0;
1386
1387 while (1)
1388 {
1389 i1 = prev_nonnote_insn (i1);
1390
1391 i2 = PREV_INSN (i2);
1392 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
1393 i2 = PREV_INSN (i2);
1394
1395 if (i1 == 0)
1396 break;
1397
1398 /* Don't allow the range of insns preceding E1 or E2
1399 to include the other (E2 or E1). */
1400 if (i2 == e1 || i1 == e2)
1401 break;
1402
1403 /* If we will get to this code by jumping, those jumps will be
1404 tensioned to go directly to the new label (before I2),
1405 so this cross-jumping won't cost extra. So reduce the minimum. */
1406 if (GET_CODE (i1) == CODE_LABEL)
1407 {
1408 --minimum;
1409 break;
1410 }
1411
1412 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
1413 break;
1414
1415 p1 = PATTERN (i1);
1416 p2 = PATTERN (i2);
1417
1418 /* If this is a CALL_INSN, compare register usage information.
1419 If we don't check this on stack register machines, the two
1420 CALL_INSNs might be merged leaving reg-stack.c with mismatching
1421 numbers of stack registers in the same basic block.
1422 If we don't check this on machines with delay slots, a delay slot may
1423 be filled that clobbers a parameter expected by the subroutine.
1424
1425 ??? We take the simple route for now and assume that if they're
1426 equal, they were constructed identically. */
1427
1428 if (GET_CODE (i1) == CALL_INSN
1429 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
1430 CALL_INSN_FUNCTION_USAGE (i2)))
1431 lose = 1;
1432
1433 #ifdef STACK_REGS
1434 /* If cross_jump_death_matters is not 0, the insn's mode
1435 indicates whether or not the insn contains any stack-like
1436 regs. */
1437
1438 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
1439 {
1440 /* If register stack conversion has already been done, then
1441 death notes must also be compared before it is certain that
1442 the two instruction streams match. */
1443
1444 rtx note;
1445 HARD_REG_SET i1_regset, i2_regset;
1446
1447 CLEAR_HARD_REG_SET (i1_regset);
1448 CLEAR_HARD_REG_SET (i2_regset);
1449
1450 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
1451 if (REG_NOTE_KIND (note) == REG_DEAD
1452 && STACK_REG_P (XEXP (note, 0)))
1453 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
1454
1455 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
1456 if (REG_NOTE_KIND (note) == REG_DEAD
1457 && STACK_REG_P (XEXP (note, 0)))
1458 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
1459
1460 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
1461
1462 lose = 1;
1463
1464 done:
1465 ;
1466 }
1467 #endif
1468
1469 /* Don't allow old-style asm or volatile extended asms to be accepted
1470 for cross jumping purposes. It is conceptually correct to allow
1471 them, since cross-jumping preserves the dynamic instruction order
1472 even though it is changing the static instruction order. However,
1473 if an asm is being used to emit an assembler pseudo-op, such as
1474 the MIPS `.set reorder' pseudo-op, then the static instruction order
1475 matters and it must be preserved. */
1476 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
1477 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
1478 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
1479 lose = 1;
1480
1481 if (lose || GET_CODE (p1) != GET_CODE (p2)
1482 || ! rtx_renumbered_equal_p (p1, p2))
1483 {
1484 /* The following code helps take care of G++ cleanups. */
1485 rtx equiv1;
1486 rtx equiv2;
1487
1488 if (!lose && GET_CODE (p1) == GET_CODE (p2)
1489 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
1490 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
1491 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
1492 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
1493 /* If the equivalences are not to a constant, they may
1494 reference pseudos that no longer exist, so we can't
1495 use them. */
1496 && CONSTANT_P (XEXP (equiv1, 0))
1497 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
1498 {
1499 rtx s1 = single_set (i1);
1500 rtx s2 = single_set (i2);
1501 if (s1 != 0 && s2 != 0
1502 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
1503 {
1504 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
1505 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
1506 if (! rtx_renumbered_equal_p (p1, p2))
1507 cancel_changes (0);
1508 else if (apply_change_group ())
1509 goto win;
1510 }
1511 }
1512
1513 /* Insns fail to match; cross jumping is limited to the following
1514 insns. */
1515
1516 #ifdef HAVE_cc0
1517 /* Don't allow the insn after a compare to be shared by
1518 cross-jumping unless the compare is also shared.
1519 Here, if either of these non-matching insns is a compare,
1520 exclude the following insn from possible cross-jumping. */
1521 if (sets_cc0_p (p1) || sets_cc0_p (p2))
1522 last1 = afterlast1, last2 = afterlast2, ++minimum;
1523 #endif
1524
1525 /* If cross-jumping here will feed a jump-around-jump
1526 optimization, this jump won't cost extra, so reduce
1527 the minimum. */
1528 if (GET_CODE (i1) == JUMP_INSN
1529 && JUMP_LABEL (i1)
1530 && prev_real_insn (JUMP_LABEL (i1)) == e1)
1531 --minimum;
1532 break;
1533 }
1534
1535 win:
1536 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
1537 {
1538 /* Ok, this insn is potentially includable in a cross-jump here. */
1539 afterlast1 = last1, afterlast2 = last2;
1540 last1 = i1, last2 = i2, --minimum;
1541 }
1542 }
1543
1544 if (minimum <= 0 && last1 != 0 && last1 != e1)
1545 *f1 = last1, *f2 = last2;
1546 }
1547
1548 static void
1549 do_cross_jump (insn, newjpos, newlpos)
1550 rtx insn, newjpos, newlpos;
1551 {
1552 /* Find an existing label at this point
1553 or make a new one if there is none. */
1554 register rtx label = get_label_before (newlpos);
1555
1556 /* Make the same jump insn jump to the new point. */
1557 if (GET_CODE (PATTERN (insn)) == RETURN)
1558 {
1559 /* Remove from jump chain of returns. */
1560 delete_from_jump_chain (insn);
1561 /* Change the insn. */
1562 PATTERN (insn) = gen_jump (label);
1563 INSN_CODE (insn) = -1;
1564 JUMP_LABEL (insn) = label;
1565 LABEL_NUSES (label)++;
1566 /* Add to new the jump chain. */
1567 if (INSN_UID (label) < max_jump_chain
1568 && INSN_UID (insn) < max_jump_chain)
1569 {
1570 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
1571 jump_chain[INSN_UID (label)] = insn;
1572 }
1573 }
1574 else
1575 redirect_jump (insn, label, 1);
1576
1577 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
1578 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
1579 the NEWJPOS stream. */
1580
1581 while (newjpos != insn)
1582 {
1583 rtx lnote;
1584
1585 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
1586 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
1587 || REG_NOTE_KIND (lnote) == REG_EQUIV)
1588 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
1589 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
1590 remove_note (newlpos, lnote);
1591
1592 delete_insn (newjpos);
1593 newjpos = next_real_insn (newjpos);
1594 newlpos = next_real_insn (newlpos);
1595 }
1596 }
1597 \f
1598 /* Return the label before INSN, or put a new label there. */
1599
1600 rtx
1601 get_label_before (insn)
1602 rtx insn;
1603 {
1604 rtx label;
1605
1606 /* Find an existing label at this point
1607 or make a new one if there is none. */
1608 label = prev_nonnote_insn (insn);
1609
1610 if (label == 0 || GET_CODE (label) != CODE_LABEL)
1611 {
1612 rtx prev = PREV_INSN (insn);
1613
1614 label = gen_label_rtx ();
1615 emit_label_after (label, prev);
1616 LABEL_NUSES (label) = 0;
1617 }
1618 return label;
1619 }
1620
1621 /* Return the label after INSN, or put a new label there. */
1622
1623 rtx
1624 get_label_after (insn)
1625 rtx insn;
1626 {
1627 rtx label;
1628
1629 /* Find an existing label at this point
1630 or make a new one if there is none. */
1631 label = next_nonnote_insn (insn);
1632
1633 if (label == 0 || GET_CODE (label) != CODE_LABEL)
1634 {
1635 label = gen_label_rtx ();
1636 emit_label_after (label, insn);
1637 LABEL_NUSES (label) = 0;
1638 }
1639 return label;
1640 }
1641 \f
1642 /* Return 1 if INSN is a jump that jumps to right after TARGET
1643 only on the condition that TARGET itself would drop through.
1644 Assumes that TARGET is a conditional jump. */
1645
1646 static int
1647 jump_back_p (insn, target)
1648 rtx insn, target;
1649 {
1650 rtx cinsn, ctarget;
1651 enum rtx_code codei, codet;
1652 rtx set, tset;
1653
1654 if (! any_condjump_p (insn)
1655 || any_uncondjump_p (target)
1656 || target != prev_real_insn (JUMP_LABEL (insn)))
1657 return 0;
1658 set = pc_set (insn);
1659 tset = pc_set (target);
1660
1661 cinsn = XEXP (SET_SRC (set), 0);
1662 ctarget = XEXP (SET_SRC (tset), 0);
1663
1664 codei = GET_CODE (cinsn);
1665 codet = GET_CODE (ctarget);
1666
1667 if (XEXP (SET_SRC (set), 1) == pc_rtx)
1668 {
1669 codei = reversed_comparison_code (cinsn, insn);
1670 if (codei == UNKNOWN)
1671 return 0;
1672 }
1673
1674 if (XEXP (SET_SRC (tset), 2) == pc_rtx)
1675 {
1676 codet = reversed_comparison_code (ctarget, target);
1677 if (codet == UNKNOWN)
1678 return 0;
1679 }
1680
1681 return (codei == codet
1682 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
1683 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
1684 }
1685 \f
1686 /* Given a comparison (CODE ARG0 ARG1) inside an insn INSN, return the code
1687 of the reversed comparison if it is possible to do so; otherwise return
1688 UNKNOWN. UNKNOWN may also be returned for a CC_MODE compare when we don't
1689 know whether its source is a floating point or an integer comparison.
1690 The machine description should define the REVERSIBLE_CC_MODE and
1691 REVERSE_CONDITION macros to help this function avoid overhead in these cases. */
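/* Editor's sketch of typical use, mirroring the callers in this file:

       enum rtx_code rev = reversed_comparison_code (cond, insn);
       if (rev != UNKNOWN)
         ... it is safe to invert the branch using REV ...
*/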
1692 enum rtx_code
1693 reversed_comparison_code_parts (code, arg0, arg1, insn)
1694 rtx insn, arg0, arg1;
1695 enum rtx_code code;
1696 {
1697 enum machine_mode mode;
1698
1699 /* If this is not actually a comparison, we can't reverse it. */
1700 if (GET_RTX_CLASS (code) != '<')
1701 return UNKNOWN;
1702
1703 mode = GET_MODE (arg0);
1704 if (mode == VOIDmode)
1705 mode = GET_MODE (arg1);
1706
1707 /* First see if the machine description supplies us a way to reverse the
1708 comparison. Give it priority over everything else to allow the machine
1709 description to do tricks. */
1710 #ifdef REVERSIBLE_CC_MODE
1711 if (GET_MODE_CLASS (mode) == MODE_CC
1712 && REVERSIBLE_CC_MODE (mode))
1713 {
1714 #ifdef REVERSE_CONDITION
1715 return REVERSE_CONDITION (code, mode);
1716 #endif
1717 return reverse_condition (code);
1718 }
1719 #endif
1720
1721 /* Try a few special cases based on the comparison code. */
1722 switch (code)
1723 {
1724 case GEU:
1725 case GTU:
1726 case LEU:
1727 case LTU:
1728 case NE:
1729 case EQ:
1730 /* It is always safe to reverse EQ and NE, even for floating
1731 point. Similarly, the unsigned comparisons are never used for
1732 floating point, so we can reverse them in the default way. */
1733 return reverse_condition (code);
1734 case ORDERED:
1735 case UNORDERED:
1736 case LTGT:
1737 case UNEQ:
1738 /* If we already see an unordered comparison, we can be sure we are
1739 dealing with floating point, so we don't need any more tests. */
1740 return reverse_condition_maybe_unordered (code);
1741 case UNLT:
1742 case UNLE:
1743 case UNGT:
1744 case UNGE:
1745 /* We don't have a safe way to reverse these yet. */
1746 return UNKNOWN;
1747 default:
1748 break;
1749 }
1750
1751 /* If we give up IEEE compatibility, all comparisons are reversible. */
1752 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
1753 || flag_unsafe_math_optimizations)
1754 return reverse_condition (code);
1755
1756 if (GET_MODE_CLASS (mode) == MODE_CC
1757 #ifdef HAVE_cc0
1758 || arg0 == cc0_rtx
1759 #endif
1760 )
1761 {
1762 rtx prev;
1763 /* Try to search for the comparison to determine the real mode.
1764 This code is expensive, but with a sane machine description it
1765 will never be used, since REVERSIBLE_CC_MODE will return true
1766 in all cases. */
1767 if (! insn)
1768 return UNKNOWN;
1769
1770 for (prev = prev_nonnote_insn (insn);
1771 prev != 0 && GET_CODE (prev) != CODE_LABEL;
1772 prev = prev_nonnote_insn (prev))
1773 {
1774 rtx set = set_of (arg0, prev);
1775 if (set && GET_CODE (set) == SET
1776 && rtx_equal_p (SET_DEST (set), arg0))
1777 {
1778 rtx src = SET_SRC (set);
1779
1780 if (GET_CODE (src) == COMPARE)
1781 {
1782 rtx comparison = src;
1783 arg0 = XEXP (src, 0);
1784 mode = GET_MODE (arg0);
1785 if (mode == VOIDmode)
1786 mode = GET_MODE (XEXP (comparison, 1));
1787 break;
1788 }
1789 /* We can get past reg-reg moves. This may be useful for the
1790 i387 model of comparisons, which first moves flag registers around. */
1791 if (REG_P (src))
1792 {
1793 arg0 = src;
1794 continue;
1795 }
1796 }
1797 /* If the register is clobbered in some way we don't understand,
1798 give up. */
1799 if (set)
1800 return UNKNOWN;
1801 }
1802 }
1803
1804 /* An integer condition. */
1805 if (GET_CODE (arg0) == CONST_INT
1806 || (GET_MODE (arg0) != VOIDmode
1807 && GET_MODE_CLASS (mode) != MODE_CC
1808 && ! FLOAT_MODE_P (mode)))
1809 return reverse_condition (code);
1810
1811 return UNKNOWN;
1812 }
1813
1814 /* A wrapper around the previous function that takes COMPARISON as an rtx
1815 expression. This simplifies many callers. */
1816 enum rtx_code
1817 reversed_comparison_code (comparison, insn)
1818 rtx comparison, insn;
1819 {
1820 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
1821 return UNKNOWN;
1822 return reversed_comparison_code_parts (GET_CODE (comparison),
1823 XEXP (comparison, 0),
1824 XEXP (comparison, 1), insn);
1825 }
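
/* For example (an illustrative sketch): reversing
   (gt:SI (reg:SI 1) (const_int 0)) yields LE, since integer
   comparisons always reverse safely, while reversing
   (lt:DF (reg:DF 1) (reg:DF 2)) under IEEE arithmetic yields UNKNOWN
   unless -funsafe-math-optimizations is in effect, because LT and GE
   are not complementary in the presence of NaNs. */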
1826 \f
1827 /* Given an rtx-code for a comparison, return the code for the negated
1828 comparison. If no such code exists, return UNKNOWN.
1829
1830 WATCH OUT! reverse_condition is not safe to use on a jump that might
1831 be acting on the results of an IEEE floating point comparison, because
1832 of the special treatment of non-signaling nans in comparisons.
1833 Use reversed_comparison_code instead. */
1834
1835 enum rtx_code
1836 reverse_condition (code)
1837 enum rtx_code code;
1838 {
1839 switch (code)
1840 {
1841 case EQ:
1842 return NE;
1843 case NE:
1844 return EQ;
1845 case GT:
1846 return LE;
1847 case GE:
1848 return LT;
1849 case LT:
1850 return GE;
1851 case LE:
1852 return GT;
1853 case GTU:
1854 return LEU;
1855 case GEU:
1856 return LTU;
1857 case LTU:
1858 return GEU;
1859 case LEU:
1860 return GTU;
1861 case UNORDERED:
1862 return ORDERED;
1863 case ORDERED:
1864 return UNORDERED;
1865
1866 case UNLT:
1867 case UNLE:
1868 case UNGT:
1869 case UNGE:
1870 case UNEQ:
1871 case LTGT:
1872 return UNKNOWN;
1873
1874 default:
1875 abort ();
1876 }
1877 }
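
/* Under IEEE arithmetic, for instance, (lt:DF x y) and (ge:DF x y)
   are both false when either operand is a NaN, so blindly rewriting
   "branch if x < y" as "branch unless x >= y" would change which path
   is taken on unordered operands; hence the warning above. */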
1878
1879 /* Similar, but we're allowed to generate unordered comparisons, which
1880 makes it safe for IEEE floating-point. Of course, we have to recognize
1881 that the target will support them too... */
1882
1883 enum rtx_code
1884 reverse_condition_maybe_unordered (code)
1885 enum rtx_code code;
1886 {
1887 /* Non-IEEE formats don't have unordered conditions. */
1888 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
1889 return reverse_condition (code);
1890
1891 switch (code)
1892 {
1893 case EQ:
1894 return NE;
1895 case NE:
1896 return EQ;
1897 case GT:
1898 return UNLE;
1899 case GE:
1900 return UNLT;
1901 case LT:
1902 return UNGE;
1903 case LE:
1904 return UNGT;
1905 case LTGT:
1906 return UNEQ;
1907 case UNORDERED:
1908 return ORDERED;
1909 case ORDERED:
1910 return UNORDERED;
1911 case UNLT:
1912 return GE;
1913 case UNLE:
1914 return GT;
1915 case UNGT:
1916 return LE;
1917 case UNGE:
1918 return LT;
1919 case UNEQ:
1920 return LTGT;
1921
1922 default:
1923 abort ();
1924 }
1925 }
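
/* For example, the IEEE-safe reverse of LT is UNGE: "x < y" is false
   exactly when x >= y or when x and y compare unordered. */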
1926
1927 /* Similar, but return the code when two operands of a comparison are swapped.
1928 This IS safe for IEEE floating-point. */
1929
1930 enum rtx_code
1931 swap_condition (code)
1932 enum rtx_code code;
1933 {
1934 switch (code)
1935 {
1936 case EQ:
1937 case NE:
1938 case UNORDERED:
1939 case ORDERED:
1940 case UNEQ:
1941 case LTGT:
1942 return code;
1943
1944 case GT:
1945 return LT;
1946 case GE:
1947 return LE;
1948 case LT:
1949 return GT;
1950 case LE:
1951 return GE;
1952 case GTU:
1953 return LTU;
1954 case GEU:
1955 return LEU;
1956 case LTU:
1957 return GTU;
1958 case LEU:
1959 return GEU;
1960 case UNLT:
1961 return UNGT;
1962 case UNLE:
1963 return UNGE;
1964 case UNGT:
1965 return UNLT;
1966 case UNGE:
1967 return UNLE;
1968
1969 default:
1970 abort ();
1971 }
1972 }
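
/* For example, (lt x y) tests the same condition as (gt y x), so
   swapping the operands of an LT comparison means changing the code
   to GT. EQ, NE and the (un)ordered tests are symmetric in their
   operands and are returned unchanged. */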
1973
1974 /* Given a comparison CODE, return the corresponding unsigned comparison.
1975 If CODE is an equality comparison or already an unsigned comparison,
1976 CODE is returned. */
1977
1978 enum rtx_code
1979 unsigned_condition (code)
1980 enum rtx_code code;
1981 {
1982 switch (code)
1983 {
1984 case EQ:
1985 case NE:
1986 case GTU:
1987 case GEU:
1988 case LTU:
1989 case LEU:
1990 return code;
1991
1992 case GT:
1993 return GTU;
1994 case GE:
1995 return GEU;
1996 case LT:
1997 return LTU;
1998 case LE:
1999 return LEU;
2000
2001 default:
2002 abort ();
2003 }
2004 }
2005
2006 /* Similarly, return the signed version of a comparison. */
2007
2008 enum rtx_code
2009 signed_condition (code)
2010 enum rtx_code code;
2011 {
2012 switch (code)
2013 {
2014 case EQ:
2015 case NE:
2016 case GT:
2017 case GE:
2018 case LT:
2019 case LE:
2020 return code;
2021
2022 case GTU:
2023 return GT;
2024 case GEU:
2025 return GE;
2026 case LTU:
2027 return LT;
2028 case LEU:
2029 return LE;
2030
2031 default:
2032 abort ();
2033 }
2034 }
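
/* For example, unsigned_condition (GT) is GTU and
   signed_condition (GTU) is GT; equality tests have no signedness,
   so EQ and NE pass through both functions unchanged. */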
2035 \f
2036 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2037 truth of CODE1 implies the truth of CODE2. */
2038
2039 int
2040 comparison_dominates_p (code1, code2)
2041 enum rtx_code code1, code2;
2042 {
2043 /* UNKNOWN comparison codes can happen as a result of trying to reverse
2044 comparison codes.
2045 They can't match anything, so we have to reject them here. */
2046 if (code1 == UNKNOWN || code2 == UNKNOWN)
2047 return 0;
2048
2049 if (code1 == code2)
2050 return 1;
2051
2052 switch (code1)
2053 {
2054 case UNEQ:
2055 if (code2 == UNLE || code2 == UNGE)
2056 return 1;
2057 break;
2058
2059 case EQ:
2060 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
2061 || code2 == ORDERED)
2062 return 1;
2063 break;
2064
2065 case UNLT:
2066 if (code2 == UNLE || code2 == NE)
2067 return 1;
2068 break;
2069
2070 case LT:
2071 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2072 return 1;
2073 break;
2074
2075 case UNGT:
2076 if (code2 == UNGE || code2 == NE)
2077 return 1;
2078 break;
2079
2080 case GT:
2081 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2082 return 1;
2083 break;
2084
2085 case GE:
2086 case LE:
2087 if (code2 == ORDERED)
2088 return 1;
2089 break;
2090
2091 case LTGT:
2092 if (code2 == NE || code2 == ORDERED)
2093 return 1;
2094 break;
2095
2096 case LTU:
2097 if (code2 == LEU || code2 == NE)
2098 return 1;
2099 break;
2100
2101 case GTU:
2102 if (code2 == GEU || code2 == NE)
2103 return 1;
2104 break;
2105
2106 case UNORDERED:
2107 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
2108 || code2 == UNGE || code2 == UNGT)
2109 return 1;
2110 break;
2111
2112 default:
2113 break;
2114 }
2115
2116 return 0;
2117 }
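
/* For example, comparison_dominates_p (EQ, LE) is 1, since x == y
   implies x <= y, while comparison_dominates_p (LE, EQ) is 0.
   thread_jumps below relies on this to decide whether the outcome of
   one conditional branch determines the outcome of a later one. */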
2118 \f
2119 /* Return 1 if INSN is an unconditional jump and nothing else. */
2120
2121 int
2122 simplejump_p (insn)
2123 rtx insn;
2124 {
2125 return (GET_CODE (insn) == JUMP_INSN
2126 && GET_CODE (PATTERN (insn)) == SET
2127 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2128 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2129 }
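
/* For example, an insn whose entire pattern is
   (set (pc) (label_ref 23))
   satisfies simplejump_p (the label number is illustrative); the same
   jump bundled inside a PARALLEL does not, and must be detected with
   any_uncondjump_p instead. */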
2130
2131 /* Return nonzero if INSN is a (possibly) conditional jump
2132 and nothing more.
2133
2134 Use of this function is deprecated, since we need to support combined
2135 branch and compare insns. Use any_condjump_p instead whenever possible. */
2136
2137 int
2138 condjump_p (insn)
2139 rtx insn;
2140 {
2141 register rtx x = PATTERN (insn);
2142
2143 if (GET_CODE (x) != SET
2144 || GET_CODE (SET_DEST (x)) != PC)
2145 return 0;
2146
2147 x = SET_SRC (x);
2148 if (GET_CODE (x) == LABEL_REF)
2149 return 1;
2150 else
2151 return (GET_CODE (x) == IF_THEN_ELSE
2152 && ((GET_CODE (XEXP (x, 2)) == PC
2153 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
2154 || GET_CODE (XEXP (x, 1)) == RETURN))
2155 || (GET_CODE (XEXP (x, 1)) == PC
2156 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
2157 || GET_CODE (XEXP (x, 2)) == RETURN))));
2160 }
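
/* For example, a branch whose pattern is
   (set (pc) (if_then_else (eq (reg:SI 1) (const_int 0))
                           (label_ref 23)
                           (pc)))
   satisfies condjump_p, as does the form with the arms exchanged or
   with RETURN in place of the label_ref. */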
2161
2162 /* Return nonzero if INSN is a (possibly) conditional jump inside a
2163 PARALLEL.
2164
2165 Use of this function is deprecated, since we need to support combined
2166 branch and compare insns. Use any_condjump_p instead whenever possible. */
2167
2168 int
2169 condjump_in_parallel_p (insn)
2170 rtx insn;
2171 {
2172 register rtx x = PATTERN (insn);
2173
2174 if (GET_CODE (x) != PARALLEL)
2175 return 0;
2176 else
2177 x = XVECEXP (x, 0, 0);
2178
2179 if (GET_CODE (x) != SET)
2180 return 0;
2181 if (GET_CODE (SET_DEST (x)) != PC)
2182 return 0;
2183 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2184 return 1;
2185 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2186 return 0;
2187 if (XEXP (SET_SRC (x), 2) == pc_rtx
2188 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2189 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2190 return 1;
2191 if (XEXP (SET_SRC (x), 1) == pc_rtx
2192 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2193 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2194 return 1;
2195 return 0;
2196 }
2197
2198 /* Return the SET of PC in INSN, or NULL_RTX if there is none. */
2199
2200 rtx
2201 pc_set (insn)
2202 rtx insn;
2203 {
2204 rtx pat;
2205 if (GET_CODE (insn) != JUMP_INSN)
2206 return NULL_RTX;
2207 pat = PATTERN (insn);
2208
2209 /* The set is allowed to appear either as the insn pattern or
2210 the first set in a PARALLEL. */
2211 if (GET_CODE (pat) == PARALLEL)
2212 pat = XVECEXP (pat, 0, 0);
2213 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
2214 return pat;
2215
2216 return NULL_RTX;
2217 }
2218
2219 /* Return true when insn is an unconditional direct jump,
2220 possibly bundled inside a PARALLEL. */
2221
2222 int
2223 any_uncondjump_p (insn)
2224 rtx insn;
2225 {
2226 rtx x = pc_set (insn);
2227 if (!x)
2228 return 0;
2229 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
2230 return 0;
2231 return 1;
2232 }
2233
2234 /* Return true when insn is a conditional jump. This function works for
2235 instructions containing PC sets in PARALLELs. The instruction may have
2236 various other effects, so before removing the jump you must verify
2237 onlyjump_p.
2238
2239 Note that unlike condjump_p it returns false for unconditional jumps. */
2240
2241 int
2242 any_condjump_p (insn)
2243 rtx insn;
2244 {
2245 rtx x = pc_set (insn);
2246 enum rtx_code a, b;
2247
2248 if (!x)
2249 return 0;
2250 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2251 return 0;
2252
2253 a = GET_CODE (XEXP (SET_SRC (x), 1));
2254 b = GET_CODE (XEXP (SET_SRC (x), 2));
2255
2256 return ((b == PC && (a == LABEL_REF || a == RETURN))
2257 || (a == PC && (b == LABEL_REF || b == RETURN)));
2258 }
2259
2260 /* Return the label of a conditional jump. */
2261
2262 rtx
2263 condjump_label (insn)
2264 rtx insn;
2265 {
2266 rtx x = pc_set (insn);
2267
2268 if (!x)
2269 return NULL_RTX;
2270 x = SET_SRC (x);
2271 if (GET_CODE (x) == LABEL_REF)
2272 return x;
2273 if (GET_CODE (x) != IF_THEN_ELSE)
2274 return NULL_RTX;
2275 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
2276 return XEXP (x, 1);
2277 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
2278 return XEXP (x, 2);
2279 return NULL_RTX;
2280 }
2281
2282 /* Return true if INSN is a (possibly conditional) return insn. */
2283
2284 static int
2285 returnjump_p_1 (loc, data)
2286 rtx *loc;
2287 void *data ATTRIBUTE_UNUSED;
2288 {
2289 rtx x = *loc;
2290 return x && GET_CODE (x) == RETURN;
2291 }
2292
2293 int
2294 returnjump_p (insn)
2295 rtx insn;
2296 {
2297 if (GET_CODE (insn) != JUMP_INSN)
2298 return 0;
2299 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
2300 }
2301
2302 /* Return true if INSN is a jump that only transfers control and
2303 nothing more. */
2304
2305 int
2306 onlyjump_p (insn)
2307 rtx insn;
2308 {
2309 rtx set;
2310
2311 if (GET_CODE (insn) != JUMP_INSN)
2312 return 0;
2313
2314 set = single_set (insn);
2315 if (set == NULL)
2316 return 0;
2317 if (GET_CODE (SET_DEST (set)) != PC)
2318 return 0;
2319 if (side_effects_p (SET_SRC (set)))
2320 return 0;
2321
2322 return 1;
2323 }
2324
2325 #ifdef HAVE_cc0
2326
2327 /* Return 1 if X is an RTX that does nothing but set the condition codes
2328 and CLOBBER or USE registers.
2329 Return -1 if X does explicitly set the condition codes,
2330 but also does other things. */
2331
2332 int
2333 sets_cc0_p (x)
2334 rtx x;
2335 {
2336 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
2337 return 1;
2338 if (GET_CODE (x) == PARALLEL)
2339 {
2340 int i;
2341 int sets_cc0 = 0;
2342 int other_things = 0;
2343 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2344 {
2345 if (GET_CODE (XVECEXP (x, 0, i)) == SET
2346 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
2347 sets_cc0 = 1;
2348 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
2349 other_things = 1;
2350 }
2351 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
2352 }
2353 return 0;
2354 }
2355 #endif
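
/* For example, sets_cc0_p returns 1 for
   (set (cc0) (compare (reg:SI 1) (const_int 0)))
   and -1 for a PARALLEL that sets both cc0 and a data register, since
   such an insn does more than set the condition codes. */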
2356 \f
2357 /* Follow any unconditional jump at LABEL;
2358 return the ultimate label reached by any such chain of jumps.
2359 If LABEL is not followed by a jump, return LABEL.
2360 If the chain loops or we can't find the end, return LABEL,
2361 since that tells the caller to avoid changing the insn.
2362
2363 If reload_completed is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2364 a USE or CLOBBER. */
2365
2366 rtx
2367 follow_jumps (label)
2368 rtx label;
2369 {
2370 register rtx insn;
2371 register rtx next;
2372 register rtx value = label;
2373 register int depth;
2374
2375 for (depth = 0;
2376 (depth < 10
2377 && (insn = next_active_insn (value)) != 0
2378 && GET_CODE (insn) == JUMP_INSN
2379 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
2380 && onlyjump_p (insn))
2381 || GET_CODE (PATTERN (insn)) == RETURN)
2382 && (next = NEXT_INSN (insn))
2383 && GET_CODE (next) == BARRIER);
2384 depth++)
2385 {
2386 /* Don't chain through the insn that jumps into a loop
2387 from outside the loop,
2388 since that would create multiple loop entry jumps
2389 and prevent loop optimization. */
2390 rtx tem;
2391 if (!reload_completed)
2392 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
2393 if (GET_CODE (tem) == NOTE
2394 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
2395 /* ??? Optional. Disables some optimizations, but makes
2396 gcov output more accurate with -O. */
2397 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
2398 return value;
2399
2400 /* If we have found a cycle, make the insn jump to itself. */
2401 if (JUMP_LABEL (insn) == label)
2402 return label;
2403
2404 tem = next_active_insn (JUMP_LABEL (insn));
2405 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2406 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2407 break;
2408
2409 value = JUMP_LABEL (insn);
2410 }
2411 if (depth == 10)
2412 return label;
2413 return value;
2414 }
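
/* For example, given the chain
   L1: jump L2 ... L2: jump L3 ... L3: (useful code)
   follow_jumps (L1) returns L3, so a branch to L1 can be redirected
   straight to L3. If an insn jumps back to LABEL itself, the cycle
   check above returns the original label unchanged. */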
2415
2416 /* Assuming that field IDX of X is a vector of label_refs,
2417 replace each of them by the ultimate label reached by it.
2418 Return nonzero if a change is made.
2419 Before reload, we do not chain across a NOTE_INSN_LOOP_BEG; see follow_jumps. */
2420
2421 static int
2422 tension_vector_labels (x, idx)
2423 register rtx x;
2424 register int idx;
2425 {
2426 int changed = 0;
2427 register int i;
2428 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
2429 {
2430 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
2431 register rtx nlabel = follow_jumps (olabel);
2432 if (nlabel && nlabel != olabel)
2433 {
2434 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
2435 ++LABEL_NUSES (nlabel);
2436 if (--LABEL_NUSES (olabel) == 0)
2437 delete_insn (olabel);
2438 changed = 1;
2439 }
2440 }
2441 return changed;
2442 }
2443 \f
2444 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2445 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2446 in INSN, then store one of them in JUMP_LABEL (INSN).
2447 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2448 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2449 Also, when there are consecutive labels, canonicalize on the last of them.
2450
2451 Note that two labels separated by a loop-beginning note
2452 must be kept distinct if we have not yet done loop-optimization,
2453 because the gap between them is where loop-optimize
2454 will want to move invariant code to. CROSS_JUMP tells us
2455 that loop optimization has already been done.
2456
2457 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2458 two labels distinct if they are separated by only USE or CLOBBER insns. */
2459
2460 void
2461 mark_jump_label (x, insn, cross_jump, in_mem)
2462 register rtx x;
2463 rtx insn;
2464 int cross_jump;
2465 int in_mem;
2466 {
2467 register RTX_CODE code = GET_CODE (x);
2468 register int i;
2469 register const char *fmt;
2470
2471 switch (code)
2472 {
2473 case PC:
2474 case CC0:
2475 case REG:
2476 case SUBREG:
2477 case CONST_INT:
2478 case CONST_DOUBLE:
2479 case CLOBBER:
2480 case CALL:
2481 return;
2482
2483 case MEM:
2484 in_mem = 1;
2485 break;
2486
2487 case SYMBOL_REF:
2488 if (!in_mem)
2489 return;
2490
2491 /* If this is a constant-pool reference, see if it is a label. */
2492 if (CONSTANT_POOL_ADDRESS_P (x))
2493 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
2494 break;
2495
2496 case LABEL_REF:
2497 {
2498 rtx label = XEXP (x, 0);
2499 rtx olabel = label;
2500 rtx next;
2501
2502 /* Ignore remaining references to unreachable labels that
2503 have been deleted. */
2504 if (GET_CODE (label) == NOTE
2505 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
2506 break;
2507
2508 if (GET_CODE (label) != CODE_LABEL)
2509 abort ();
2510
2511 /* Ignore references to labels of containing functions. */
2512 if (LABEL_REF_NONLOCAL_P (x))
2513 break;
2514
2515 /* If there are other labels following this one,
2516 replace it with the last of the consecutive labels. */
2517 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
2518 {
2519 if (GET_CODE (next) == CODE_LABEL)
2520 label = next;
2521 else if (cross_jump && GET_CODE (next) == INSN
2522 && (GET_CODE (PATTERN (next)) == USE
2523 || GET_CODE (PATTERN (next)) == CLOBBER))
2524 continue;
2525 else if (GET_CODE (next) != NOTE)
2526 break;
2527 else if (! cross_jump
2528 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
2529 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
2530 /* ??? Optional. Disables some optimizations, but
2531 makes gcov output more accurate with -O. */
2532 || (flag_test_coverage
2533 && NOTE_LINE_NUMBER (next) > 0)))
2534 break;
2535 }
2536
2537 XEXP (x, 0) = label;
2538 if (! insn || ! INSN_DELETED_P (insn))
2539 ++LABEL_NUSES (label);
2540
2541 if (insn)
2542 {
2543 if (GET_CODE (insn) == JUMP_INSN)
2544 JUMP_LABEL (insn) = label;
2545 else
2546 {
2547 /* If we've changed the label, update notes accordingly. */
2548 if (label != olabel)
2549 {
2550 rtx note;
2551
2552 /* We may have a REG_LABEL note to indicate that this
2553 instruction uses the label. */
2554 note = find_reg_note (insn, REG_LABEL, olabel);
2555 if (note)
2556 XEXP (note, 0) = label;
2557
2558 /* We may also have a REG_EQUAL note to indicate that
2559 a register is being set to the address of the
2560 label. */
2561 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
2562 if (note
2563 && GET_CODE (XEXP (note, 0)) == LABEL_REF
2564 && XEXP (XEXP (note, 0), 0) == olabel)
2565 XEXP (XEXP (note, 0), 0) = label;
2566 }
2567
2568 /* Add a REG_LABEL note for LABEL unless there already
2569 is one. All uses of a label, except for labels
2570 that are the targets of jumps, must have a
2571 REG_LABEL note. */
2572 if (! find_reg_note (insn, REG_LABEL, label))
2573 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
2574 REG_NOTES (insn));
2575 }
2576 }
2577 return;
2578 }
2579
2580 /* Do walk the labels in a vector, but not the first operand of an
2581 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
2582 case ADDR_VEC:
2583 case ADDR_DIFF_VEC:
2584 if (! INSN_DELETED_P (insn))
2585 {
2586 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
2587
2588 for (i = 0; i < XVECLEN (x, eltnum); i++)
2589 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
2590 cross_jump, in_mem);
2591 }
2592 return;
2593
2594 default:
2595 break;
2596 }
2597
2598 fmt = GET_RTX_FORMAT (code);
2599 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2600 {
2601 if (fmt[i] == 'e')
2602 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
2603 else if (fmt[i] == 'E')
2604 {
2605 register int j;
2606 for (j = 0; j < XVECLEN (x, i); j++)
2607 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
2608 }
2609 }
2610 }
2611
2612 /* If all INSN does is set the pc, delete it,
2613 and delete the insn that set the condition codes for it
2614 if that's what the previous insn was. */
2615
2616 void
2617 delete_jump (insn)
2618 rtx insn;
2619 {
2620 register rtx set = single_set (insn);
2621
2622 if (set && GET_CODE (SET_DEST (set)) == PC)
2623 delete_computation (insn);
2624 }
2625
2626 /* Verify INSN is a BARRIER and delete it. */
2627
2628 void
2629 delete_barrier (insn)
2630 rtx insn;
2631 {
2632 if (GET_CODE (insn) != BARRIER)
2633 abort ();
2634
2635 delete_insn (insn);
2636 }
2637
2638 /* Recursively delete prior insns that compute the value (used only by INSN
2639 which the caller is deleting) stored in the register mentioned by NOTE,
2640 which is a REG_DEAD note associated with INSN. */
2641
2642 static void
2643 delete_prior_computation (note, insn)
2644 rtx note;
2645 rtx insn;
2646 {
2647 rtx our_prev;
2648 rtx reg = XEXP (note, 0);
2649
2650 for (our_prev = prev_nonnote_insn (insn);
2651 our_prev && (GET_CODE (our_prev) == INSN
2652 || GET_CODE (our_prev) == CALL_INSN);
2653 our_prev = prev_nonnote_insn (our_prev))
2654 {
2655 rtx pat = PATTERN (our_prev);
2656
2657 /* If we reach a CALL which is not calling a const function
2658 or the callee pops the arguments, then give up. */
2659 if (GET_CODE (our_prev) == CALL_INSN
2660 && (! CONST_CALL_P (our_prev)
2661 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
2662 break;
2663
2664 /* If we reach a SEQUENCE, it is too complex to try to
2665 do anything with it, so give up. */
2666 if (GET_CODE (pat) == SEQUENCE)
2667 break;
2668
2669 if (GET_CODE (pat) == USE
2670 && GET_CODE (XEXP (pat, 0)) == INSN)
2671 /* reorg creates USEs that look like this. We leave them
2672 alone because reorg needs them for its own purposes. */
2673 break;
2674
2675 if (reg_set_p (reg, pat))
2676 {
2677 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
2678 break;
2679
2680 if (GET_CODE (pat) == PARALLEL)
2681 {
2682 /* If we find a SET of something else, we can't
2683 delete the insn. */
2684
2685 int i;
2686
2687 for (i = 0; i < XVECLEN (pat, 0); i++)
2688 {
2689 rtx part = XVECEXP (pat, 0, i);
2690
2691 if (GET_CODE (part) == SET
2692 && SET_DEST (part) != reg)
2693 break;
2694 }
2695
2696 if (i == XVECLEN (pat, 0))
2697 delete_computation (our_prev);
2698 }
2699 else if (GET_CODE (pat) == SET
2700 && GET_CODE (SET_DEST (pat)) == REG)
2701 {
2702 int dest_regno = REGNO (SET_DEST (pat));
2703 int dest_endregno
2704 = (dest_regno
2705 + (dest_regno < FIRST_PSEUDO_REGISTER
2706 ? HARD_REGNO_NREGS (dest_regno,
2707 GET_MODE (SET_DEST (pat))) : 1));
2708 int regno = REGNO (reg);
2709 int endregno
2710 = (regno
2711 + (regno < FIRST_PSEUDO_REGISTER
2712 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
2713
2714 if (dest_regno >= regno
2715 && dest_endregno <= endregno)
2716 delete_computation (our_prev);
2717
2718 /* We may have a multi-word hard register of which some, but not
2719 all, words are needed in subsequent
2720 insns. Write REG_UNUSED notes for those parts that were not
2721 needed. */
2722 else if (dest_regno <= regno
2723 && dest_endregno >= endregno)
2724 {
2725 int i;
2726
2727 REG_NOTES (our_prev)
2728 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
2729 REG_NOTES (our_prev));
2730
2731 for (i = dest_regno; i < dest_endregno; i++)
2732 if (! find_regno_note (our_prev, REG_UNUSED, i))
2733 break;
2734
2735 if (i == dest_endregno)
2736 delete_computation (our_prev);
2737 }
2738 }
2739
2740 break;
2741 }
2742
2743 /* If PAT references the register that dies here, it is an
2744 additional use. Hence any prior SET isn't dead. However, this
2745 insn becomes the new place for the REG_DEAD note. */
2746 if (reg_overlap_mentioned_p (reg, pat))
2747 {
2748 XEXP (note, 1) = REG_NOTES (our_prev);
2749 REG_NOTES (our_prev) = note;
2750 break;
2751 }
2752 }
2753 }
2754
2755 /* Delete INSN and recursively delete insns that compute values used only
2756 by INSN. This uses the REG_DEAD notes computed during flow analysis.
2757 If we are running before flow.c, we need do nothing since flow.c will
2758 delete dead code. We also can't know if the registers being used are
2759 dead or not at this point.
2760
2761 Otherwise, look at all our REG_DEAD notes. If a previous insn does
2762 nothing other than set a register that dies in this insn, we can delete
2763 that insn as well.
2764
2765 On machines with CC0, if CC0 is used in this insn, we may be able to
2766 delete the insn that set it. */
2767
2768 static void
2769 delete_computation (insn)
2770 rtx insn;
2771 {
2772 rtx note, next;
2773
2774 #ifdef HAVE_cc0
2775 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2776 {
2777 rtx prev = prev_nonnote_insn (insn);
2778 /* We assume that at this stage
2779 CC's are always set explicitly
2780 and always immediately before the jump that
2781 will use them. So if the previous insn
2782 exists to set the CC's, delete it
2783 (unless it performs auto-increments, etc.). */
2784 if (prev && GET_CODE (prev) == INSN
2785 && sets_cc0_p (PATTERN (prev)))
2786 {
2787 if (sets_cc0_p (PATTERN (prev)) > 0
2788 && ! side_effects_p (PATTERN (prev)))
2789 delete_computation (prev);
2790 else
2791 /* Otherwise, show that cc0 won't be used. */
2792 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
2793 cc0_rtx, REG_NOTES (prev));
2794 }
2795 }
2796 #endif
2797
2798 for (note = REG_NOTES (insn); note; note = next)
2799 {
2800 next = XEXP (note, 1);
2801
2802 if (REG_NOTE_KIND (note) != REG_DEAD
2803 /* Verify that the REG_NOTE is legitimate. */
2804 || GET_CODE (XEXP (note, 0)) != REG)
2805 continue;
2806
2807 delete_prior_computation (note, insn);
2808 }
2809
2810 delete_insn (insn);
2811 }
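
/* For example, on a machine with cc0, deleting a conditional branch
   through delete_jump also deletes the compare insn immediately
   before it (which did nothing but set cc0), since the only consumer
   of the condition codes is gone. */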
2812 \f
2813 /* Delete insn INSN from the chain of insns and update label ref counts.
2814 May delete some following insns as a consequence; may even delete
2815 a label elsewhere and insns that follow it.
2816
2817 Returns the first insn after INSN that was not deleted. */
2818
2819 rtx
2820 delete_insn (insn)
2821 register rtx insn;
2822 {
2823 register rtx next = NEXT_INSN (insn);
2824 register rtx prev = PREV_INSN (insn);
2825 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
2826 register int dont_really_delete = 0;
2827 rtx note;
2828
2829 while (next && INSN_DELETED_P (next))
2830 next = NEXT_INSN (next);
2831
2832 /* This insn is already deleted => return first following nondeleted. */
2833 if (INSN_DELETED_P (insn))
2834 return next;
2835
2836 if (was_code_label)
2837 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2838
2839 /* Don't delete user-declared labels. When optimizing, convert them
2840 to special NOTEs instead. When not optimizing, leave them alone. */
2841 if (was_code_label && LABEL_NAME (insn) != 0)
2842 {
2843 if (optimize)
2844 {
2845 const char *name = LABEL_NAME (insn);
2846 PUT_CODE (insn, NOTE);
2847 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
2848 NOTE_SOURCE_FILE (insn) = name;
2849 }
2850
2851 dont_really_delete = 1;
2852 }
2853 else
2854 /* Mark this insn as deleted. */
2855 INSN_DELETED_P (insn) = 1;
2856
2857 /* If this is an unconditional jump, delete it from the jump chain. */
2858 if (simplejump_p (insn))
2859 delete_from_jump_chain (insn);
2860
2861 /* If instruction is followed by a barrier,
2862 delete the barrier too. */
2863
2864 if (next != 0 && GET_CODE (next) == BARRIER)
2865 {
2866 INSN_DELETED_P (next) = 1;
2867 next = NEXT_INSN (next);
2868 }
2869
2870 /* Patch out INSN (and the barrier if any). */
2871
2872 if (! dont_really_delete)
2873 {
2874 if (prev)
2875 {
2876 NEXT_INSN (prev) = next;
2877 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2878 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
2879 XVECLEN (PATTERN (prev), 0) - 1)) = next;
2880 }
2881
2882 if (next)
2883 {
2884 PREV_INSN (next) = prev;
2885 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2886 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2887 }
2888
2889 if (prev && NEXT_INSN (prev) == 0)
2890 set_last_insn (prev);
2891 }
2892
2893 /* If deleting a jump, decrement the count of the label,
2894 and delete the label if it is now unused. */
2895
2896 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2897 {
2898 rtx lab = JUMP_LABEL (insn), lab_next;
2899
2900 if (--LABEL_NUSES (lab) == 0)
2901 {
2902 /* This can delete NEXT or PREV,
2903 either directly if NEXT is JUMP_LABEL (INSN),
2904 or indirectly through more levels of jumps. */
2905 delete_insn (lab);
2906
2907 /* I feel a little doubtful about this loop,
2908 but I see no clean and sure alternative way
2909 to find the first insn after INSN that is not now deleted.
2910 I hope this works. */
2911 while (next && INSN_DELETED_P (next))
2912 next = NEXT_INSN (next);
2913 return next;
2914 }
2915 else if ((lab_next = next_nonnote_insn (lab)) != NULL
2916 && GET_CODE (lab_next) == JUMP_INSN
2917 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
2918 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
2919 {
2920 /* If we're deleting the tablejump, delete the dispatch table.
2921 We may not be able to kill the label immediately preceding it
2922 just yet, as it might be referenced in code leading up to
2923 the tablejump. */
2924 delete_insn (lab_next);
2925 }
2926 }
2927
2928 /* Likewise if we're deleting a dispatch table. */
2929
2930 if (GET_CODE (insn) == JUMP_INSN
2931 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2932 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2933 {
2934 rtx pat = PATTERN (insn);
2935 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
2936 int len = XVECLEN (pat, diff_vec_p);
2937
2938 for (i = 0; i < len; i++)
2939 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
2940 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
2941 while (next && INSN_DELETED_P (next))
2942 next = NEXT_INSN (next);
2943 return next;
2944 }
2945
2946 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
2947 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2948 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2949 if (REG_NOTE_KIND (note) == REG_LABEL
2950 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
2951 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2952 if (--LABEL_NUSES (XEXP (note, 0)) == 0)
2953 delete_insn (XEXP (note, 0));
2954
2955 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
2956 prev = PREV_INSN (prev);
2957
2958 /* If INSN was a label and a dispatch table follows it,
2959 delete the dispatch table. The tablejump must have gone already.
2960 It isn't useful to fall through into a table. */
2961
2962 if (was_code_label
2963 && NEXT_INSN (insn) != 0
2964 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
2965 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
2966 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
2967 next = delete_insn (NEXT_INSN (insn));
2968
2969 /* If INSN was a label, delete insns following it if now unreachable. */
2970
2971 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
2972 {
2973 register RTX_CODE code;
2974 while (next != 0
2975 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
2976 || code == NOTE || code == BARRIER
2977 || (code == CODE_LABEL && INSN_DELETED_P (next))))
2978 {
2979 if (code == NOTE
2980 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
2981 next = NEXT_INSN (next);
2982 /* Keep going past other deleted labels to delete what follows. */
2983 else if (code == CODE_LABEL && INSN_DELETED_P (next))
2984 next = NEXT_INSN (next);
2985 else
2986 /* Note: if this deletes a jump, it can cause more
2987 deletion of unreachable code, after a different label.
2988 As long as the value from this recursive call is correct,
2989 this invocation functions correctly. */
2990 next = delete_insn (next);
2991 }
2992 }
2993
2994 return next;
2995 }
2996
2997 /* Advance from INSN till reaching something not deleted
2998 then return that. May return INSN itself. */
2999
3000 rtx
3001 next_nondeleted_insn (insn)
3002 rtx insn;
3003 {
3004 while (INSN_DELETED_P (insn))
3005 insn = NEXT_INSN (insn);
3006 return insn;
3007 }
3008 \f
3009 /* Delete a range of insns from FROM to TO, inclusive.
3010 This is for the sake of peephole optimization, so assume
3011 that whatever these insns do will still be done by a new
3012 peephole insn that will replace them. */
3013
3014 void
3015 delete_for_peephole (from, to)
3016 register rtx from, to;
3017 {
3018 register rtx insn = from;
3019
3020 while (1)
3021 {
3022 register rtx next = NEXT_INSN (insn);
3023 register rtx prev = PREV_INSN (insn);
3024
3025 if (GET_CODE (insn) != NOTE)
3026 {
3027 INSN_DELETED_P (insn) = 1;
3028
3029 /* Patch this insn out of the chain. */
3030 /* We don't do this all at once, because we
3031 must preserve all NOTEs. */
3032 if (prev)
3033 NEXT_INSN (prev) = next;
3034
3035 if (next)
3036 PREV_INSN (next) = prev;
3037 }
3038
3039 if (insn == to)
3040 break;
3041 insn = next;
3042 }
3043
3044 /* Note that if TO is an unconditional jump
3045 we *do not* delete the BARRIER that follows,
3046 since the peephole that replaces this sequence
3047 is also an unconditional jump in that case. */
3048 }
3049 \f
3050 /* We have determined that INSN is never reached, and are about to
3051 delete it. Print a warning if the user asked for one.
3052
3053 To try to make this warning more useful, this should only be called
3054 once per basic block not reached, and it only warns when the basic
3055 block contains more than one line from the current function, and
3056 contains at least one operation. CSE and inlining can duplicate insns,
3057 so it's possible to get spurious warnings from this. */
3058
3059 void
3060 never_reached_warning (avoided_insn)
3061 rtx avoided_insn;
3062 {
3063 rtx insn;
3064 rtx a_line_note = NULL;
3065 int two_avoided_lines = 0;
3066 int contains_insn = 0;
3067
3068 if (! warn_notreached)
3069 return;
3070
3071 /* Scan forwards, looking at LINE_NUMBER notes, until
3072 we hit a LABEL or we run out of insns. */
3073
3074 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
3075 {
3076 if (GET_CODE (insn) == CODE_LABEL)
3077 break;
3078 else if (GET_CODE (insn) == NOTE /* A line number note? */
3079 && NOTE_LINE_NUMBER (insn) >= 0)
3080 {
3081 if (a_line_note == NULL)
3082 a_line_note = insn;
3083 else
3084 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
3085 != NOTE_LINE_NUMBER (insn));
3086 }
3087 else if (INSN_P (insn))
3088 contains_insn = 1;
3089 }
3090 if (two_avoided_lines && contains_insn)
3091 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
3092 NOTE_LINE_NUMBER (a_line_note),
3093 "will never be executed");
3094 }
3095 \f
3096 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
3097 NLABEL as a return. Accrue modifications into the change group. */
3098
3099 static void
3100 redirect_exp_1 (loc, olabel, nlabel, insn)
3101 rtx *loc;
3102 rtx olabel, nlabel;
3103 rtx insn;
3104 {
3105 register rtx x = *loc;
3106 register RTX_CODE code = GET_CODE (x);
3107 register int i;
3108 register const char *fmt;
3109
3110 if (code == LABEL_REF)
3111 {
3112 if (XEXP (x, 0) == olabel)
3113 {
3114 rtx n;
3115 if (nlabel)
3116 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3117 else
3118 n = gen_rtx_RETURN (VOIDmode);
3119
3120 validate_change (insn, loc, n, 1);
3121 return;
3122 }
3123 }
3124 else if (code == RETURN && olabel == 0)
3125 {
3126 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3127 if (loc == &PATTERN (insn))
3128 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3129 validate_change (insn, loc, x, 1);
3130 return;
3131 }
3132
3133 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
3134 && GET_CODE (SET_SRC (x)) == LABEL_REF
3135 && XEXP (SET_SRC (x), 0) == olabel)
3136 {
3137 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
3138 return;
3139 }
3140
3141 fmt = GET_RTX_FORMAT (code);
3142 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3143 {
3144 if (fmt[i] == 'e')
3145 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
3146 else if (fmt[i] == 'E')
3147 {
3148 register int j;
3149 for (j = 0; j < XVECLEN (x, i); j++)
3150 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
3151 }
3152 }
3153 }
3154
3155 /* Similar, but apply the change group and report success or failure. */
3156
3157 static int
3158 redirect_exp (olabel, nlabel, insn)
3159 rtx olabel, nlabel;
3160 rtx insn;
3161 {
3162 rtx *loc;
3163
3164 if (GET_CODE (PATTERN (insn)) == PARALLEL)
3165 loc = &XVECEXP (PATTERN (insn), 0, 0);
3166 else
3167 loc = &PATTERN (insn);
3168
3169 redirect_exp_1 (loc, olabel, nlabel, insn);
3170 if (num_validated_changes () == 0)
3171 return 0;
3172
3173 return apply_change_group ();
3174 }
3175
3176 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
3177 the modifications into the change group. Return false if we did
3178 not see how to do that. */
3179
3180 int
3181 redirect_jump_1 (jump, nlabel)
3182 rtx jump, nlabel;
3183 {
3184 int ochanges = num_validated_changes ();
3185 rtx *loc;
3186
3187 if (GET_CODE (PATTERN (jump)) == PARALLEL)
3188 loc = &XVECEXP (PATTERN (jump), 0, 0);
3189 else
3190 loc = &PATTERN (jump);
3191
3192 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
3193 return num_validated_changes () > ochanges;
3194 }
3195
3196 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
3197 jump target label is unused as a result, it and the code following
3198 it may be deleted.
3199
3200 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3201 RETURN insn.
3202
3203 The return value will be 1 if the change was made, 0 if it wasn't
3204 (this can only occur for NLABEL == 0). */
3205
3206 int
3207 redirect_jump (jump, nlabel, delete_unused)
3208 rtx jump, nlabel;
3209 int delete_unused;
3210 {
3211 register rtx olabel = JUMP_LABEL (jump);
3212
3213 if (nlabel == olabel)
3214 return 1;
3215
3216 if (! redirect_exp (olabel, nlabel, jump))
3217 return 0;
3218
3219 /* If this is an unconditional branch, delete it from the jump_chain of
3220 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3221 have UID's in range and JUMP_CHAIN is valid). */
3222 if (jump_chain && (simplejump_p (jump)
3223 || GET_CODE (PATTERN (jump)) == RETURN))
3224 {
3225 int label_index = nlabel ? INSN_UID (nlabel) : 0;
3226
3227 delete_from_jump_chain (jump);
3228 if (label_index < max_jump_chain
3229 && INSN_UID (jump) < max_jump_chain)
3230 {
3231 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
3232 jump_chain[label_index] = jump;
3233 }
3234 }
3235
3236 JUMP_LABEL (jump) = nlabel;
3237 if (nlabel)
3238 ++LABEL_NUSES (nlabel);
3239
3240 /* If we're eliding the jump over exception cleanups at the end of a
3241 function, move the function end note so that -Wreturn-type works. */
3242 if (olabel && nlabel
3243 && NEXT_INSN (olabel)
3244 && GET_CODE (NEXT_INSN (olabel)) == NOTE
3245 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
3246 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
3247
3248 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
3249 delete_insn (olabel);
3250
3251 return 1;
3252 }
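
/* For example, redirecting a simple jump (set (pc) (label_ref OLD))
   to NEW replaces the label_ref, moves the jump onto NEW's entry in
   the jump_chain (when that table is valid), increments
   LABEL_NUSES (NEW) and decrements LABEL_NUSES (OLD), deleting OLD if
   it thereby becomes unused and DELETE_UNUSED is set. (OLD and NEW
   are illustrative names.) */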
3253
3254 /* Invert the jump condition of rtx X contained in jump insn, INSN.
3255 Accrue the modifications into the change group. */
3256
3257 static void
3258 invert_exp_1 (insn)
3259 rtx insn;
3260 {
3261 register RTX_CODE code;
3262 rtx x = pc_set (insn);
3263
3264 if (!x)
3265 abort ();
3266 x = SET_SRC (x);
3267
3268 code = GET_CODE (x);
3269
3270 if (code == IF_THEN_ELSE)
3271 {
3272 register rtx comp = XEXP (x, 0);
3273 register rtx tem;
3274 enum rtx_code reversed_code;
3275
3276 /* We can do this in two ways: The preferable way is to reverse the
3277 comparison code, which works whenever reversed_comparison_code
3278 can tell us how. Otherwise, swap the THEN-part and ELSE-part
3279 of the IF_THEN_ELSE. If we can't do either, fail. */
3280
3281 reversed_code = reversed_comparison_code (comp, insn);
3282
3283 if (reversed_code != UNKNOWN)
3284 {
3285 validate_change (insn, &XEXP (x, 0),
3286 gen_rtx_fmt_ee (reversed_code,
3287 GET_MODE (comp), XEXP (comp, 0),
3288 XEXP (comp, 1)),
3289 1);
3290 return;
3291 }
3292
3293 tem = XEXP (x, 1);
3294 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
3295 validate_change (insn, &XEXP (x, 2), tem, 1);
3296 }
3297 else
3298 abort ();
3299 }
3300
3301 /* Invert the jump condition of conditional jump insn, INSN.
3302
3303 Return 1 if we can do so, 0 if we cannot find a way to do so that
3304 matches a pattern. */
3305
3306 static int
3307 invert_exp (insn)
3308 rtx insn;
3309 {
3310 invert_exp_1 (insn);
3311 if (num_validated_changes () == 0)
3312 return 0;
3313
3314 return apply_change_group ();
3315 }
3316
3317 /* Invert the condition of the jump JUMP, and make it jump to label
3318 NLABEL instead of where it jumps now. Accrue changes into the
3319 change group. Return false if we didn't see how to perform the
3320 inversion and redirection. */
3321
3322 int
3323 invert_jump_1 (jump, nlabel)
3324 rtx jump, nlabel;
3325 {
3326 int ochanges;
3327
3328 ochanges = num_validated_changes ();
3329 invert_exp_1 (jump);
3330 if (num_validated_changes () == ochanges)
3331 return 0;
3332
3333 return redirect_jump_1 (jump, nlabel);
3334 }
3335
3336 /* Invert the condition of the jump JUMP, and make it jump to label
3337 NLABEL instead of where it jumps now. Return true if successful. */
3338
3339 int
3340 invert_jump (jump, nlabel, delete_unused)
3341 rtx jump, nlabel;
3342 int delete_unused;
3343 {
3344 /* We have to either invert the condition and change the label or
3345 do neither. Either operation could fail. We first try to invert
3346 the jump. If that succeeds, we try changing the label. If that fails,
3347 we invert the jump back to what it was. */
3348
3349 if (! invert_exp (jump))
3350 return 0;
3351
3352 if (redirect_jump (jump, nlabel, delete_unused))
3353 {
3354 /* An inverted jump means that a probability taken becomes a
3355 probability not taken. Subtract the branch probability from the
3356 probability base to convert it back to a taken probability. */
3357
3358 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3359 if (note)
3360 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
3361
3362 return 1;
3363 }
3364
3365 if (! invert_exp (jump))
3366 /* This should just be putting it back the way it was. */
3367 abort ();
3368
3369 return 0;
3370 }
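
/* For example, inverting
   (set (pc) (if_then_else (eq (reg:SI 1) (const_int 0))
                           (label_ref A) (pc)))
   and redirecting it to B yields
   (set (pc) (if_then_else (ne (reg:SI 1) (const_int 0))
                           (label_ref B) (pc))),
   and a REG_BR_PROB note of p becomes REG_BR_PROB_BASE - p.
   (A and B are illustrative labels.) */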
3371
3372 /* Delete the instruction JUMP from any jump chain it might be on. */
3373
3374 static void
3375 delete_from_jump_chain (jump)
3376 rtx jump;
3377 {
3378 int index;
3379 rtx olabel = JUMP_LABEL (jump);
3380
3381 /* Handle unconditional jumps. */
3382 if (jump_chain && olabel != 0
3383 && INSN_UID (olabel) < max_jump_chain
3384 && simplejump_p (jump))
3385 index = INSN_UID (olabel);
3386 /* Handle return insns. */
3387 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3388 index = 0;
3389 else
3390 return;
3391
3392 if (jump_chain[index] == jump)
3393 jump_chain[index] = jump_chain[INSN_UID (jump)];
3394 else
3395 {
3396 rtx insn;
3397
3398 for (insn = jump_chain[index];
3399 insn != 0;
3400 insn = jump_chain[INSN_UID (insn)])
3401 if (jump_chain[INSN_UID (insn)] == jump)
3402 {
3403 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3404 break;
3405 }
3406 }
3407 }
3408 \f
3409 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3410
3411 If the old jump target label (before the dispatch table) becomes unused,
3412 it and the dispatch table may be deleted. In that case, find the insn
3413 before the jump that references that label, and delete it and its
3414 logical successors too. */
3415
3416 static void
3417 redirect_tablejump (jump, nlabel)
3418 rtx jump, nlabel;
3419 {
3420 register rtx olabel = JUMP_LABEL (jump);
3421 rtx *notep, note, next;
3422
3423 /* Add this jump to the jump_chain of NLABEL. */
3424 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
3425 && INSN_UID (jump) < max_jump_chain)
3426 {
3427 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
3428 jump_chain[INSN_UID (nlabel)] = jump;
3429 }
3430
3431 for (notep = &REG_NOTES (jump), note = *notep; note; note = next)
3432 {
3433 next = XEXP (note, 1);
3434
3435 if (REG_NOTE_KIND (note) != REG_DEAD
3436 /* Verify that the REG_NOTE is legitimate. */
3437 || GET_CODE (XEXP (note, 0)) != REG
3438 || ! reg_mentioned_p (XEXP (note, 0), PATTERN (jump)))
3439 notep = &XEXP (note, 1);
3440 else
3441 {
3442 delete_prior_computation (note, jump);
3443 *notep = next;
3444 }
3445 }
3446
3447 PATTERN (jump) = gen_jump (nlabel);
3448 JUMP_LABEL (jump) = nlabel;
3449 ++LABEL_NUSES (nlabel);
3450 INSN_CODE (jump) = -1;
3451
3452 if (--LABEL_NUSES (olabel) == 0)
3453 {
3454 delete_labelref_insn (jump, olabel, 0);
3455 delete_insn (olabel);
3456 }
3457 }
3458
3459 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3460 If we found one, delete it and then delete this insn if DELETE_THIS is
3461 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3462
3463 static int
3464 delete_labelref_insn (insn, label, delete_this)
3465 rtx insn, label;
3466 int delete_this;
3467 {
3468 int deleted = 0;
3469 rtx link;
3470
3471 if (GET_CODE (insn) != NOTE
3472 && reg_mentioned_p (label, PATTERN (insn)))
3473 {
3474 if (delete_this)
3475 {
3476 delete_insn (insn);
3477 deleted = 1;
3478 }
3479 else
3480 return 1;
3481 }
3482
3483 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
3484 if (delete_labelref_insn (XEXP (link, 0), label, 1))
3485 {
3486 if (delete_this)
3487 {
3488 delete_insn (insn);
3489 deleted = 1;
3490 }
3491 else
3492 return 1;
3493 }
3494
3495 return deleted;
3496 }
3497 \f
3498 /* Like rtx_equal_p except that it considers two REGs as equal
3499 if they renumber to the same value and considers two commutative
3500 operations to be the same if the order of the operands has been
3501 reversed.
3502
3503 ??? Addition is not commutative on the PA due to the weird implicit
3504 space register selection rules for memory addresses. Therefore, we
3505 don't consider a + b == b + a.
3506
3507 We could/should make this test a little tighter. Possibly only
3508 disabling it on the PA via some backend macro or only disabling this
3509 case when the PLUS is inside a MEM. */
3510
3511 int
3512 rtx_renumbered_equal_p (x, y)
3513 rtx x, y;
3514 {
3515 register int i;
3516 register RTX_CODE code = GET_CODE (x);
3517 register const char *fmt;
3518
3519 if (x == y)
3520 return 1;
3521
3522 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3523 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3524 && GET_CODE (SUBREG_REG (y)) == REG)))
3525 {
3526 int reg_x = -1, reg_y = -1;
3527 int byte_x = 0, byte_y = 0;
3528
3529 if (GET_MODE (x) != GET_MODE (y))
3530 return 0;
3531
3532 /* If we haven't done any renumbering, don't
3533 make any assumptions. */
3534 if (reg_renumber == 0)
3535 return rtx_equal_p (x, y);
3536
3537 if (code == SUBREG)
3538 {
3539 reg_x = REGNO (SUBREG_REG (x));
3540 byte_x = SUBREG_BYTE (x);
3541
3542 if (reg_renumber[reg_x] >= 0)
3543 {
3544 reg_x = subreg_regno_offset (reg_renumber[reg_x],
3545 GET_MODE (SUBREG_REG (x)),
3546 byte_x,
3547 GET_MODE (x));
3548 byte_x = 0;
3549 }
3550 }
3551 else
3552 {
3553 reg_x = REGNO (x);
3554 if (reg_renumber[reg_x] >= 0)
3555 reg_x = reg_renumber[reg_x];
3556 }
3557
3558 if (GET_CODE (y) == SUBREG)
3559 {
3560 reg_y = REGNO (SUBREG_REG (y));
3561 byte_y = SUBREG_BYTE (y);
3562
3563 if (reg_renumber[reg_y] >= 0)
3564 {
3565 reg_y = subreg_regno_offset (reg_renumber[reg_y],
3566 GET_MODE (SUBREG_REG (y)),
3567 byte_y,
3568 GET_MODE (y));
3569 byte_y = 0;
3570 }
3571 }
3572 else
3573 {
3574 reg_y = REGNO (y);
3575 if (reg_renumber[reg_y] >= 0)
3576 reg_y = reg_renumber[reg_y];
3577 }
3578
3579 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
3580 }
3581
3582 /* Now we have disposed of all the cases
3583 in which different rtx codes can match. */
3584 if (code != GET_CODE (y))
3585 return 0;
3586
3587 switch (code)
3588 {
3589 case PC:
3590 case CC0:
3591 case ADDR_VEC:
3592 case ADDR_DIFF_VEC:
3593 return 0;
3594
3595 case CONST_INT:
3596 return INTVAL (x) == INTVAL (y);
3597
3598 case LABEL_REF:
3599 /* We can't assume nonlocal labels have their following insns yet. */
3600 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3601 return XEXP (x, 0) == XEXP (y, 0);
3602
3603 /* Two label-refs are equivalent if they point at labels
3604 in the same position in the instruction stream. */
3605 return (next_real_insn (XEXP (x, 0))
3606 == next_real_insn (XEXP (y, 0)));
3607
3608 case SYMBOL_REF:
3609 return XSTR (x, 0) == XSTR (y, 0);
3610
3611 case CODE_LABEL:
3612 /* If we didn't match EQ equality above, they aren't the same. */
3613 return 0;
3614
3615 default:
3616 break;
3617 }
3618
3619 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3620
3621 if (GET_MODE (x) != GET_MODE (y))
3622 return 0;
3623
3624 /* For commutative operations, two RTXs match if their operands match in
3625 either order. Also handle the simple binary and unary cases without a loop.
3626
3627 ??? Don't consider PLUS a commutative operator; see comments above. */
3628 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3629 && code != PLUS)
3630 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3631 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
3632 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
3633 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
3634 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
3635 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3636 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
3637 else if (GET_RTX_CLASS (code) == '1')
3638 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
3639
3640 /* Compare the elements. If any pair of corresponding elements
3641 fails to match, return 0 for the whole thing. */
3642
3643 fmt = GET_RTX_FORMAT (code);
3644 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3645 {
3646 register int j;
3647 switch (fmt[i])
3648 {
3649 case 'w':
3650 if (XWINT (x, i) != XWINT (y, i))
3651 return 0;
3652 break;
3653
3654 case 'i':
3655 if (XINT (x, i) != XINT (y, i))
3656 return 0;
3657 break;
3658
3659 case 's':
3660 if (strcmp (XSTR (x, i), XSTR (y, i)))
3661 return 0;
3662 break;
3663
3664 case 'e':
3665 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
3666 return 0;
3667 break;
3668
3669 case 'u':
3670 if (XEXP (x, i) != XEXP (y, i))
3671 return 0;
3672 /* fall through. */
3673 case '0':
3674 break;
3675
3676 case 'E':
3677 if (XVECLEN (x, i) != XVECLEN (y, i))
3678 return 0;
3679 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3680 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
3681 return 0;
3682 break;
3683
3684 default:
3685 abort ();
3686 }
3687 }
3688 return 1;
3689 }
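
/* For example, after register allocation (reg:SI 65) and (reg:SI 3)
   compare equal here if pseudo 65 was renumbered to hard register 3
   (the register numbers are illustrative), whereas (plus a b) never
   matches (plus b a) because PLUS is deliberately excluded from the
   commutative case above. */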
3690 \f
3691 /* If X is a hard register or equivalent to one or a subregister of one,
3692 return the hard register number. If X is a pseudo register that was not
3693 assigned a hard register, return the pseudo register number. Otherwise,
3694 return -1. Any rtx is valid for X. */
3695
3696 int
3697 true_regnum (x)
3698 rtx x;
3699 {
3700 if (GET_CODE (x) == REG)
3701 {
3702 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3703 return reg_renumber[REGNO (x)];
3704 return REGNO (x);
3705 }
3706 if (GET_CODE (x) == SUBREG)
3707 {
3708 int base = true_regnum (SUBREG_REG (x));
3709 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3710 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
3711 GET_MODE (SUBREG_REG (x)),
3712 SUBREG_BYTE (x), GET_MODE (x));
3713 }
3714 return -1;
3715 }
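
/* For example, if pseudo 100 was assigned hard register 3,
   true_regnum ((reg:SI 100)) is 3; if it was not assigned a hard
   register, the result is 100 (register numbers illustrative). */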
3716 \f
3717 /* Optimize code of the form:
3718
3719 for (x = a[i]; x; ...)
3720 ...
3721 for (x = a[i]; x; ...)
3722 ...
3723 foo:
3724
3725 Loop optimize will change the above code into
3726
3727 if (x = a[i])
3728 for (;;)
3729 { ...; if (! (x = ...)) break; }
3730 if (x = a[i])
3731 for (;;)
3732 { ...; if (! (x = ...)) break; }
3733 foo:
3734
3735 In general, if the first test fails, the program can branch
3736 directly to `foo' and skip the second try, which is doomed to fail.
3737 We run this after loop optimization and before flow analysis. */
3738
3739 /* When comparing the insn patterns, we track the fact that different
3740 pseudo-register numbers may have been used in each computation.
3741 The following array stores an equivalence -- same_regs[I] == J means
3742 that pseudo register I was used in the first set of tests in a context
3743 where J was used in the second set. We also count the number of such
3744 pending equivalences. If that count is nonzero when we finish
3745 comparing, the expressions really aren't the same. */
3746
3747 static int *same_regs;
3748
3749 static int num_same_regs;
3750
3751 /* Track any registers modified between the target of the first jump and
3752 the second jump. They never compare equal. */
3753
3754 static char *modified_regs;
3755
3756 /* Record if memory was modified. */
3757
3758 static int modified_mem;
3759
3760 /* Called via note_stores on each insn between the target of the first
3761 branch and the second branch. It marks any changed registers. */
3762
3763 static void
3764 mark_modified_reg (dest, x, data)
3765 rtx dest;
3766 rtx x;
3767 void *data ATTRIBUTE_UNUSED;
3768 {
3769 int regno;
3770 unsigned int i;
3771
3772 if (GET_CODE (dest) == SUBREG)
3773 dest = SUBREG_REG (dest);
3774
3775 if (GET_CODE (dest) == MEM)
3776 modified_mem = 1;
3777
3778 if (GET_CODE (dest) != REG)
3779 return;
3780
3781 regno = REGNO (dest);
3782 if (regno >= FIRST_PSEUDO_REGISTER)
3783 modified_regs[regno] = 1;
3784 /* Don't consider a hard condition code register as modified
3785 if it is only being set. thread_jumps will check whether it is set
3786 to the same value. */
3787 else if (GET_MODE_CLASS (GET_MODE (dest)) != MODE_CC
3788 || GET_CODE (x) != SET
3789 || ! rtx_equal_p (dest, SET_DEST (x))
3790 || HARD_REGNO_NREGS (regno, GET_MODE (dest)) != 1)
3791 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
3792 modified_regs[regno + i] = 1;
3793 }
3794
3795 /* F is the first insn in the chain of insns. */
3796
3797 void
3798 thread_jumps (f, max_reg, flag_before_loop)
3799 rtx f;
3800 int max_reg;
3801 int flag_before_loop;
3802 {
3803 /* Basic algorithm is to find a conditional branch,
3804 the label it may branch to, and the branch after
3805 that label. If the two branches test the same condition,
3806 walk back from both branch paths until the insn patterns
3807 differ, or code labels are hit. If we make it back to
3808 the target of the first branch, then we know that the first branch
3809 will either always succeed or always fail depending on the relative
3810 senses of the two branches. So adjust the first branch accordingly
3811 in this case. */
3812
3813 rtx label, b1, b2, t1, t2;
3814 enum rtx_code code1, code2;
3815 rtx b1op0, b1op1, b2op0, b2op1;
3816 int changed = 1;
3817 int i;
3818 int *all_reset;
3819 enum rtx_code reversed_code1, reversed_code2;
3820
3821 /* Allocate register tables and quick-reset table. */
3822 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
3823 same_regs = (int *) xmalloc (max_reg * sizeof (int));
3824 all_reset = (int *) xmalloc (max_reg * sizeof (int));
3825 for (i = 0; i < max_reg; i++)
3826 all_reset[i] = -1;
3827
3828 while (changed)
3829 {
3830 changed = 0;
3831
3832 for (b1 = f; b1; b1 = NEXT_INSN (b1))
3833 {
3834 rtx set;
3835 rtx set2;
3836
3837 /* Get to a candidate branch insn. */
3838 if (GET_CODE (b1) != JUMP_INSN
3839 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
3840 continue;
3841
3842 memset (modified_regs, 0, max_reg * sizeof (char));
3843 modified_mem = 0;
3844
3845 memcpy (same_regs, all_reset, max_reg * sizeof (int));
3846 num_same_regs = 0;
3847
3848 label = JUMP_LABEL (b1);
3849
3850 /* Look for a branch after the target. Record any registers and
3851 memory modified between the target and the branch. Stop when we
3852 get to a label since we can't know what was changed there. */
3853 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
3854 {
3855 if (GET_CODE (b2) == CODE_LABEL)
3856 break;
3857
3858 else if (GET_CODE (b2) == JUMP_INSN)
3859 {
3860 /* If this is an unconditional jump and is the only use of
3861 its target label, we can follow it. */
3862 if (any_uncondjump_p (b2)
3863 && onlyjump_p (b2)
3864 && JUMP_LABEL (b2) != 0
3865 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
3866 {
3867 b2 = JUMP_LABEL (b2);
3868 continue;
3869 }
3870 else
3871 break;
3872 }
3873
3874 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
3875 continue;
3876
3877 if (GET_CODE (b2) == CALL_INSN)
3878 {
3879 modified_mem = 1;
3880 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3881 if (call_used_regs[i] && ! fixed_regs[i]
3882 && i != STACK_POINTER_REGNUM
3883 && i != FRAME_POINTER_REGNUM
3884 && i != HARD_FRAME_POINTER_REGNUM
3885 && i != ARG_POINTER_REGNUM)
3886 modified_regs[i] = 1;
3887 }
3888
3889 note_stores (PATTERN (b2), mark_modified_reg, NULL);
3890 }

          /* Check the next candidate branch insn from the label
             of the first.  */
          if (b2 == 0
              || GET_CODE (b2) != JUMP_INSN
              || b2 == b1
              || !any_condjump_p (b2)
              || !onlyjump_p (b2))
            continue;
          set = pc_set (b1);
          set2 = pc_set (b2);

          /* Get the comparison codes and operands, reversing the
             codes if appropriate.  If we don't have comparison codes,
             we can't do anything.  */
          b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
          b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
          code1 = GET_CODE (XEXP (SET_SRC (set), 0));
          reversed_code1 = code1;
          if (XEXP (SET_SRC (set), 1) == pc_rtx)
            code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
          else
            reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);

          b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
          b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
          code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
          reversed_code2 = code2;
          if (XEXP (SET_SRC (set2), 1) == pc_rtx)
            code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
          else
            reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
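
          /* A conditional jump has RTL of the form
             (set (pc) (if_then_else COND (label_ref L) (pc))).
             When the arms are the other way around, i.e.
             (if_then_else COND (pc) (label_ref L)), the jump is taken
             when COND is false, which is why the code is replaced by
             its reversed form above.  */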

          /* If they test the same operands, and knowing whether B1
             branches tells us whether B2 branches (e.g. EQ dominates LE,
             since knowing a == b is true tells us a <= b is true), check
             whether we can thread the branch.  */
          if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
              && rtx_equal_for_thread_p (b1op1, b2op1, b2)
              && (comparison_dominates_p (code1, code2)
                  || comparison_dominates_p (code1, reversed_code2)))
            {
              t1 = prev_nonnote_insn (b1);
              t2 = prev_nonnote_insn (b2);

              while (t1 != 0 && t2 != 0)
                {
                  if (t2 == label)
                    {
                      /* We have reached the target of the first branch.
                         If there are no pending register equivalents,
                         we know that this branch will either always
                         succeed (if the senses of the two branches are
                         the same) or always fail (if not).  */
                      rtx new_label;

                      if (num_same_regs != 0)
                        break;

                      if (comparison_dominates_p (code1, code2))
                        new_label = JUMP_LABEL (b2);
                      else
                        new_label = get_label_after (b2);

                      if (JUMP_LABEL (b1) != new_label)
                        {
                          rtx prev = PREV_INSN (new_label);

                          if (flag_before_loop
                              && GET_CODE (prev) == NOTE
                              && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
                            {
                              /* Don't thread to the loop label.  If a loop
                                 label is reused, loop optimization will
                                 be disabled for that loop.  */
                              new_label = gen_label_rtx ();
                              emit_label_after (new_label, PREV_INSN (prev));
                            }
                          changed |= redirect_jump (b1, new_label, 1);
                        }
                      break;
                    }

                  /* If either of these is not a normal insn (it might be
                     a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail.  (NOTEs
                     have already been skipped above.)  Similarly, fail
                     if the insns are different.  */
                  if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
                      || recog_memoized (t1) != recog_memoized (t2)
                      || ! rtx_equal_for_thread_p (PATTERN (t1),
                                                   PATTERN (t2), t2))
                    break;

                  t1 = prev_nonnote_insn (t1);
                  t2 = prev_nonnote_insn (t2);
                }
            }
        }
    }

  /* Clean up.  */
  free (modified_regs);
  free (same_regs);
  free (all_reset);
}
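
/* For illustration only: the real call sites live elsewhere in the
   compiler, but a typical invocation of the pass above might be

       thread_jumps (get_insns (), max_reg_num (), 1);

   i.e. the whole insn chain, a table size covering every register
   number in use, and a flag saying the loop optimizer has not run.  */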
\f
/* This is like RTX_EQUAL_P except that it knows about our handling of
   possibly equivalent registers and knows to consider volatile and
   modified objects as not equal.

   YINSN is the insn containing Y.  */
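
/* For example (descriptive only): (reg:SI 104) in X may be treated as
   equivalent to (reg:SI 105) in Y provided neither is a user variable
   or a hard register, 105 has not already been paired with a different
   register, and YINSN contains the last use of 105.  The pairing is
   recorded in same_regs, so later uses must pair up consistently.  */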

int
rtx_equal_for_thread_p (x, y, yinsn)
     rtx x, y;
     rtx yinsn;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register const char *fmt;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For floating point on IEEE targets, consider everything unequal
     unless -funsafe-math-optimizations is in effect.  This is a bit
     pessimistic, but this pass would only rarely do anything for FP
     anyway.  */
  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
    return 0;

  /* For commutative operations, two rtx's match if the operands match
     in either order.  Also handle the simple binary and unary cases
     without a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
             && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
            || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
                && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
            && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
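
  /* Thus (plus:SI (reg 100) (reg 200)) matches
     (plus:SI (reg 200) (reg 100)).  EQ and NE are listed explicitly
     above because they are the commutative comparisons, yet have rtx
     class `<' rather than `c'.  */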

  /* Handle special cases first.  */
  switch (code)
    {
    case REG:
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
        return 1;

      /* Only if neither is a user variable or a hard register do we
         check for possible equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
          || REGNO (x) < FIRST_PSEUDO_REGISTER
          || REGNO (y) < FIRST_PSEUDO_REGISTER)
        return 0;

      if (same_regs[REGNO (x)] == -1)
        {
          same_regs[REGNO (x)] = REGNO (y);
          num_same_regs++;

          /* If this is the first time we are seeing a register on the `Y'
             side, see if it is the last use.  If not, we can't thread the
             jump, so mark it as not equivalent.  */
          if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
            return 0;

          return 1;
        }
      else
        return (same_regs[REGNO (x)] == (int) REGNO (y));

      break;

    case MEM:
      /* If memory was modified or either reference is volatile, they are
         not equivalent.  Otherwise, compare the addresses.  */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

    case ASM_INPUT:
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' if setting equivalenced registers.
         Then process source.  */
      if (GET_CODE (SET_DEST (x)) == REG
          && GET_CODE (SET_DEST (y)) == REG)
        {
          if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
            {
              same_regs[REGNO (SET_DEST (x))] = -1;
              num_same_regs--;
            }
          else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
            return 0;
        }
      else
        {
          if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
            return 0;
        }

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  if (x == y)
    return 1;

  fmt = GET_RTX_FORMAT (code);
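
  /* Compare the remaining fields, driven by the rtx format string:
     `e' is a subexpression, `E'/`V' a vector of subexpressions,
     `i'/`n' an integer, `w' a HOST_WIDE_INT, `s'/`S' a string,
     `u' an insn backpointer, and `0'/`t' slots that do not affect
     equality here.  */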
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
                                        XVECEXP (y, i, j), yinsn) == 0)
              return 0;
          break;

        case 'e':
          if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
            return 0;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          abort ();
        }
    }
  return 1;
}