/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
   1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This is the jump-optimization pass of the compiler.
   It is run two or three times: once before cse, sometimes once after cse,
   and once after reload (before final).

   jump_optimize deletes unreachable code and labels that are not used.
   It also deletes jumps that jump to the following insn,
   and simplifies jumps around unconditional jumps and jumps
   to unconditional jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   Optionally, cross-jumping can be done.  Currently it is done
   only the last time (when after reload and before final).
   In fact, the code for cross-jumping now assumes that register
   allocation has been done, since it uses `rtx_renumbered_equal_p'.

   Jump optimization is done after cse when cse's constant-propagation
   causes jumps to become unconditional or to be deleted.

   Unreachable loops are not detected here, because the labels
   have references and the insns appear reachable from the labels.
   find_basic_blocks in flow.c finds and deletes such loops.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */
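/* As an illustrative sketch (labels L1/L2 here are hypothetical, not
   from this file), the two simplest transformations handled below are

	jump L1  ...  L1: jump L2    =>    jump L2  ...  L1: jump L2

   (a jump to a jump is redirected to the final target), and

	cond-jump L1			inverted-cond-jump L2
	jump L2			=>   L1: ...
     L1: ...

   (a conditional jump around an unconditional jump is inverted).
   LABEL_NUSES counts are updated as jumps are redirected, so labels
   whose counts drop to zero can then be deleted.  */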

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"

/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize  jump y; x: ...  y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */
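/* For instance (a purely hypothetical uid assignment): if simple jumps
   with uids 7 and 12 both target the label with uid 3, the chain is

	jump_chain[3]  == the jump with uid 12
	jump_chain[12] == the jump with uid 7
	jump_chain[7]  == 0		(end of chain)

   and every RETURN insn is likewise reachable starting from
   jump_chain[0].  */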

static rtx *jump_chain;

/* Maximum index in jump_chain.  */

static int max_jump_chain;

/* Set nonzero by jump_optimize if control can fall through
   to the end of the function.  */
int can_reach_end;

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;

static int init_label_info PARAMS ((rtx));
static void delete_barrier_successors PARAMS ((rtx));
static void mark_all_labels PARAMS ((rtx, int));
static rtx delete_unreferenced_labels PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static int calculate_can_reach_end PARAMS ((rtx, int));
static int duplicate_loop_exit_test PARAMS ((rtx));
static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump PARAMS ((rtx, rtx, rtx));
static int jump_back_p PARAMS ((rtx, rtx));
static int tension_vector_labels PARAMS ((rtx, int));
static void mark_jump_label PARAMS ((rtx, rtx, int, int));
static void delete_computation PARAMS ((rtx));
static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
static int redirect_exp PARAMS ((rtx, rtx, rtx));
static void invert_exp_1 PARAMS ((rtx));
static int invert_exp PARAMS ((rtx));
static void delete_from_jump_chain PARAMS ((rtx));
static int delete_labelref_insn PARAMS ((rtx, rtx, int));
static void mark_modified_reg PARAMS ((rtx, rtx, void *));
static void redirect_tablejump PARAMS ((rtx, rtx));
static void jump_optimize_1 PARAMS ((rtx, int, int, int, int, int));
static int returnjump_p_1 PARAMS ((rtx *, void *));
static void delete_prior_computation PARAMS ((rtx, rtx));
\f
/* Main external entry point into the jump optimizer.  See comments before
   jump_optimize_1 for descriptions of the arguments.  */
void
jump_optimize (f, cross_jump, noop_moves, after_regscan)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
{
  jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
}

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 1, 0);
}

/* Alternate entry into the jump optimizer.  Do only trivial optimizations.  */

void
jump_optimize_minimal (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 0, 1);
}
\f
/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is 1, detect matching code
   before a jump and its destination and unify them.
   If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

   If NOOP_MOVES is nonzero, delete no-op move insns.

   If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
   after regscan, and it is safe to use regno_first_uid and regno_last_uid.

   If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
   and JUMP_LABEL field for jumping insns.

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.

   If MINIMAL is nonzero, then we only perform trivial optimizations:

   * Removal of unreachable code after BARRIERs.
   * Removal of unreferenced CODE_LABELs.
   * Removal of a jump to the next instruction.
   * Removal of a conditional jump followed by an unconditional jump
     to the same target as the conditional jump.
   * Simplify a conditional jump around an unconditional jump.
   * Simplify a jump to a jump.
   * Delete extraneous line number notes.
 */

static void
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
		 mark_labels_only, minimal)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
     int mark_labels_only;
     int minimal;
{
  register rtx insn, next;
  int changed;
  int old_max_reg;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;

  cross_jump_death_matters = (cross_jump == 2);
  max_uid = init_label_info (f) + 1;

  /* If we are performing cross jump optimizations, then initialize
     tables mapping UIDs to EH regions to avoid incorrect movement
     of insns from one EH region to another.  */
  if (flag_exceptions && cross_jump)
    init_insn_eh_region (f, max_uid);

  if (! mark_labels_only)
    delete_barrier_successors (f);

  /* Leave some extra room for labels and duplicate exit test insns
     we make.  */
  max_jump_chain = max_uid * 14 / 10;
  jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));

  mark_all_labels (f, cross_jump);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
      LABEL_NUSES (XEXP (insn, 0))++;

  check_exception_handler_labels ();

  /* Keep track of labels used for marking handlers for exception
     regions; they cannot usually be deleted.  */

  for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
    if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
      LABEL_NUSES (XEXP (insn, 0))++;

  /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
     notes and recompute LABEL_NUSES.  */
  if (mark_labels_only)
    goto end;

  if (! minimal)
    exception_optimize ();

  last_insn = delete_unreferenced_labels (f);

  if (noop_moves)
    delete_noop_moves (f);

  /* If we haven't yet gotten to reload and we have just run regscan,
     delete any insn that sets a register that isn't used elsewhere.
     This helps some of the optimizations below by having fewer insns
     being jumped around.  */

  if (optimize && ! reload_completed && after_regscan)
    for (insn = f; insn; insn = next)
      {
	rtx set = single_set (insn);

	next = NEXT_INSN (insn);

	if (set && GET_CODE (SET_DEST (set)) == REG
	    && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
	    && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    /* We use regno_last_note_uid so as not to delete the setting
	       of a reg that's used in notes.  A subsequent optimization
	       might arrange to use that reg for real.  */
	    && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    && ! side_effects_p (SET_SRC (set))
	    && ! find_reg_note (insn, REG_RETVAL, 0)
	    /* An ADDRESSOF expression can turn into a use of the internal arg
	       pointer, so do not delete the initialization of the internal
	       arg pointer yet.  If it is truly dead, flow will delete the
	       initializing insn.  */
	    && SET_DEST (set) != current_function_internal_arg_pointer)
	  delete_insn (insn);
      }

  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  changed = 1;
  old_max_reg = max_reg_num ();
  while (changed)
    {
      changed = 0;

      for (insn = f; insn; insn = next)
	{
	  rtx reallabelprev;
	  rtx temp, temp1, temp2 = NULL_RTX;
	  rtx temp4 ATTRIBUTE_UNUSED;
	  rtx nlabel;
	  int this_is_any_uncondjump;
	  int this_is_any_condjump;
	  int this_is_onlyjump;

	  next = NEXT_INSN (insn);

	  /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
	     jump.  Try to optimize by duplicating the loop exit test if so.
	     This is only safe immediately after regscan, because it uses
	     the values of regno_first_uid and regno_last_uid.  */
	  if (after_regscan && GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      && (temp1 = next_nonnote_insn (insn)) != 0
	      && any_uncondjump_p (temp1)
	      && onlyjump_p (temp1))
	    {
	      temp = PREV_INSN (insn);
	      if (duplicate_loop_exit_test (insn))
		{
		  changed = 1;
		  next = NEXT_INSN (temp);
		  continue;
		}
	    }

	  if (GET_CODE (insn) != JUMP_INSN)
	    continue;

	  this_is_any_condjump = any_condjump_p (insn);
	  this_is_any_uncondjump = any_uncondjump_p (insn);
	  this_is_onlyjump = onlyjump_p (insn);

	  /* Tension the labels in dispatch tables.  */

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 0);
	  if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 1);

	  /* See if this jump goes to another jump and redirect if so.  */
	  nlabel = follow_jumps (JUMP_LABEL (insn));
	  if (nlabel != JUMP_LABEL (insn))
	    changed |= redirect_jump (insn, nlabel, 1);

	  if (! optimize || minimal)
	    continue;

	  /* If a dispatch table always goes to the same place,
	     get rid of it and replace the insn that uses it.  */
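	  /* E.g. (sketch): a dispatch table whose entries were all
	     redirected to one place, such as

		(addr_vec [(label_ref L5) (label_ref L5) (label_ref L5)]),

	     lets the tablejump that uses it become a plain jump to L5.  */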

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      int i;
	      rtx pat = PATTERN (insn);
	      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
	      int len = XVECLEN (pat, diff_vec_p);
	      rtx dispatch = prev_real_insn (insn);
	      rtx set;

	      for (i = 0; i < len; i++)
		if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
		    != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
		  break;

	      if (i == len
		  && dispatch != 0
		  && GET_CODE (dispatch) == JUMP_INSN
		  && JUMP_LABEL (dispatch) != 0
		  /* Don't mess with a casesi insn.
		     XXX according to the comment before computed_jump_p(),
		     all casesi insns should be a parallel of the jump
		     and a USE of a LABEL_REF.  */
		  && ! ((set = single_set (dispatch)) != NULL
			&& (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
		  && next_real_insn (JUMP_LABEL (dispatch)) == insn)
		{
		  redirect_tablejump (dispatch,
				      XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
		  changed = 1;
		}
	    }

	  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

	  /* Detect jump to following insn.  */
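	  /* E.g. (sketch): a conditional or unconditional

		jump L3
	     L3: ...

	     decides nothing, since falling through reaches L3 anyway.  */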
	  if (reallabelprev == insn
	      && (this_is_any_condjump || this_is_any_uncondjump)
	      && this_is_onlyjump)
	    {
	      next = next_real_insn (JUMP_LABEL (insn));
	      delete_jump (insn);

	      /* Remove the "inactive" but "real" insns (i.e. uses and
		 clobbers) in between here and there.  */
	      temp = insn;
	      while ((temp = next_real_insn (temp)) != next)
		delete_insn (temp);

	      changed = 1;
	      continue;
	    }

	  /* Detect a conditional jump going to the same place
	     as an immediately following unconditional jump.  */
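	  /* E.g. (sketch):

		cond-jump L4
		jump L4

	     Both arms reach L4, so the conditional jump tests nothing
	     and can be deleted.  */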
	  else if (this_is_any_condjump && this_is_onlyjump
		   && (temp = next_active_insn (insn)) != 0
		   && simplejump_p (temp)
		   && (next_active_insn (JUMP_LABEL (insn))
		       == next_active_insn (JUMP_LABEL (temp))))
	    {
	      /* Don't mess up test coverage analysis.  */
	      temp2 = temp;
	      if (flag_test_coverage && !reload_completed)
		for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
		  if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
		    break;

	      if (temp2 == temp)
		{
		  delete_jump (insn);
		  changed = 1;
		  continue;
		}
	    }

	  /* Detect a conditional jump jumping over an unconditional jump.  */
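	  /* E.g. (sketch):

		cond-jump L6			inverted-cond-jump L7
		jump L7			=>   L6: ...
	     L6: ...

	     which leaves the unconditional jump, and possibly L6 itself,
	     deletable.  */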

	  else if (this_is_any_condjump
		   && reallabelprev != 0
		   && GET_CODE (reallabelprev) == JUMP_INSN
		   && prev_active_insn (reallabelprev) == insn
		   && no_labels_between_p (insn, reallabelprev)
		   && any_uncondjump_p (reallabelprev)
		   && onlyjump_p (reallabelprev))
	    {
	      /* When we invert the unconditional jump, we will be
		 decrementing the usage count of its old label.
		 Make sure that we don't delete it now because that
		 might cause the following code to be deleted.  */
	      rtx prev_uses = prev_nonnote_insn (reallabelprev);
	      rtx prev_label = JUMP_LABEL (insn);

	      if (prev_label)
		++LABEL_NUSES (prev_label);

	      if (invert_jump (insn, JUMP_LABEL (reallabelprev), 1))
		{
		  /* It is very likely that if there are USE insns before
		     this jump, they hold REG_DEAD notes.  These REG_DEAD
		     notes are no longer valid due to this optimization,
		     and will cause the life analysis done by following
		     passes (notably delayed-branch scheduling) to think
		     that these registers are dead when they are not.

		     To prevent this trouble, we just remove the USE insns
		     from the insn chain.  */

		  while (prev_uses && GET_CODE (prev_uses) == INSN
			 && GET_CODE (PATTERN (prev_uses)) == USE)
		    {
		      rtx useless = prev_uses;
		      prev_uses = prev_nonnote_insn (prev_uses);
		      delete_insn (useless);
		    }

		  delete_insn (reallabelprev);
		  changed = 1;
		}

	      /* We can now safely delete the label if it is unreferenced
		 since the delete_insn above has deleted the BARRIER.  */
	      if (prev_label && --LABEL_NUSES (prev_label) == 0)
		delete_insn (prev_label);

	      next = NEXT_INSN (insn);
	    }

	  /* If we have an unconditional jump preceded by a USE, try to put
	     the USE before the target and jump there.  This simplifies many
	     of the optimizations below since we don't have to worry about
	     dealing with these USE insns.  We only do this if the label
	     being branched to already has the identical USE or if code
	     never falls through to that label.  */
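	  /* E.g. (sketch): given

		(use (reg R))
		jump L9

	     and assuming code never falls through to L9, the USE is moved
	     to just before L9's code and the jump is redirected there, so
	     later transformations need not look past USE insns.  */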

	  else if (this_is_any_uncondjump
		   && (temp = prev_nonnote_insn (insn)) != 0
		   && GET_CODE (temp) == INSN
		   && GET_CODE (PATTERN (temp)) == USE
		   && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
		   && (GET_CODE (temp1) == BARRIER
		       || (GET_CODE (temp1) == INSN
			   && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
		   /* Don't do this optimization if we have a loop containing
		      only the USE instruction, and the loop start label has
		      a usage count of 1.  This is because we will redo this
		      optimization every time through the outer loop, and jump
		      opt will never exit.  */
		   && ! ((temp2 = prev_nonnote_insn (temp)) != 0
			 && temp2 == JUMP_LABEL (insn)
			 && LABEL_NUSES (temp2) == 1))
	    {
	      if (GET_CODE (temp1) == BARRIER)
		{
		  emit_insn_after (PATTERN (temp), temp1);
		  temp1 = NEXT_INSN (temp1);
		}

	      delete_insn (temp);
	      redirect_jump (insn, get_label_before (temp1), 1);
	      reallabelprev = prev_real_insn (temp1);
	      changed = 1;
	      next = NEXT_INSN (insn);
	    }

#ifdef HAVE_trap
	  /* Detect a conditional jump jumping over an unconditional trap.  */
	  if (HAVE_trap
	      && this_is_any_condjump && this_is_onlyjump
	      && reallabelprev != 0
	      && GET_CODE (reallabelprev) == INSN
	      && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
	      && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
	      && prev_active_insn (reallabelprev) == insn
	      && no_labels_between_p (insn, reallabelprev)
	      && (temp2 = get_condition (insn, &temp4))
	      && can_reverse_comparison_p (temp2, insn))
	    {
	      rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
				       XEXP (temp2, 0), XEXP (temp2, 1),
				       TRAP_CODE (PATTERN (reallabelprev)));

	      if (new)
		{
		  emit_insn_before (new, temp4);
		  delete_insn (reallabelprev);
		  delete_jump (insn);
		  changed = 1;
		  continue;
		}
	    }
	  /* Detect a jump jumping to an unconditional trap.  */
	  else if (HAVE_trap && this_is_onlyjump
		   && (temp = next_active_insn (JUMP_LABEL (insn)))
		   && GET_CODE (temp) == INSN
		   && GET_CODE (PATTERN (temp)) == TRAP_IF
		   && (this_is_any_uncondjump
		       || (this_is_any_condjump
			   && (temp2 = get_condition (insn, &temp4)))))
	    {
	      rtx tc = TRAP_CONDITION (PATTERN (temp));

	      if (tc == const_true_rtx
		  || (! this_is_any_uncondjump && rtx_equal_p (temp2, tc)))
		{
		  rtx new;
		  /* Replace an unconditional jump to a trap with a trap.  */
		  if (this_is_any_uncondjump)
		    {
		      emit_barrier_after (emit_insn_before (gen_trap (), insn));
		      delete_jump (insn);
		      changed = 1;
		      continue;
		    }
		  new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
				       XEXP (temp2, 1),
				       TRAP_CODE (PATTERN (temp)));
		  if (new)
		    {
		      emit_insn_before (new, temp4);
		      delete_jump (insn);
		      changed = 1;
		      continue;
		    }
		}
	      /* If the trap condition and jump condition are mutually
		 exclusive, redirect the jump to the following insn.  */
	      else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
		       && this_is_any_condjump
		       && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
		       && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
		       && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
		       && redirect_jump (insn, get_label_after (temp), 1))
		{
		  changed = 1;
		  continue;
		}
	    }
#endif
	  else
	    {
	      /* Now that the jump has been tensioned,
		 try cross jumping: check for identical code
		 before the jump and before its target label.  */
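	      /* E.g. (sketch): if insns A; B occur both just before this
		 jump and just before its target,

		    A; B; jump L1  ...  A; B; L1:
		 => jump L0  ...  L0: A; B; L1:

		 the first copy is deleted and the jump is redirected to a
		 label placed in front of the surviving copy.  */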

	      /* First, cross jumping of conditional jumps:  */

	      if (cross_jump && condjump_p (insn))
		{
		  rtx newjpos, newlpos;
		  rtx x = prev_real_insn (JUMP_LABEL (insn));

		  /* A conditional jump may be crossjumped
		     only if the place it jumps to follows
		     an opposing jump that comes back here.  */

		  if (x != 0 && ! jump_back_p (x, insn))
		    /* We have no opposing jump;
		       cannot cross jump this insn.  */
		    x = 0;

		  newjpos = 0;
		  /* TARGET is nonzero if it is ok to cross jump
		     to code before TARGET.  If so, see if it matches.  */
		  if (x != 0)
		    find_cross_jump (insn, x, 2,
				     &newjpos, &newlpos);

		  if (newjpos != 0)
		    {
		      do_cross_jump (insn, newjpos, newlpos);
		      /* Make the old conditional jump
			 into an unconditional one.  */
		      SET_SRC (PATTERN (insn))
			= gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
		      INSN_CODE (insn) = -1;
		      emit_barrier_after (insn);
		      /* Add to jump_chain unless this is a new label
			 whose UID is too large.  */
		      if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
			{
			  jump_chain[INSN_UID (insn)]
			    = jump_chain[INSN_UID (JUMP_LABEL (insn))];
			  jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
			}
		      changed = 1;
		      next = insn;
		    }
		}

	      /* Cross jumping of unconditional jumps:
		 a few differences.  */

	      if (cross_jump && simplejump_p (insn))
		{
		  rtx newjpos, newlpos;
		  rtx target;

		  newjpos = 0;

		  /* TARGET is nonzero if it is ok to cross jump
		     to code before TARGET.  If so, see if it matches.  */
		  find_cross_jump (insn, JUMP_LABEL (insn), 1,
				   &newjpos, &newlpos);

		  /* If we cannot cross jump to code before the label,
		     see if we can cross jump to another jump to
		     the same label.  */
		  /* Try each other jump to this label.  */
		  if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
		    for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
			 target != 0 && newjpos == 0;
			 target = jump_chain[INSN_UID (target)])
		      if (target != insn
			  && JUMP_LABEL (target) == JUMP_LABEL (insn)
			  /* Ignore TARGET if it's deleted.  */
			  && ! INSN_DELETED_P (target))
			find_cross_jump (insn, target, 2,
					 &newjpos, &newlpos);

		  if (newjpos != 0)
		    {
		      do_cross_jump (insn, newjpos, newlpos);
		      changed = 1;
		      next = insn;
		    }
		}

	      /* This code was dead in the previous jump.c!  */
	      if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
		{
		  /* Return insns all "jump to the same place"
		     so we can cross-jump between any two of them.  */

		  rtx newjpos, newlpos, target;

		  newjpos = 0;

		  /* If we cannot cross jump to code before the label,
		     see if we can cross jump to another jump to
		     the same label.  */
		  /* Try each other jump to this label.  */
		  for (target = jump_chain[0];
		       target != 0 && newjpos == 0;
		       target = jump_chain[INSN_UID (target)])
		    if (target != insn
			&& ! INSN_DELETED_P (target)
			&& GET_CODE (PATTERN (target)) == RETURN)
		      find_cross_jump (insn, target, 2,
				       &newjpos, &newlpos);

		  if (newjpos != 0)
		    {
		      do_cross_jump (insn, newjpos, newlpos);
		      changed = 1;
		      next = insn;
		    }
		}
	    }
	}

      first = 0;
    }

  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  {
    rtx last_note = 0;

    for (insn = f; insn; insn = NEXT_INSN (insn))
      if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
	{
	  /* Delete this note if it is identical to the previous note.  */
	  if (last_note
	      && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
	      && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
	    {
	      delete_insn (insn);
	      continue;
	    }

	  last_note = insn;
	}
  }

  /* CAN_REACH_END is persistent for each function.  Once set it should
     not be cleared.  This is especially true for the case where we
     delete the NOTE_FUNCTION_END note.  CAN_REACH_END is cleared by
     the front-end before compiling each function.  */
  if (! minimal && calculate_can_reach_end (last_insn, optimize != 0))
    can_reach_end = 1;

end:
  /* Clean up.  */
  free (jump_chain);
  jump_chain = 0;
}
\f
/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  Returns the
   largest INSN_UID found.  */
static int
init_label_info (f)
     rtx f;
{
  int largest_uid = 0;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
      else if (GET_CODE (insn) == JUMP_INSN)
	JUMP_LABEL (insn) = 0;
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note, next;

	  for (note = REG_NOTES (insn); note; note = next)
	    {
	      next = XEXP (note, 1);
	      if (REG_NOTE_KIND (note) == REG_LABEL
		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
		remove_note (insn, note);
	    }
	}
      if (INSN_UID (insn) > largest_uid)
	largest_uid = INSN_UID (insn);
    }

  return largest_uid;
}

/* Delete insns following barriers, up to next label.

   Also delete no-op jumps created by gcse.  */

static void
delete_barrier_successors (f)
     rtx f;
{
  rtx insn;
  rtx set;

  for (insn = f; insn;)
    {
      if (GET_CODE (insn) == BARRIER)
	{
	  insn = NEXT_INSN (insn);

	  never_reached_warning (insn);

	  while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
	    {
	      if (GET_CODE (insn) == NOTE
		  && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
		insn = NEXT_INSN (insn);
	      else
		insn = delete_insn (insn);
	    }
	  /* INSN is now the code_label.  */
	}

      /* Also remove (set (pc) (pc)) insns which can be created by
	 gcse.  We eliminate such insns now to avoid having them
	 cause problems later.  */
      else if (GET_CODE (insn) == JUMP_INSN
	       && (set = pc_set (insn)) != NULL
	       && SET_SRC (set) == pc_rtx
	       && SET_DEST (set) == pc_rtx
	       && onlyjump_p (insn))
	insn = delete_insn (insn);

      else
	insn = NEXT_INSN (insn);
    }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.

   For each label, make a chain (using `jump_chain')
   of all the *unconditional* jumps that jump to it;
   also make a chain of all returns.

   CROSS_JUMP indicates whether we are doing cross jumping and, if we
   are, whether we will pay attention to death notes.  */

static void
mark_all_labels (f, cross_jump)
     rtx f;
     int cross_jump;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	if (GET_CODE (insn) == CALL_INSN
	    && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	  {
	    mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
	    mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
	    mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);
	    continue;
	  }

	mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
	if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
	  {
	    if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
	      {
		jump_chain[INSN_UID (insn)]
		  = jump_chain[INSN_UID (JUMP_LABEL (insn))];
		jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
	      }
	    if (GET_CODE (PATTERN (insn)) == RETURN)
	      {
		jump_chain[INSN_UID (insn)] = jump_chain[0];
		jump_chain[0] = insn;
	      }
	  }
      }
}

/* Delete all labels that are no longer referenced.
   Also find and return the last insn.  */

static rtx
delete_unreferenced_labels (f)
     rtx f;
{
  rtx final = NULL_RTX;
  rtx insn;

  for (insn = f; insn;)
    {
      if (GET_CODE (insn) == CODE_LABEL
	  && LABEL_NUSES (insn) == 0
	  && LABEL_ALTERNATE_NAME (insn) == NULL)
	insn = delete_insn (insn);
      else
	{
	  final = insn;
	  insn = NEXT_INSN (insn);
	}
    }

  return final;
}

/* Delete various simple forms of moves which have no necessary
   side effect.  */

static void
delete_noop_moves (f)
     rtx f;
{
  rtx insn, next;

  for (insn = f; insn;)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN)
	{
	  register rtx body = PATTERN (insn);

	  /* Detect and delete no-op move instructions
	     resulting from not allocating a parameter in a register.  */

	  if (GET_CODE (body) == SET
	      && (SET_DEST (body) == SET_SRC (body)
		  || (GET_CODE (SET_DEST (body)) == MEM
		      && GET_CODE (SET_SRC (body)) == MEM
		      && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
	      && ! (GET_CODE (SET_DEST (body)) == MEM
		    && MEM_VOLATILE_P (SET_DEST (body)))
	      && ! (GET_CODE (SET_SRC (body)) == MEM
		    && MEM_VOLATILE_P (SET_SRC (body))))
	    delete_computation (insn);

	  /* Detect and ignore no-op move instructions
	     resulting from smart or fortuitous register allocation.  */

	  else if (GET_CODE (body) == SET)
	    {
	      int sreg = true_regnum (SET_SRC (body));
	      int dreg = true_regnum (SET_DEST (body));

	      if (sreg == dreg && sreg >= 0)
		delete_insn (insn);
	      else if (sreg >= 0 && dreg >= 0)
		{
		  rtx trial;
		  rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
					    sreg, NULL_PTR, dreg,
					    GET_MODE (SET_SRC (body)));

		  if (tem != 0
		      && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
		    {
		      /* DREG may have been the target of a REG_DEAD note in
			 the insn which makes INSN redundant.  If so, reorg
			 would still think it is dead.  So search for such a
			 note and delete it if we find it.  */
		      if (! find_regno_note (insn, REG_UNUSED, dreg))
			for (trial = prev_nonnote_insn (insn);
			     trial && GET_CODE (trial) != CODE_LABEL;
			     trial = prev_nonnote_insn (trial))
			  if (find_regno_note (trial, REG_DEAD, dreg))
			    {
			      remove_death (dreg, trial);
			      break;
			    }

		      /* Deleting insn could lose a death-note for SREG.  */
		      if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
			{
			  /* Change this into a USE so that we won't emit
			     code for it, but still can keep the note.  */
			  PATTERN (insn)
			    = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
			  INSN_CODE (insn) = -1;
			  /* Remove all reg notes but the REG_DEAD one.  */
			  REG_NOTES (insn) = trial;
			  XEXP (trial, 1) = NULL_RTX;
			}
		      else
			delete_insn (insn);
		    }
		}
	      else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
		       && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
					  NULL_PTR, 0,
					  GET_MODE (SET_DEST (body))))
		{
		  /* This handles the case where we have two consecutive
		     assignments of the same constant to pseudos that didn't
		     get a hard reg.  Each SET from the constant will be
		     converted into a SET of the spill register and an
		     output reload will be made following it.  This produces
		     two loads of the same constant into the same spill
		     register.  */

		  rtx in_insn = insn;

		  /* Look back for a death note for the first reg.
		     If there is one, it is no longer accurate.  */
		  while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
		    {
		      if ((GET_CODE (in_insn) == INSN
			   || GET_CODE (in_insn) == JUMP_INSN)
			  && find_regno_note (in_insn, REG_DEAD, dreg))
			{
			  remove_death (dreg, in_insn);
			  break;
			}
		      in_insn = PREV_INSN (in_insn);
		    }

		  /* Delete the second load of the value.  */
		  delete_insn (insn);
		}
	    }
	  else if (GET_CODE (body) == PARALLEL)
	    {
	      /* If each part is a set between two identical registers or
		 a USE or CLOBBER, delete the insn.  */
	      int i, sreg, dreg;
	      rtx tem;

	      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
		{
		  tem = XVECEXP (body, 0, i);
		  if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
		    continue;

		  if (GET_CODE (tem) != SET
		      || (sreg = true_regnum (SET_SRC (tem))) < 0
		      || (dreg = true_regnum (SET_DEST (tem))) < 0
		      || dreg != sreg)
		    break;
		}

	      if (i < 0)
		delete_insn (insn);
	    }
	  /* Also delete insns to store bit fields if they are no-ops.  */
	  /* Not worth the hair to detect this in the big-endian case.  */
	  else if (! BYTES_BIG_ENDIAN
		   && GET_CODE (body) == SET
		   && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
		   && XEXP (SET_DEST (body), 2) == const0_rtx
		   && XEXP (SET_DEST (body), 0) == SET_SRC (body)
		   && ! (GET_CODE (SET_SRC (body)) == MEM
			 && MEM_VOLATILE_P (SET_SRC (body))))
	    delete_insn (insn);
	}
      insn = next;
    }
}

/* See if there is still a NOTE_INSN_FUNCTION_END in this function.
   If so indicate that this function can drop off the end by returning
   1, else return 0.

   CHECK_DELETED indicates whether we must check if the note being
   searched for has the deleted flag set.

   DELETE_FINAL_NOTE indicates whether we should delete the note
   if we find it.  */

static int
calculate_can_reach_end (last, delete_final_note)
     rtx last;
     int delete_final_note;
{
  rtx insn = last;
  int n_labels = 1;

  while (insn != NULL_RTX)
    {
      int ok = 0;

      /* One label can follow the end-note: the return label.  */
      if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
	ok = 1;
      /* Ordinary insns can follow it if returning a structure.  */
      else if (GET_CODE (insn) == INSN)
	ok = 1;
      /* If the machine uses explicit RETURN insns and no epilogue,
	 then one of them follows the note.  */
      else if (GET_CODE (insn) == JUMP_INSN
	       && GET_CODE (PATTERN (insn)) == RETURN)
	ok = 1;
      /* A barrier can follow the return insn.  */
      else if (GET_CODE (insn) == BARRIER)
	ok = 1;
      /* Other kinds of notes can follow also.  */
      else if (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
	ok = 1;

      if (ok != 1)
	break;

      insn = PREV_INSN (insn);
    }

  /* See if we backed up to the appropriate type of note.  */
  if (insn != NULL_RTX
      && GET_CODE (insn) == NOTE
      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
    {
      if (delete_final_note)
	delete_insn (insn);
      return 1;
    }

  return 0;
}

/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
   jump.  Assume that this unconditional jump is to the exit test code.  If
   the code is sufficiently simple, make a copy of it before INSN,
   followed by a jump to the exit of the loop.  Then delete the unconditional
   jump after INSN.

   Return 1 if we made the change, else 0.

   This is only safe immediately after a regscan pass because it uses the
   values of regno_first_uid and regno_last_uid.  */
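/* As a sketch (label names are illustrative only), a loop emitted as

	NOTE_INSN_LOOP_BEG
	jump L-test
     L-top:  body
     L-test: cond-jump L-top
	NOTE_INSN_LOOP_END

   becomes roughly

	copy of: cond-jump L-top	; duplicated exit test
	jump L-exit			; skip the loop entirely
	NOTE_INSN_LOOP_BEG
     L-top:  body
     L-test: cond-jump L-top
	NOTE_INSN_LOOP_END
     L-exit:

   so that each iteration no longer begins with an unconditional jump.  */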

static int
duplicate_loop_exit_test (loop_start)
     rtx loop_start;
{
  rtx insn, set, reg, p, link;
  rtx copy = 0, first_copy = 0;
  int num_insns = 0;
  rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
  rtx lastexit;
  int max_reg = max_reg_num ();
  rtx *reg_map = 0;

  /* Scan the exit code.  We do not perform this optimization if any insn:

	 is a CALL_INSN
	 is a CODE_LABEL
	 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
	 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
	 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
	      is not valid.

     We also do not do this if we find an insn with ASM_OPERANDS.  While
     this restriction should not be necessary, copying an insn with
     ASM_OPERANDS can confuse asm_noperands in some cases.

     Also, don't do this if the exit code is more than 20 insns.  */

  for (insn = exitcode;
       insn
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	case CALL_INSN:
	  return 0;
	case NOTE:
	  /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
	     a jump immediately after the loop start that branches outside
	     the loop but within an outer loop, near the exit test.
	     If we copied this exit test and created a phony
	     NOTE_INSN_LOOP_VTOP, this could make instructions immediately
	     before the exit test look like they could safely be moved out
	     of the loop even though they might never actually be executed.
	     This can be avoided by checking here for NOTE_INSN_LOOP_CONT.  */

	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
	    return 0;

	  if (optimize < 2
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
	    /* If we were to duplicate this code, we would not move
	       the BLOCK notes, and so debugging the moved code would
	       be difficult.  Thus, we only move the code with -O2 or
	       higher.  */
	    return 0;

	  break;
	case JUMP_INSN:
	case INSN:
	  /* The code below would grossly mishandle REG_WAS_0 notes,
	     so get rid of them here.  */
	  while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
	    remove_note (insn, p);
	  if (++num_insns > 20
	      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
	      || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    return 0;
	  break;
	default:
	  break;
	}
    }

  /* Unless INSN is zero, we can do the optimization.  */
  if (insn == 0)
    return 0;

  lastexit = insn;

  /* See if any insn sets a register only used in the loop exit code and
     not a user variable.  If so, replace it with a new register.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
	&& (set = single_set (insn)) != 0
	&& ((reg = SET_DEST (set), GET_CODE (reg) == REG)
	    || (GET_CODE (reg) == SUBREG
		&& (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
	&& REGNO (reg) >= FIRST_PSEUDO_REGISTER
	&& REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
      {
	for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
	  if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
	    break;

	if (p != lastexit)
	  {
	    /* We can do the replacement.  Allocate reg_map if this is the
	       first replacement we found.  */
	    if (reg_map == 0)
	      reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));

	    REG_LOOP_TEST_P (reg) = 1;

	    reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
	  }
      }

  /* Now copy each insn.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case BARRIER:
	  copy = emit_barrier_before (loop_start);
	  break;
	case NOTE:
	  /* Only copy line-number notes.  */
	  if (NOTE_LINE_NUMBER (insn) >= 0)
	    {
	      copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
	      NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	    }
	  break;

	case INSN:
	  copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
	  if (reg_map)
	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);

	  mark_jump_label (PATTERN (copy), copy, 0, 0);

	  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
	     make them.  */
	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	    if (REG_NOTE_KIND (link) != REG_LABEL)
	      {
		if (GET_CODE (link) == EXPR_LIST)
		  REG_NOTES (copy)
		    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
						      XEXP (link, 0),
						      REG_NOTES (copy)));
		else
		  REG_NOTES (copy)
		    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
						      XEXP (link, 0),
						      REG_NOTES (copy)));
	      }

	  if (reg_map && REG_NOTES (copy))
	    replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	  break;

	case JUMP_INSN:
	  copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
					loop_start);
	  if (reg_map)
	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);
	  mark_jump_label (PATTERN (copy), copy, 0, 0);
	  if (REG_NOTES (insn))
	    {
	      REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
	      if (reg_map)
		replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	    }

	  /* If this is a simple jump, add it to the jump chain.  */

	  if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
	      && simplejump_p (copy))
	    {
	      jump_chain[INSN_UID (copy)]
		= jump_chain[INSN_UID (JUMP_LABEL (copy))];
	      jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
	    }
	  break;

	default:
	  abort ();
	}

      /* Record the first insn we copied.  We need it so that we can
	 scan the copied insns for new pseudo registers.  */
      if (! first_copy)
	first_copy = copy;
    }

  /* Now clean up by emitting a jump to the end label and deleting the jump
     at the start of the loop.  */
  if (! copy || GET_CODE (copy) != BARRIER)
    {
      copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
				    loop_start);

      /* Record the first insn we copied.  We need it so that we can
	 scan the copied insns for new pseudo registers.  This may not
	 be strictly necessary since we should have copied at least one
	 insn above.  But I am going to be safe.  */
      if (! first_copy)
	first_copy = copy;

      mark_jump_label (PATTERN (copy), copy, 0, 0);
      if (INSN_UID (copy) < max_jump_chain
	  && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
	{
	  jump_chain[INSN_UID (copy)]
	    = jump_chain[INSN_UID (JUMP_LABEL (copy))];
	  jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
	}
      emit_barrier_before (loop_start);
    }

  /* Now scan from the first insn we copied to the last insn we copied
     (copy) for new pseudo registers.  Do this after the code to jump to
     the end label since that might create a new pseudo too.  */
  reg_scan_update (first_copy, copy, max_reg);

  /* Mark the exit code as the virtual top of the converted loop.  */
  emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);

  delete_insn (next_nonnote_insn (loop_start));

  /* Clean up.  */
  if (reg_map)
    free (reg_map);

  return 1;
}
\f
/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
   eh-beg, eh-end notes between START and END out before START.  Assume that
   END is not such a note.  START may be such a note.  Returns the value
   of the new starting insn, which may be different if the original start
   was such a note.  */

rtx
squeeze_notes (start, end)
     rtx start, end;
{
  rtx insn;
  rtx next;

  for (insn = start; insn != end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	{
	  if (insn == start)
	    start = next;
	  else
	    {
	      rtx prev = PREV_INSN (insn);
	      PREV_INSN (insn) = PREV_INSN (start);
	      NEXT_INSN (insn) = start;
	      NEXT_INSN (PREV_INSN (insn)) = insn;
	      PREV_INSN (NEXT_INSN (insn)) = insn;
	      NEXT_INSN (prev) = next;
	      PREV_INSN (next) = prev;
	    }
	}
    }

  return start;
}
\f
/* Compare the instructions before insn E1 with those before E2
   to find an opportunity for cross jumping.
   (This means detecting identical sequences of insns followed by
   jumps to the same place, or followed by a label and a jump
   to that label, and replacing one with a jump to the other.)

   Assume E1 is a jump that jumps to label E2
   (that is not always true but it might as well be).
   Find the longest possible equivalent sequences
   and store the first insns of those sequences into *F1 and *F2.
   Store zero there if no equivalent preceding instructions are found.

   We give up if we find a label in stream 1.
   Actually we could transfer that label into stream 2.  */

static void
find_cross_jump (e1, e2, minimum, f1, f2)
     rtx e1, e2;
     int minimum;
     rtx *f1, *f2;
{
  register rtx i1 = e1, i2 = e2;
  register rtx p1, p2;
  int lose = 0;

  rtx last1 = 0, last2 = 0;
  rtx afterlast1 = 0, afterlast2 = 0;

  *f1 = 0;
  *f2 = 0;

  while (1)
    {
      i1 = prev_nonnote_insn (i1);

      i2 = PREV_INSN (i2);
      while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
	i2 = PREV_INSN (i2);

      if (i1 == 0)
	break;

      /* Don't allow the range of insns preceding E1 or E2
	 to include the other (E2 or E1).  */
      if (i2 == e1 || i1 == e2)
	break;

      /* If we will get to this code by jumping, those jumps will be
	 tensioned to go directly to the new label (before I2),
	 so this cross-jumping won't cost extra.  So reduce the minimum.  */
      if (GET_CODE (i1) == CODE_LABEL)
	{
	  --minimum;
	  break;
	}

      if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
	break;

      /* Avoid moving insns across EH regions if either of the insns
	 can throw.  */
      if (flag_exceptions
	  && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
	  && !in_same_eh_region (i1, i2))
	break;

      p1 = PATTERN (i1);
      p2 = PATTERN (i2);

      /* If this is a CALL_INSN, compare register usage information.
	 If we don't check this on stack register machines, the two
	 CALL_INSNs might be merged leaving reg-stack.c with mismatching
	 numbers of stack registers in the same basic block.
	 If we don't check this on machines with delay slots, a delay slot may
	 be filled that clobbers a parameter expected by the subroutine.

	 ??? We take the simple route for now and assume that if they're
	 equal, they were constructed identically.  */

      if (GET_CODE (i1) == CALL_INSN
	  && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
			    CALL_INSN_FUNCTION_USAGE (i2)))
	lose = 1;

#ifdef STACK_REGS
      /* If cross_jump_death_matters is not 0, the insn's mode
	 indicates whether or not the insn contains any stack-like
	 regs.  */

      if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
	{
	  /* If register stack conversion has already been done, then
	     death notes must also be compared before it is certain that
	     the two instruction streams match.  */

	  rtx note;
	  HARD_REG_SET i1_regset, i2_regset;

	  CLEAR_HARD_REG_SET (i1_regset);
	  CLEAR_HARD_REG_SET (i2_regset);

	  for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

	  for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

	  GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);

	  lose = 1;

	done:
	  ;
	}
#endif

      /* Don't allow old-style asm or volatile extended asms to be accepted
	 for cross jumping purposes.  It is conceptually correct to allow
	 them, since cross-jumping preserves the dynamic instruction order
	 even though it is changing the static instruction order.  However,
	 if an asm is being used to emit an assembler pseudo-op, such as
	 the MIPS `.set reorder' pseudo-op, then the static instruction order
	 matters and it must be preserved.  */
      if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
	  || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
	  || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
	lose = 1;

      if (lose || GET_CODE (p1) != GET_CODE (p2)
	  || ! rtx_renumbered_equal_p (p1, p2))
	{
	  /* The following code helps take care of G++ cleanups.  */
	  rtx equiv1;
	  rtx equiv2;

	  if (!lose && GET_CODE (p1) == GET_CODE (p2)
	      && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
		  || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
	      && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
		  || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
	      /* If the equivalences are not to a constant, they may
		 reference pseudos that no longer exist, so we can't
		 use them.  */
	      && CONSTANT_P (XEXP (equiv1, 0))
	      && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
	    {
	      rtx s1 = single_set (i1);
	      rtx s2 = single_set (i2);
	      if (s1 != 0 && s2 != 0
		  && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
		{
		  validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
		  validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
		  if (! rtx_renumbered_equal_p (p1, p2))
		    cancel_changes (0);
		  else if (apply_change_group ())
		    goto win;
		}
	    }

	  /* Insns fail to match; cross jumping is limited to the following
	     insns.  */

#ifdef HAVE_cc0
	  /* Don't allow the insn after a compare to be shared by
	     cross-jumping unless the compare is also shared.
	     Here, if either of these non-matching insns is a compare,
	     exclude the following insn from possible cross-jumping.  */
	  if (sets_cc0_p (p1) || sets_cc0_p (p2))
	    last1 = afterlast1, last2 = afterlast2, ++minimum;
#endif

	  /* If cross-jumping here will feed a jump-around-jump
	     optimization, this jump won't cost extra, so reduce
	     the minimum.  */
	  if (GET_CODE (i1) == JUMP_INSN
	      && JUMP_LABEL (i1)
	      && prev_real_insn (JUMP_LABEL (i1)) == e1)
	    --minimum;
	  break;
	}

    win:
      if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
	{
	  /* Ok, this insn is potentially includable in a cross-jump here.  */
	  afterlast1 = last1, afterlast2 = last2;
	  last1 = i1, last2 = i2, --minimum;
	}
    }

  if (minimum <= 0 && last1 != 0 && last1 != e1)
    *f1 = last1, *f2 = last2;
}

static void
do_cross_jump (insn, newjpos, newlpos)
     rtx insn, newjpos, newlpos;
{
  /* Find an existing label at this point
     or make a new one if there is none.  */
  register rtx label = get_label_before (newlpos);

  /* Make the same jump insn jump to the new point.  */
  if (GET_CODE (PATTERN (insn)) == RETURN)
    {
      /* Remove from jump chain of returns.  */
      delete_from_jump_chain (insn);
      /* Change the insn.  */
      PATTERN (insn) = gen_jump (label);
      INSN_CODE (insn) = -1;
      JUMP_LABEL (insn) = label;
      LABEL_NUSES (label)++;
      /* Add to the new jump chain.  */
      if (INSN_UID (label) < max_jump_chain
	  && INSN_UID (insn) < max_jump_chain)
	{
	  jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
	  jump_chain[INSN_UID (label)] = insn;
	}
    }
  else
    redirect_jump (insn, label, 1);

  /* Delete the matching insns before the jump.  Also, remove any REG_EQUAL
     or REG_EQUIV note in the NEWLPOS stream that isn't also present in
     the NEWJPOS stream.  */

  while (newjpos != insn)
    {
      rtx lnote;

      for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
	if ((REG_NOTE_KIND (lnote) == REG_EQUAL
	     || REG_NOTE_KIND (lnote) == REG_EQUIV)
	    && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
	    && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
	  remove_note (newlpos, lnote);

      delete_insn (newjpos);
      newjpos = next_real_insn (newjpos);
      newlpos = next_real_insn (newlpos);
    }
}
\f
/* Return the label before INSN, or put a new label there.  */

rtx
get_label_before (insn)
     rtx insn;
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      rtx prev = PREV_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
    }
  return label;
}

/* Return the label after INSN, or put a new label there.  */

rtx
get_label_after (insn)
     rtx insn;
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = next_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      label = gen_label_rtx ();
      emit_label_after (label, insn);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
\f
/* Return 1 if INSN is a jump that jumps to right after TARGET
   only on the condition that TARGET itself would drop through.
   Assumes that TARGET is a conditional jump.  */

static int
jump_back_p (insn, target)
     rtx insn, target;
{
  rtx cinsn, ctarget;
  enum rtx_code codei, codet;
  rtx set, tset;

  if (! any_condjump_p (insn)
      || any_uncondjump_p (target)
      || target != prev_real_insn (JUMP_LABEL (insn)))
    return 0;
  set = pc_set (insn);
  tset = pc_set (target);

  cinsn = XEXP (SET_SRC (set), 0);
  ctarget = XEXP (SET_SRC (tset), 0);

  codei = GET_CODE (cinsn);
  codet = GET_CODE (ctarget);

  if (XEXP (SET_SRC (set), 1) == pc_rtx)
    {
      if (! can_reverse_comparison_p (cinsn, insn))
	return 0;
      codei = reverse_condition (codei);
    }

  if (XEXP (SET_SRC (tset), 2) == pc_rtx)
    {
      if (! can_reverse_comparison_p (ctarget, target))
	return 0;
      codet = reverse_condition (codet);
    }

  return (codei == codet
	  && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
	  && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
}
\f
/* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
   return non-zero if it is safe to reverse this comparison.  It is if our
   floating-point is not IEEE, if this is an NE or EQ comparison, or if
   this is known to be an integer comparison.  */
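/* For example: under IEEE semantics, if either operand is a NaN then
   (lt x y) and (ge x y) are both false, so LT cannot simply be reversed
   to GE; but (eq x y) is false and (ne x y) is true for a NaN operand,
   so the EQ <-> NE reversal stays valid.  */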

int
can_reverse_comparison_p (comparison, insn)
     rtx comparison;
     rtx insn;
{
  rtx arg0;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
    return 0;

  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      /* If this is an NE comparison, it is safe to reverse it to an EQ
	 comparison and vice versa, even for floating point.  If no operands
	 are NaNs, the reversal is valid.  If some operand is a NaN, EQ is
	 always false and NE is always true, so the reversal is also valid.  */
      || flag_fast_math
      || GET_CODE (comparison) == NE
      || GET_CODE (comparison) == EQ)
    return 1;

  arg0 = XEXP (comparison, 0);

  /* Make sure ARG0 is one of the actual objects being compared.  If we
     can't do this, we can't be sure the comparison can be reversed.

     Handle cc0 and a MODE_CC register.  */
  if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
#ifdef HAVE_cc0
      || arg0 == cc0_rtx
#endif
      )
    {
      rtx prev, set;

/* First see if the condition code mode alone is enough to say we can
   reverse the condition.  If not, then search backwards for a set of
   ARG0.  We do not need to check for an insn clobbering it since valid
   code will contain a set with no intervening clobber.  But
   stop when we reach a label. */
1795 #ifdef REVERSIBLE_CC_MODE
1796 if (GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC
1797 && REVERSIBLE_CC_MODE (GET_MODE (arg0)))
1798 return 1;
1799 #endif
1800
1801 if (! insn)
1802 return 0;
1803
1804 for (prev = prev_nonnote_insn (insn);
1805 prev != 0 && GET_CODE (prev) != CODE_LABEL;
1806 prev = prev_nonnote_insn (prev))
1807 if ((set = single_set (prev)) != 0
1808 && rtx_equal_p (SET_DEST (set), arg0))
1809 {
1810 arg0 = SET_SRC (set);
1811
1812 if (GET_CODE (arg0) == COMPARE)
1813 arg0 = XEXP (arg0, 0);
1814 break;
1815 }
1816 }
1817
1818 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
1819 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
1820 return (GET_CODE (arg0) == CONST_INT
1821 || (GET_MODE (arg0) != VOIDmode
1822 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
1823 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
1824 }
1825
1826 /* Given an rtx-code for a comparison, return the code for the negated
1827 comparison. If no such code exists, return UNKNOWN.
1828
1829 WATCH OUT! reverse_condition is not safe to use on a jump that might
1830 be acting on the results of an IEEE floating point comparison, because
1831 of the special treatment of non-signaling nans in comparisons.
1832 Use can_reverse_comparison_p to be sure. */
1833
1834 enum rtx_code
1835 reverse_condition (code)
1836 enum rtx_code code;
1837 {
1838 switch (code)
1839 {
1840 case EQ:
1841 return NE;
1842 case NE:
1843 return EQ;
1844 case GT:
1845 return LE;
1846 case GE:
1847 return LT;
1848 case LT:
1849 return GE;
1850 case LE:
1851 return GT;
1852 case GTU:
1853 return LEU;
1854 case GEU:
1855 return LTU;
1856 case LTU:
1857 return GEU;
1858 case LEU:
1859 return GTU;
1860 case UNORDERED:
1861 return ORDERED;
1862 case ORDERED:
1863 return UNORDERED;
1864
1865 case UNLT:
1866 case UNLE:
1867 case UNGT:
1868 case UNGE:
1869 case UNEQ:
1870 case LTGT:
1871 return UNKNOWN;
1872
1873 default:
1874 abort ();
1875 }
1876 }
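
/* A typical use of the above, sketched (COND and INSN are hypothetical
   values a caller would have in hand): flip a branch's sense only after
   checking that the flip is valid:

	if (can_reverse_comparison_p (cond, insn))
	  code = reverse_condition (GET_CODE (cond));

   so that e.g. GT becomes LE, never LT; with IEEE NaNs possible,
   "not greater" does not mean "less". */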
1877
1878 /* Similar, but we're allowed to generate unordered comparisons, which
1879 makes it safe for IEEE floating-point. Of course, we have to recognize
1880 that the target will support them too... */
1881
1882 enum rtx_code
1883 reverse_condition_maybe_unordered (code)
1884 enum rtx_code code;
1885 {
1886 /* Non-IEEE formats don't have unordered conditions. */
1887 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
1888 return reverse_condition (code);
1889
1890 switch (code)
1891 {
1892 case EQ:
1893 return NE;
1894 case NE:
1895 return EQ;
1896 case GT:
1897 return UNLE;
1898 case GE:
1899 return UNLT;
1900 case LT:
1901 return UNGE;
1902 case LE:
1903 return UNGT;
1904 case LTGT:
1905 return UNEQ;
1906 case GTU:
1907 return LEU;
1908 case GEU:
1909 return LTU;
1910 case LTU:
1911 return GEU;
1912 case LEU:
1913 return GTU;
1914 case UNORDERED:
1915 return ORDERED;
1916 case ORDERED:
1917 return UNORDERED;
1918 case UNLT:
1919 return GE;
1920 case UNLE:
1921 return GT;
1922 case UNGT:
1923 return LE;
1924 case UNGE:
1925 return LT;
1926 case UNEQ:
1927 return LTGT;
1928
1929 default:
1930 abort ();
1931 }
1932 }
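
/* For example, on an IEEE target this maps GT to UNLE and UNLT to GE:
   each reversal moves the unordered (NaN) outcome to the other side, so
   a code and its reversal together cover every input exactly once. */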
1933
1934 /* Similar, but return the code when two operands of a comparison are swapped.
1935 This IS safe for IEEE floating-point. */
1936
1937 enum rtx_code
1938 swap_condition (code)
1939 enum rtx_code code;
1940 {
1941 switch (code)
1942 {
1943 case EQ:
1944 case NE:
1945 case UNORDERED:
1946 case ORDERED:
1947 case UNEQ:
1948 case LTGT:
1949 return code;
1950
1951 case GT:
1952 return LT;
1953 case GE:
1954 return LE;
1955 case LT:
1956 return GT;
1957 case LE:
1958 return GE;
1959 case GTU:
1960 return LTU;
1961 case GEU:
1962 return LEU;
1963 case LTU:
1964 return GTU;
1965 case LEU:
1966 return GEU;
1967 case UNLT:
1968 return UNGT;
1969 case UNLE:
1970 return UNGE;
1971 case UNGT:
1972 return UNLT;
1973 case UNGE:
1974 return UNLE;
1975
1976 default:
1977 abort ();
1978 }
1979 }
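
/* For instance, swap_condition (LT) is GT, since (a < b) and (b > a)
   are the same test; symmetric codes such as EQ, NE and UNORDERED map
   to themselves. */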
1980
1981 /* Given a comparison CODE, return the corresponding unsigned comparison.
1982 If CODE is an equality comparison or already an unsigned comparison,
1983 CODE is returned. */
1984
1985 enum rtx_code
1986 unsigned_condition (code)
1987 enum rtx_code code;
1988 {
1989 switch (code)
1990 {
1991 case EQ:
1992 case NE:
1993 case GTU:
1994 case GEU:
1995 case LTU:
1996 case LEU:
1997 return code;
1998
1999 case GT:
2000 return GTU;
2001 case GE:
2002 return GEU;
2003 case LT:
2004 return LTU;
2005 case LE:
2006 return LEU;
2007
2008 default:
2009 abort ();
2010 }
2011 }
2012
2013 /* Similarly, return the signed version of a comparison. */
2014
2015 enum rtx_code
2016 signed_condition (code)
2017 enum rtx_code code;
2018 {
2019 switch (code)
2020 {
2021 case EQ:
2022 case NE:
2023 case GT:
2024 case GE:
2025 case LT:
2026 case LE:
2027 return code;
2028
2029 case GTU:
2030 return GT;
2031 case GEU:
2032 return GE;
2033 case LTU:
2034 return LT;
2035 case LEU:
2036 return LE;
2037
2038 default:
2039 abort ();
2040 }
2041 }
2042 \f
2043 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2044 truth of CODE1 implies the truth of CODE2. */
2045
2046 int
2047 comparison_dominates_p (code1, code2)
2048 enum rtx_code code1, code2;
2049 {
2050 if (code1 == code2)
2051 return 1;
2052
2053 switch (code1)
2054 {
2055 case EQ:
2056 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
2057 || code2 == ORDERED)
2058 return 1;
2059 break;
2060
2061 case LT:
2062 if (code2 == LE || code2 == NE || code2 == ORDERED)
2063 return 1;
2064 break;
2065
2066 case GT:
2067 if (code2 == GE || code2 == NE || code2 == ORDERED)
2068 return 1;
2069 break;
2070
2071 case GE:
2072 case LE:
2073 if (code2 == ORDERED)
2074 return 1;
2075 break;
2076
2077 case LTGT:
2078 if (code2 == NE || code2 == ORDERED)
2079 return 1;
2080 break;
2081
2082 case LTU:
2083 if (code2 == LEU || code2 == NE)
2084 return 1;
2085 break;
2086
2087 case GTU:
2088 if (code2 == GEU || code2 == NE)
2089 return 1;
2090 break;
2091
2092 case UNORDERED:
2093 if (code2 == NE)
2094 return 1;
2095 break;
2096
2097 default:
2098 break;
2099 }
2100
2101 return 0;
2102 }
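
/* For example, comparison_dominates_p (EQ, LE) is 1, since a == b
   guarantees a <= b; EQ likewise dominates LEU and GEU.  But LT does
   not dominate LEU, since signed and unsigned orderings disagree, and
   nothing except EQ itself dominates EQ. */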
2103 \f
2104 /* Return 1 if INSN is an unconditional jump and nothing else. */
2105
2106 int
2107 simplejump_p (insn)
2108 rtx insn;
2109 {
2110 return (GET_CODE (insn) == JUMP_INSN
2111 && GET_CODE (PATTERN (insn)) == SET
2112 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2113 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2114 }
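
/* The only pattern accepted above is the bare form

	(set (pc) (label_ref N))

   where N is whatever label the jump targets.  An equivalent jump
   wrapped in a PARALLEL is rejected; any_uncondjump_p below handles
   that case. */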
2115
2116 /* Return nonzero if INSN is a (possibly) conditional jump
2117 and nothing more.
2118
Use of this function is deprecated, since we need to support combined
2120 branch and compare insns. Use any_condjump_p instead whenever possible. */
2121
2122 int
2123 condjump_p (insn)
2124 rtx insn;
2125 {
2126 register rtx x = PATTERN (insn);
2127
2128 if (GET_CODE (x) != SET
2129 || GET_CODE (SET_DEST (x)) != PC)
2130 return 0;
2131
2132 x = SET_SRC (x);
2133 if (GET_CODE (x) == LABEL_REF)
2134 return 1;
2135 else
2136 return (GET_CODE (x) == IF_THEN_ELSE
2137 && ((GET_CODE (XEXP (x, 2)) == PC
2138 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
2139 || GET_CODE (XEXP (x, 1)) == RETURN))
2140 || (GET_CODE (XEXP (x, 1)) == PC
2141 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
2142 || GET_CODE (XEXP (x, 2)) == RETURN))));
2145 }
2146
2147 /* Return nonzero if INSN is a (possibly) conditional jump inside a
2148 PARALLEL.
2149
Use of this function is deprecated, since we need to support combined
2151 branch and compare insns. Use any_condjump_p instead whenever possible. */
2152
2153 int
2154 condjump_in_parallel_p (insn)
2155 rtx insn;
2156 {
2157 register rtx x = PATTERN (insn);
2158
2159 if (GET_CODE (x) != PARALLEL)
2160 return 0;
2161 else
2162 x = XVECEXP (x, 0, 0);
2163
2164 if (GET_CODE (x) != SET)
2165 return 0;
2166 if (GET_CODE (SET_DEST (x)) != PC)
2167 return 0;
2168 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2169 return 1;
2170 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2171 return 0;
2172 if (XEXP (SET_SRC (x), 2) == pc_rtx
2173 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2174 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2175 return 1;
2176 if (XEXP (SET_SRC (x), 1) == pc_rtx
2177 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2178 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2179 return 1;
2180 return 0;
2181 }
2182
/* Return the SET rtx through which jump insn INSN sets PC, or NULL_RTX
   if INSN is not a jump or its pattern does not set PC. */
2184
2185 rtx
2186 pc_set (insn)
2187 rtx insn;
2188 {
2189 rtx pat;
2190 if (GET_CODE (insn) != JUMP_INSN)
2191 return NULL_RTX;
2192 pat = PATTERN (insn);
2193
2194 /* The set is allowed to appear either as the insn pattern or
2195 the first set in a PARALLEL. */
2196 if (GET_CODE (pat) == PARALLEL)
2197 pat = XVECEXP (pat, 0, 0);
2198 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
2199 return pat;
2200
2201 return NULL_RTX;
2202 }
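
/* For instance (a sketch, with operands abbreviated), for either of

	(set (pc) (label_ref 42))
	(parallel [(set (pc) (if_then_else ...))
		   (clobber (reg:SI 0))])

   pc_set returns the inner SET.  For a computed jump such as
   (set (pc) (reg:SI 30)) it returns the SET as well, since only the
   destination is examined. */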
2203
2204 /* Return true when insn is an unconditional direct jump,
2205 possibly bundled inside a PARALLEL. */
2206
2207 int
2208 any_uncondjump_p (insn)
2209 rtx insn;
2210 {
2211 rtx x = pc_set (insn);
2212 if (!x)
2213 return 0;
2214 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
2215 return 0;
2216 return 1;
2217 }
2218
2219 /* Return true when insn is a conditional jump. This function works for
2220 instructions containing PC sets in PARALLELs. The instruction may have
2221 various other effects so before removing the jump you must verify
2222 onlyjump_p.
2223
2224 Note that unlike condjump_p it returns false for unconditional jumps. */
2225
2226 int
2227 any_condjump_p (insn)
2228 rtx insn;
2229 {
2230 rtx x = pc_set (insn);
2231 enum rtx_code a, b;
2232
2233 if (!x)
2234 return 0;
2235 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2236 return 0;
2237
2238 a = GET_CODE (XEXP (SET_SRC (x), 1));
2239 b = GET_CODE (XEXP (SET_SRC (x), 2));
2240
2241 return ((b == PC && (a == LABEL_REF || a == RETURN))
2242 || (a == PC && (b == LABEL_REF || b == RETURN)));
2243 }
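
/* The accepted shapes, sketched (a RETURN may stand in for either
   LABEL_REF):

	(set (pc) (if_then_else (cond) (label_ref L) (pc)))
	(set (pc) (if_then_else (cond) (pc) (label_ref L)))

   the second being a branch whose taken case is the fall-through arm;
   either may also appear as the first set of a PARALLEL. */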
2244
2245 /* Return the label of a conditional jump. */
2246
2247 rtx
2248 condjump_label (insn)
2249 rtx insn;
2250 {
2251 rtx x = pc_set (insn);
2252
2253 if (!x)
2254 return NULL_RTX;
2255 x = SET_SRC (x);
2256 if (GET_CODE (x) == LABEL_REF)
2257 return x;
2258 if (GET_CODE (x) != IF_THEN_ELSE)
2259 return NULL_RTX;
2260 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
2261 return XEXP (x, 1);
2262 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
2263 return XEXP (x, 2);
2264 return NULL_RTX;
2265 }
2266
2267 /* Return true if INSN is a (possibly conditional) return insn. */
2268
2269 static int
2270 returnjump_p_1 (loc, data)
2271 rtx *loc;
2272 void *data ATTRIBUTE_UNUSED;
2273 {
2274 rtx x = *loc;
2275 return x && GET_CODE (x) == RETURN;
2276 }
2277
2278 int
2279 returnjump_p (insn)
2280 rtx insn;
2281 {
2282 if (GET_CODE (insn) != JUMP_INSN)
2283 return 0;
2284 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
2285 }
2286
2287 /* Return true if INSN is a jump that only transfers control and
2288 nothing more. */
2289
2290 int
2291 onlyjump_p (insn)
2292 rtx insn;
2293 {
2294 rtx set;
2295
2296 if (GET_CODE (insn) != JUMP_INSN)
2297 return 0;
2298
2299 set = single_set (insn);
2300 if (set == NULL)
2301 return 0;
2302 if (GET_CODE (SET_DEST (set)) != PC)
2303 return 0;
2304 if (side_effects_p (SET_SRC (set)))
2305 return 0;
2306
2307 return 1;
2308 }
2309
2310 #ifdef HAVE_cc0
2311
2312 /* Return 1 if X is an RTX that does nothing but set the condition codes
2313 and CLOBBER or USE registers.
2314 Return -1 if X does explicitly set the condition codes,
2315 but also does other things. */
2316
2317 int
2318 sets_cc0_p (x)
2319 rtx x ATTRIBUTE_UNUSED;
2320 {
2321 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
2322 return 1;
2323 if (GET_CODE (x) == PARALLEL)
2324 {
2325 int i;
2326 int sets_cc0 = 0;
2327 int other_things = 0;
2328 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2329 {
2330 if (GET_CODE (XVECEXP (x, 0, i)) == SET
2331 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
2332 sets_cc0 = 1;
2333 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
2334 other_things = 1;
2335 }
2336 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
2337 }
2338 return 0;
2339 }
2340 #endif
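
/* Examples of the above, sketched: (set (cc0) (compare (reg:SI 0)
   (const_int 0))) yields 1; a PARALLEL that sets cc0 and also sets a
   data register yields -1; a pattern that never stores into cc0
   yields 0. */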
2341 \f
2342 /* Follow any unconditional jump at LABEL;
2343 return the ultimate label reached by any such chain of jumps.
2344 If LABEL is not followed by a jump, return LABEL.
2345 If the chain loops or we can't find end, return LABEL,
2346 since that tells caller to avoid changing the insn.
2347
2348 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2349 a USE or CLOBBER. */
2350
2351 rtx
2352 follow_jumps (label)
2353 rtx label;
2354 {
2355 register rtx insn;
2356 register rtx next;
2357 register rtx value = label;
2358 register int depth;
2359
2360 for (depth = 0;
2361 (depth < 10
2362 && (insn = next_active_insn (value)) != 0
2363 && GET_CODE (insn) == JUMP_INSN
2364 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
2365 && onlyjump_p (insn))
2366 || GET_CODE (PATTERN (insn)) == RETURN)
2367 && (next = NEXT_INSN (insn))
2368 && GET_CODE (next) == BARRIER);
2369 depth++)
2370 {
2371 /* Don't chain through the insn that jumps into a loop
2372 from outside the loop,
2373 since that would create multiple loop entry jumps
2374 and prevent loop optimization. */
2375 rtx tem;
2376 if (!reload_completed)
2377 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
2378 if (GET_CODE (tem) == NOTE
2379 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
2380 /* ??? Optional. Disables some optimizations, but makes
2381 gcov output more accurate with -O. */
2382 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
2383 return value;
2384
2385 /* If we have found a cycle, make the insn jump to itself. */
2386 if (JUMP_LABEL (insn) == label)
2387 return label;
2388
2389 tem = next_active_insn (JUMP_LABEL (insn));
2390 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2391 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2392 break;
2393
2394 value = JUMP_LABEL (insn);
2395 }
2396 if (depth == 10)
2397 return label;
2398 return value;
2399 }
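
/* A sketch of the chains follow_jumps collapses (labels hypothetical):

	L1: jump L2   ...   L2: jump L3   ...   L3: real code

   follow_jumps (L1) returns L3, so a caller can retarget in one hop.
   The depth limit of 10 and the JUMP_LABEL cycle test keep the walk
   finite. */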
2400
/* Assuming that field IDX of X is a vector of label_refs,
   replace each of them by the ultimate label reached by it.
   Return nonzero if a change is made.  The chaining is done by
   follow_jumps, which before reload will not chain across a
   NOTE_INSN_LOOP_BEG. */
2405
2406 static int
2407 tension_vector_labels (x, idx)
2408 register rtx x;
2409 register int idx;
2410 {
2411 int changed = 0;
2412 register int i;
2413 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
2414 {
2415 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
2416 register rtx nlabel = follow_jumps (olabel);
2417 if (nlabel && nlabel != olabel)
2418 {
2419 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
2420 ++LABEL_NUSES (nlabel);
2421 if (--LABEL_NUSES (olabel) == 0)
2422 delete_insn (olabel);
2423 changed = 1;
2424 }
2425 }
2426 return changed;
2427 }
2428 \f
2429 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2430 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2431 in INSN, then store one of them in JUMP_LABEL (INSN).
2432 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2433 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2434 Also, when there are consecutive labels, canonicalize on the last of them.
2435
2436 Note that two labels separated by a loop-beginning note
2437 must be kept distinct if we have not yet done loop-optimization,
2438 because the gap between them is where loop-optimize
will want to move invariant code to.  A nonzero CROSS_JUMP tells us
that loop-optimization is finished.
2441
2442 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2443 two labels distinct if they are separated by only USE or CLOBBER insns. */
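
/* Sketching the invariants this establishes: afterward each CODE_LABEL's
   LABEL_NUSES counts its references, each JUMP_INSN that mentions a
   label has it recorded in JUMP_LABEL, and a non-jump insn that mentions
   one carries a REG_LABEL note naming it. */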
2444
2445 static void
2446 mark_jump_label (x, insn, cross_jump, in_mem)
2447 register rtx x;
2448 rtx insn;
2449 int cross_jump;
2450 int in_mem;
2451 {
2452 register RTX_CODE code = GET_CODE (x);
2453 register int i;
2454 register const char *fmt;
2455
2456 switch (code)
2457 {
2458 case PC:
2459 case CC0:
2460 case REG:
2461 case SUBREG:
2462 case CONST_INT:
2463 case CONST_DOUBLE:
2464 case CLOBBER:
2465 case CALL:
2466 return;
2467
2468 case MEM:
2469 in_mem = 1;
2470 break;
2471
2472 case SYMBOL_REF:
2473 if (!in_mem)
2474 return;
2475
2476 /* If this is a constant-pool reference, see if it is a label. */
2477 if (CONSTANT_POOL_ADDRESS_P (x))
2478 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
2479 break;
2480
2481 case LABEL_REF:
2482 {
2483 rtx label = XEXP (x, 0);
2484 rtx olabel = label;
2485 rtx note;
2486 rtx next;
2487
2488 /* Ignore remaining references to unreachable labels that
2489 have been deleted. */
2490 if (GET_CODE (label) == NOTE
2491 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
2492 break;
2493
2494 if (GET_CODE (label) != CODE_LABEL)
2495 abort ();
2496
2497 /* Ignore references to labels of containing functions. */
2498 if (LABEL_REF_NONLOCAL_P (x))
2499 break;
2500
2501 /* If there are other labels following this one,
2502 replace it with the last of the consecutive labels. */
2503 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
2504 {
2505 if (GET_CODE (next) == CODE_LABEL)
2506 label = next;
2507 else if (cross_jump && GET_CODE (next) == INSN
2508 && (GET_CODE (PATTERN (next)) == USE
2509 || GET_CODE (PATTERN (next)) == CLOBBER))
2510 continue;
2511 else if (GET_CODE (next) != NOTE)
2512 break;
2513 else if (! cross_jump
2514 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
2515 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
2516 /* ??? Optional. Disables some optimizations, but
2517 makes gcov output more accurate with -O. */
2518 || (flag_test_coverage
2519 && NOTE_LINE_NUMBER (next) > 0)))
2520 break;
2521 }
2522
2523 XEXP (x, 0) = label;
2524 if (! insn || ! INSN_DELETED_P (insn))
2525 ++LABEL_NUSES (label);
2526
2527 if (insn)
2528 {
2529 if (GET_CODE (insn) == JUMP_INSN)
2530 JUMP_LABEL (insn) = label;
2531
2532 /* If we've changed OLABEL and we had a REG_LABEL note
2533 for it, update it as well. */
2534 else if (label != olabel
2535 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
2536 XEXP (note, 0) = label;
2537
2538 /* Otherwise, add a REG_LABEL note for LABEL unless there already
2539 is one. */
2540 else if (! find_reg_note (insn, REG_LABEL, label))
2541 {
/* This code used to ignore labels which referred to dispatch
   tables to avoid flow.c generating worse code.

   However, in the presence of global optimizations like
2546 gcse which call find_basic_blocks without calling
2547 life_analysis, not recording such labels will lead
2548 to compiler aborts because of inconsistencies in the
2549 flow graph. So we go ahead and record the label.
2550
2551 It may also be the case that the optimization argument
2552 is no longer valid because of the more accurate cfg
2553 we build in find_basic_blocks -- it no longer pessimizes
2554 code when it finds a REG_LABEL note. */
2555 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
2556 REG_NOTES (insn));
2557 }
2558 }
2559 return;
2560 }
2561
2562 /* Do walk the labels in a vector, but not the first operand of an
2563 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
2564 case ADDR_VEC:
2565 case ADDR_DIFF_VEC:
2566 if (! INSN_DELETED_P (insn))
2567 {
2568 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
2569
2570 for (i = 0; i < XVECLEN (x, eltnum); i++)
2571 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
2572 cross_jump, in_mem);
2573 }
2574 return;
2575
2576 default:
2577 break;
2578 }
2579
2580 fmt = GET_RTX_FORMAT (code);
2581 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2582 {
2583 if (fmt[i] == 'e')
2584 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
2585 else if (fmt[i] == 'E')
2586 {
2587 register int j;
2588 for (j = 0; j < XVECLEN (x, i); j++)
2589 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
2590 }
2591 }
2592 }
2593
2594 /* If all INSN does is set the pc, delete it,
2595 and delete the insn that set the condition codes for it
2596 if that's what the previous thing was. */
2597
2598 void
2599 delete_jump (insn)
2600 rtx insn;
2601 {
2602 register rtx set = single_set (insn);
2603
2604 if (set && GET_CODE (SET_DEST (set)) == PC)
2605 delete_computation (insn);
2606 }
2607
2608 /* Verify INSN is a BARRIER and delete it. */
2609
2610 void
2611 delete_barrier (insn)
2612 rtx insn;
2613 {
2614 if (GET_CODE (insn) != BARRIER)
2615 abort ();
2616
2617 delete_insn (insn);
2618 }
2619
2620 /* Recursively delete prior insns that compute the value (used only by INSN
2621 which the caller is deleting) stored in the register mentioned by NOTE
2622 which is a REG_DEAD note associated with INSN. */
2623
2624 static void
2625 delete_prior_computation (note, insn)
2626 rtx note;
2627 rtx insn;
2628 {
2629 rtx our_prev;
2630 rtx reg = XEXP (note, 0);
2631
2632 for (our_prev = prev_nonnote_insn (insn);
2633 our_prev && (GET_CODE (our_prev) == INSN
2634 || GET_CODE (our_prev) == CALL_INSN);
2635 our_prev = prev_nonnote_insn (our_prev))
2636 {
2637 rtx pat = PATTERN (our_prev);
2638
2639 /* If we reach a CALL which is not calling a const function
2640 or the callee pops the arguments, then give up. */
2641 if (GET_CODE (our_prev) == CALL_INSN
2642 && (! CONST_CALL_P (our_prev)
2643 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
2644 break;
2645
2646 /* If we reach a SEQUENCE, it is too complex to try to
2647 do anything with it, so give up. */
2648 if (GET_CODE (pat) == SEQUENCE)
2649 break;
2650
2651 if (GET_CODE (pat) == USE
2652 && GET_CODE (XEXP (pat, 0)) == INSN)
2653 /* reorg creates USEs that look like this. We leave them
2654 alone because reorg needs them for its own purposes. */
2655 break;
2656
2657 if (reg_set_p (reg, pat))
2658 {
2659 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
2660 break;
2661
2662 if (GET_CODE (pat) == PARALLEL)
2663 {
2664 /* If we find a SET of something else, we can't
2665 delete the insn. */
2666
2667 int i;
2668
2669 for (i = 0; i < XVECLEN (pat, 0); i++)
2670 {
2671 rtx part = XVECEXP (pat, 0, i);
2672
2673 if (GET_CODE (part) == SET
2674 && SET_DEST (part) != reg)
2675 break;
2676 }
2677
2678 if (i == XVECLEN (pat, 0))
2679 delete_computation (our_prev);
2680 }
2681 else if (GET_CODE (pat) == SET
2682 && GET_CODE (SET_DEST (pat)) == REG)
2683 {
2684 int dest_regno = REGNO (SET_DEST (pat));
2685 int dest_endregno
2686 = (dest_regno
2687 + (dest_regno < FIRST_PSEUDO_REGISTER
2688 ? HARD_REGNO_NREGS (dest_regno,
2689 GET_MODE (SET_DEST (pat))) : 1));
2690 int regno = REGNO (reg);
2691 int endregno
2692 = (regno
2693 + (regno < FIRST_PSEUDO_REGISTER
2694 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
2695
2696 if (dest_regno >= regno
2697 && dest_endregno <= endregno)
2698 delete_computation (our_prev);
2699
2700 /* We may have a multi-word hard register and some, but not
2701 all, of the words of the register are needed in subsequent
2702 insns. Write REG_UNUSED notes for those parts that were not
2703 needed. */
2704 else if (dest_regno <= regno
2705 && dest_endregno >= endregno)
2706 {
2707 int i;
2708
2709 REG_NOTES (our_prev)
2710 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
2711 REG_NOTES (our_prev));
2712
2713 for (i = dest_regno; i < dest_endregno; i++)
2714 if (! find_regno_note (our_prev, REG_UNUSED, i))
2715 break;
2716
2717 if (i == dest_endregno)
2718 delete_computation (our_prev);
2719 }
2720 }
2721
2722 break;
2723 }
2724
2725 /* If PAT references the register that dies here, it is an
2726 additional use. Hence any prior SET isn't dead. However, this
2727 insn becomes the new place for the REG_DEAD note. */
2728 if (reg_overlap_mentioned_p (reg, pat))
2729 {
2730 XEXP (note, 1) = REG_NOTES (our_prev);
2731 REG_NOTES (our_prev) = note;
2732 break;
2733 }
2734 }
2735 }
2736
2737 /* Delete INSN and recursively delete insns that compute values used only
2738 by INSN. This uses the REG_DEAD notes computed during flow analysis.
2739 If we are running before flow.c, we need do nothing since flow.c will
2740 delete dead code. We also can't know if the registers being used are
2741 dead or not at this point.
2742
2743 Otherwise, look at all our REG_DEAD notes. If a previous insn does
2744 nothing other than set a register that dies in this insn, we can delete
2745 that insn as well.
2746
2747 On machines with CC0, if CC0 is used in this insn, we may be able to
2748 delete the insn that set it. */
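
/* For example (a sketch), if INSN is

	(set (reg 70) (plus (reg 68) (reg 69)))

   and carries REG_DEAD notes for pseudos 68 and 69, deleting INSN lets
   us also delete the insns that computed 68 and 69, and recursively
   whatever fed them, so long as each did nothing else. */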
2749
2750 static void
2751 delete_computation (insn)
2752 rtx insn;
2753 {
2754 rtx note, next;
2755 rtx set;
2756
2757 #ifdef HAVE_cc0
2758 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2759 {
2760 rtx prev = prev_nonnote_insn (insn);
2761 /* We assume that at this stage
2762 CC's are always set explicitly
2763 and always immediately before the jump that
2764 will use them. So if the previous insn
2765 exists to set the CC's, delete it
2766 (unless it performs auto-increments, etc.). */
2767 if (prev && GET_CODE (prev) == INSN
2768 && sets_cc0_p (PATTERN (prev)))
2769 {
2770 if (sets_cc0_p (PATTERN (prev)) > 0
2771 && ! side_effects_p (PATTERN (prev)))
2772 delete_computation (prev);
2773 else
2774 /* Otherwise, show that cc0 won't be used. */
2775 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
2776 cc0_rtx, REG_NOTES (prev));
2777 }
2778 }
2779 #endif
2780
2781 #ifdef INSN_SCHEDULING
2782 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
2783 reload has completed. The schedulers need to be fixed. Until
2784 they are, we must not rely on the death notes here. */
2785 if (reload_completed && flag_schedule_insns_after_reload)
2786 {
2787 delete_insn (insn);
2788 return;
2789 }
2790 #endif
2791
2792 /* The REG_DEAD note may have been omitted for a register
2793 which is both set and used by the insn. */
2794 set = single_set (insn);
2795 if (set && GET_CODE (SET_DEST (set)) == REG)
2796 {
2797 int dest_regno = REGNO (SET_DEST (set));
2798 int dest_endregno
2799 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
2800 ? HARD_REGNO_NREGS (dest_regno,
2801 GET_MODE (SET_DEST (set))) : 1);
2802 int i;
2803
2804 for (i = dest_regno; i < dest_endregno; i++)
2805 {
2806 if (! refers_to_regno_p (i, i + 1, SET_SRC (set), NULL_PTR)
2807 || find_regno_note (insn, REG_DEAD, i))
2808 continue;
2809
2810 note = gen_rtx_EXPR_LIST (REG_DEAD,
2811 (i < FIRST_PSEUDO_REGISTER
2812 ? gen_rtx_REG (reg_raw_mode[i], i)
2813 : SET_DEST (set)), NULL_RTX);
2814 delete_prior_computation (note, insn);
2815 }
2816 }
2817
2818 for (note = REG_NOTES (insn); note; note = next)
2819 {
2820 next = XEXP (note, 1);
2821
2822 if (REG_NOTE_KIND (note) != REG_DEAD
2823 /* Verify that the REG_NOTE is legitimate. */
2824 || GET_CODE (XEXP (note, 0)) != REG)
2825 continue;
2826
2827 delete_prior_computation (note, insn);
2828 }
2829
2830 delete_insn (insn);
2831 }
2832 \f
2833 /* Delete insn INSN from the chain of insns and update label ref counts.
2834 May delete some following insns as a consequence; may even delete
2835 a label elsewhere and insns that follow it.
2836
2837 Returns the first insn after INSN that was not deleted. */
2838
2839 rtx
2840 delete_insn (insn)
2841 register rtx insn;
2842 {
2843 register rtx next = NEXT_INSN (insn);
2844 register rtx prev = PREV_INSN (insn);
2845 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
2846 register int dont_really_delete = 0;
2847
2848 while (next && INSN_DELETED_P (next))
2849 next = NEXT_INSN (next);
2850
2851 /* This insn is already deleted => return first following nondeleted. */
2852 if (INSN_DELETED_P (insn))
2853 return next;
2854
2855 if (was_code_label)
2856 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2857
2858 /* Don't delete user-declared labels. When optimizing, convert them
2859 to special NOTEs instead. When not optimizing, leave them alone. */
2860 if (was_code_label && LABEL_NAME (insn) != 0)
2861 {
2862 if (! optimize)
2863 dont_really_delete = 1;
2864 else if (! dont_really_delete)
2865 {
2866 const char *name = LABEL_NAME (insn);
2867 PUT_CODE (insn, NOTE);
2868 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
2869 NOTE_SOURCE_FILE (insn) = name;
2870 dont_really_delete = 1;
2871 }
2872 }
2873 else
2874 /* Mark this insn as deleted. */
2875 INSN_DELETED_P (insn) = 1;
2876
2877 /* If this is an unconditional jump, delete it from the jump chain. */
2878 if (simplejump_p (insn))
2879 delete_from_jump_chain (insn);
2880
/* If the insn is followed by a barrier,
   delete the barrier too. */
2883
2884 if (next != 0 && GET_CODE (next) == BARRIER)
2885 {
2886 INSN_DELETED_P (next) = 1;
2887 next = NEXT_INSN (next);
2888 }
2889
/* Patch out INSN (and the barrier if any).  */
2891
2892 if (! dont_really_delete)
2893 {
2894 if (prev)
2895 {
2896 NEXT_INSN (prev) = next;
2897 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2898 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
2899 XVECLEN (PATTERN (prev), 0) - 1)) = next;
2900 }
2901
2902 if (next)
2903 {
2904 PREV_INSN (next) = prev;
2905 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2906 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2907 }
2908
2909 if (prev && NEXT_INSN (prev) == 0)
2910 set_last_insn (prev);
2911 }
2912
2913 /* If deleting a jump, decrement the count of the label,
2914 and delete the label if it is now unused. */
2915
2916 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2917 {
2918 rtx lab = JUMP_LABEL (insn), lab_next;
2919
2920 if (--LABEL_NUSES (lab) == 0)
2921 {
2922 /* This can delete NEXT or PREV,
2923 either directly if NEXT is JUMP_LABEL (INSN),
2924 or indirectly through more levels of jumps. */
2925 delete_insn (lab);
2926
2927 /* I feel a little doubtful about this loop,
2928 but I see no clean and sure alternative way
2929 to find the first insn after INSN that is not now deleted.
2930 I hope this works. */
2931 while (next && INSN_DELETED_P (next))
2932 next = NEXT_INSN (next);
2933 return next;
2934 }
2935 else if ((lab_next = next_nonnote_insn (lab)) != NULL
2936 && GET_CODE (lab_next) == JUMP_INSN
2937 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
2938 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
2939 {
2940 /* If we're deleting the tablejump, delete the dispatch table.
We may not be able to kill the label immediately preceding
2942 just yet, as it might be referenced in code leading up to
2943 the tablejump. */
2944 delete_insn (lab_next);
2945 }
2946 }
2947
2948 /* Likewise if we're deleting a dispatch table. */
2949
2950 if (GET_CODE (insn) == JUMP_INSN
2951 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2952 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2953 {
2954 rtx pat = PATTERN (insn);
2955 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
2956 int len = XVECLEN (pat, diff_vec_p);
2957
2958 for (i = 0; i < len; i++)
2959 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
2960 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
2961 while (next && INSN_DELETED_P (next))
2962 next = NEXT_INSN (next);
2963 return next;
2964 }
2965
2966 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
2967 prev = PREV_INSN (prev);
2968
2969 /* If INSN was a label and a dispatch table follows it,
2970 delete the dispatch table. The tablejump must have gone already.
2971 It isn't useful to fall through into a table. */
2972
2973 if (was_code_label
2974 && NEXT_INSN (insn) != 0
2975 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
2976 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
2977 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
2978 next = delete_insn (NEXT_INSN (insn));
2979
2980 /* If INSN was a label, delete insns following it if now unreachable. */
2981
2982 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
2983 {
2984 register RTX_CODE code;
2985 while (next != 0
2986 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
2987 || code == NOTE || code == BARRIER
2988 || (code == CODE_LABEL && INSN_DELETED_P (next))))
2989 {
2990 if (code == NOTE
2991 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
2992 next = NEXT_INSN (next);
2993 /* Keep going past other deleted labels to delete what follows. */
2994 else if (code == CODE_LABEL && INSN_DELETED_P (next))
2995 next = NEXT_INSN (next);
2996 else
2997 /* Note: if this deletes a jump, it can cause more
2998 deletion of unreachable code, after a different label.
2999 As long as the value from this recursive call is correct,
3000 this invocation functions correctly. */
3001 next = delete_insn (next);
3002 }
3003 }
3004
3005 return next;
3006 }
3007
3008 /* Advance from INSN till reaching something not deleted
3009 then return that. May return INSN itself. */
3010
3011 rtx
3012 next_nondeleted_insn (insn)
3013 rtx insn;
3014 {
3015 while (INSN_DELETED_P (insn))
3016 insn = NEXT_INSN (insn);
3017 return insn;
3018 }
3019 \f
3020 /* Delete a range of insns from FROM to TO, inclusive.
3021 This is for the sake of peephole optimization, so assume
3022 that whatever these insns do will still be done by a new
3023 peephole insn that will replace them. */
3024
3025 void
3026 delete_for_peephole (from, to)
3027 register rtx from, to;
3028 {
3029 register rtx insn = from;
3030
3031 while (1)
3032 {
3033 register rtx next = NEXT_INSN (insn);
3034 register rtx prev = PREV_INSN (insn);
3035
3036 if (GET_CODE (insn) != NOTE)
3037 {
3038 INSN_DELETED_P (insn) = 1;
3039
3040 /* Patch this insn out of the chain. */
3041 /* We don't do this all at once, because we
3042 must preserve all NOTEs. */
3043 if (prev)
3044 NEXT_INSN (prev) = next;
3045
3046 if (next)
3047 PREV_INSN (next) = prev;
3048 }
3049
3050 if (insn == to)
3051 break;
3052 insn = next;
3053 }
3054
3055 /* Note that if TO is an unconditional jump
3056 we *do not* delete the BARRIER that follows,
3057 since the peephole that replaces this sequence
3058 is also an unconditional jump in that case. */
3059 }
3060 \f
3061 /* We have determined that INSN is never reached, and are about to
3062 delete it. Print a warning if the user asked for one.
3063
3064 To try to make this warning more useful, this should only be called
3065 once per basic block not reached, and it only warns when the basic
3066 block contains more than one line from the current function, and
3067 contains at least one operation. CSE and inlining can duplicate insns,
3068 so it's possible to get spurious warnings from this. */
3069
3070 void
3071 never_reached_warning (avoided_insn)
3072 rtx avoided_insn;
3073 {
3074 rtx insn;
3075 rtx a_line_note = NULL;
3076 int two_avoided_lines = 0;
3077 int contains_insn = 0;
3078
3079 if (! warn_notreached)
3080 return;
3081
3082 /* Scan forwards, looking at LINE_NUMBER notes, until
3083 we hit a LABEL or we run out of insns. */
3084
3085 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
3086 {
3087 if (GET_CODE (insn) == CODE_LABEL)
3088 break;
3089 else if (GET_CODE (insn) == NOTE /* A line number note? */
3090 && NOTE_LINE_NUMBER (insn) >= 0)
3091 {
3092 if (a_line_note == NULL)
3093 a_line_note = insn;
3094 else
3095 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
3096 != NOTE_LINE_NUMBER (insn));
3097 }
3098 else if (INSN_P (insn))
3099 contains_insn = 1;
3100 }
3101 if (two_avoided_lines && contains_insn)
3102 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
3103 NOTE_LINE_NUMBER (a_line_note),
3104 "will never be executed");
3105 }
3106 \f
3107 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
3108 NLABEL as a return. Accrue modifications into the change group. */
3109
3110 static void
3111 redirect_exp_1 (loc, olabel, nlabel, insn)
3112 rtx *loc;
3113 rtx olabel, nlabel;
3114 rtx insn;
3115 {
3116 register rtx x = *loc;
3117 register RTX_CODE code = GET_CODE (x);
3118 register int i;
3119 register const char *fmt;
3120
3121 if (code == LABEL_REF)
3122 {
3123 if (XEXP (x, 0) == olabel)
3124 {
3125 rtx n;
3126 if (nlabel)
3127 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3128 else
3129 n = gen_rtx_RETURN (VOIDmode);
3130
3131 validate_change (insn, loc, n, 1);
3132 return;
3133 }
3134 }
3135 else if (code == RETURN && olabel == 0)
3136 {
3137 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3138 if (loc == &PATTERN (insn))
3139 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3140 validate_change (insn, loc, x, 1);
3141 return;
3142 }
3143
3144 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
3145 && GET_CODE (SET_SRC (x)) == LABEL_REF
3146 && XEXP (SET_SRC (x), 0) == olabel)
3147 {
3148 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
3149 return;
3150 }
3151
3152 fmt = GET_RTX_FORMAT (code);
3153 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3154 {
3155 if (fmt[i] == 'e')
3156 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
3157 else if (fmt[i] == 'E')
3158 {
3159 register int j;
3160 for (j = 0; j < XVECLEN (x, i); j++)
3161 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
3162 }
3163 }
3164 }
3165
3166 /* Similar, but apply the change group and report success or failure. */
3167
3168 static int
3169 redirect_exp (olabel, nlabel, insn)
3170 rtx olabel, nlabel;
3171 rtx insn;
3172 {
3173 rtx *loc;
3174
3175 if (GET_CODE (PATTERN (insn)) == PARALLEL)
3176 loc = &XVECEXP (PATTERN (insn), 0, 0);
3177 else
3178 loc = &PATTERN (insn);
3179
3180 redirect_exp_1 (loc, olabel, nlabel, insn);
3181 if (num_validated_changes () == 0)
3182 return 0;
3183
3184 return apply_change_group ();
3185 }
3186
3187 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
3188 the modifications into the change group. Return false if we did
3189 not see how to do that. */
3190
3191 int
3192 redirect_jump_1 (jump, nlabel)
3193 rtx jump, nlabel;
3194 {
3195 int ochanges = num_validated_changes ();
3196 rtx *loc;
3197
3198 if (GET_CODE (PATTERN (jump)) == PARALLEL)
3199 loc = &XVECEXP (PATTERN (jump), 0, 0);
3200 else
3201 loc = &PATTERN (jump);
3202
3203 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
3204 return num_validated_changes () > ochanges;
3205 }
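
/* Unlike redirect_jump, this only queues the change.  A caller batching
   several edits would, roughly, queue each one here (and with the other
   *_1 routines), then commit them all with a single

	apply_change_group ()

   so that every queued edit is validated against the insn patterns at
   once and all are canceled together if any one fails. */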
3206
3207 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
3208 jump target label is unused as a result, it and the code following
3209 it may be deleted.
3210
3211 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3212 RETURN insn.
3213
3214 The return value will be 1 if the change was made, 0 if it wasn't
3215 (this can only occur for NLABEL == 0). */
3216
3217 int
3218 redirect_jump (jump, nlabel, delete_unused)
3219 rtx jump, nlabel;
3220 int delete_unused;
3221 {
3222 register rtx olabel = JUMP_LABEL (jump);
3223
3224 if (nlabel == olabel)
3225 return 1;
3226
3227 if (! redirect_exp (olabel, nlabel, jump))
3228 return 0;
3229
3230 /* If this is an unconditional branch, delete it from the jump_chain of
3231 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3232 have UID's in range and JUMP_CHAIN is valid). */
3233 if (jump_chain && (simplejump_p (jump)
3234 || GET_CODE (PATTERN (jump)) == RETURN))
3235 {
3236 int label_index = nlabel ? INSN_UID (nlabel) : 0;
3237
3238 delete_from_jump_chain (jump);
3239 if (label_index < max_jump_chain
3240 && INSN_UID (jump) < max_jump_chain)
3241 {
3242 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
3243 jump_chain[label_index] = jump;
3244 }
3245 }
3246
3247 JUMP_LABEL (jump) = nlabel;
3248 if (nlabel)
3249 ++LABEL_NUSES (nlabel);
3250
3251 /* If we're eliding the jump over exception cleanups at the end of a
3252 function, move the function end note so that -Wreturn-type works. */
3253 if (olabel && nlabel
3254 && NEXT_INSN (olabel)
3255 && GET_CODE (NEXT_INSN (olabel)) == NOTE
3256 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
3257 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
3258
3259 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
3260 delete_insn (olabel);
3261
3262 return 1;
3263 }
3264
/* Invert the jump condition of the conditional jump insn INSN.
   Accrue the modifications into the change group. */
3267
3268 static void
3269 invert_exp_1 (insn)
3270 rtx insn;
3271 {
3272 register RTX_CODE code;
3273 rtx x = pc_set (insn);
3274
3275 if (!x)
3276 abort ();
3277 x = SET_SRC (x);
3278
3279 code = GET_CODE (x);
3280
3281 if (code == IF_THEN_ELSE)
3282 {
3283 register rtx comp = XEXP (x, 0);
3284 register rtx tem;
3285
/* We can do this in two ways:  The preferable way, which can only
   be done when the comparison is reversible (see
   can_reverse_comparison_p), is to reverse the comparison code.
   Otherwise, swap the THEN-part and ELSE-part of the IF_THEN_ELSE.
   If we can't do either, fail. */
3290
3291 if (can_reverse_comparison_p (comp, insn))
3292 {
3293 validate_change (insn, &XEXP (x, 0),
3294 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
3295 GET_MODE (comp), XEXP (comp, 0),
3296 XEXP (comp, 1)),
3297 1);
3298 return;
3299 }
3300
3301 tem = XEXP (x, 1);
3302 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
3303 validate_change (insn, &XEXP (x, 2), tem, 1);
3304 }
3305 else
3306 abort ();
3307 }
3308
3309 /* Invert the jump condition of conditional jump insn, INSN.
3310
3311 Return 1 if we can do so, 0 if we cannot find a way to do so that
3312 matches a pattern. */
3313
3314 static int
3315 invert_exp (insn)
3316 rtx insn;
3317 {
3318 invert_exp_1 (insn);
3319 if (num_validated_changes () == 0)
3320 return 0;
3321
3322 return apply_change_group ();
3323 }
3324
3325 /* Invert the condition of the jump JUMP, and make it jump to label
3326 NLABEL instead of where it jumps now. Accrue changes into the
3327 change group. Return false if we didn't see how to perform the
3328 inversion and redirection. */
3329
3330 int
3331 invert_jump_1 (jump, nlabel)
3332 rtx jump, nlabel;
3333 {
3334 int ochanges;
3335
3336 ochanges = num_validated_changes ();
3337 invert_exp_1 (jump);
3338 if (num_validated_changes () == ochanges)
3339 return 0;
3340
3341 return redirect_jump_1 (jump, nlabel);
3342 }
3343
3344 /* Invert the condition of the jump JUMP, and make it jump to label
3345 NLABEL instead of where it jumps now. Return true if successful. */
3346
3347 int
3348 invert_jump (jump, nlabel, delete_unused)
3349 rtx jump, nlabel;
3350 int delete_unused;
3351 {
3352 /* We have to either invert the condition and change the label or
3353 do neither. Either operation could fail. We first try to invert
3354 the jump. If that succeeds, we try changing the label. If that fails,
3355 we invert the jump back to what it was. */
3356
3357 if (! invert_exp (jump))
3358 return 0;
3359
3360 if (redirect_jump (jump, nlabel, delete_unused))
3361 {
3362 /* An inverted jump means that a probability taken becomes a
3363 probability not taken. Subtract the branch probability from the
3364 probability base to convert it back to a taken probability. */
3365
3366 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3367 if (note)
3368 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
3369
3370 return 1;
3371 }
3372
3373 if (! invert_exp (jump))
3374 /* This should just be putting it back the way it was. */
3375 abort ();
3376
3377 return 0;
3378 }
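
/* For example (a sketch), given

	if (x < 0) goto L1;  goto L2;  L1:

   invert_jump (jump, L2, 1) rewrites the conditional branch as
   "if (x >= 0) goto L2", after which the unconditional jump is dead;
   the REG_BR_PROB adjustment above keeps the profile consistent. */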
3379
3380 /* Delete the instruction JUMP from any jump chain it might be on. */
3381
3382 static void
3383 delete_from_jump_chain (jump)
3384 rtx jump;
3385 {
3386 int index;
3387 rtx olabel = JUMP_LABEL (jump);
3388
3389 /* Handle unconditional jumps. */
3390 if (jump_chain && olabel != 0
3391 && INSN_UID (olabel) < max_jump_chain
3392 && simplejump_p (jump))
3393 index = INSN_UID (olabel);
3394 /* Handle return insns. */
3395 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3396 index = 0;
3397 else
3398 return;
3399
3400 if (jump_chain[index] == jump)
3401 jump_chain[index] = jump_chain[INSN_UID (jump)];
3402 else
3403 {
3404 rtx insn;
3405
3406 for (insn = jump_chain[index];
3407 insn != 0;
3408 insn = jump_chain[INSN_UID (insn)])
3409 if (jump_chain[INSN_UID (insn)] == jump)
3410 {
3411 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3412 break;
3413 }
3414 }
3415 }
3416 \f
3417 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3418
If the old jump target label (before the dispatch table) becomes unused,
it and the dispatch table may be deleted.  In that case, find the insn
before the jump that references that label, and delete it and its
logical successors too. */
3423
3424 static void
3425 redirect_tablejump (jump, nlabel)
3426 rtx jump, nlabel;
3427 {
3428 register rtx olabel = JUMP_LABEL (jump);
3429
3430 /* Add this jump to the jump_chain of NLABEL. */
3431 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
3432 && INSN_UID (jump) < max_jump_chain)
3433 {
3434 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
3435 jump_chain[INSN_UID (nlabel)] = jump;
3436 }
3437
3438 PATTERN (jump) = gen_jump (nlabel);
3439 JUMP_LABEL (jump) = nlabel;
3440 ++LABEL_NUSES (nlabel);
3441 INSN_CODE (jump) = -1;
3442
3443 if (--LABEL_NUSES (olabel) == 0)
3444 {
3445 delete_labelref_insn (jump, olabel, 0);
3446 delete_insn (olabel);
3447 }
3448 }
3449
3450 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3451 If we found one, delete it and then delete this insn if DELETE_THIS is
3452 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3453
3454 static int
3455 delete_labelref_insn (insn, label, delete_this)
3456 rtx insn, label;
3457 int delete_this;
3458 {
3459 int deleted = 0;
3460 rtx link;
3461
3462 if (GET_CODE (insn) != NOTE
3463 && reg_mentioned_p (label, PATTERN (insn)))
3464 {
3465 if (delete_this)
3466 {
3467 delete_insn (insn);
3468 deleted = 1;
3469 }
3470 else
3471 return 1;
3472 }
3473
3474 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
3475 if (delete_labelref_insn (XEXP (link, 0), label, 1))
3476 {
3477 if (delete_this)
3478 {
3479 delete_insn (insn);
3480 deleted = 1;
3481 }
3482 else
3483 return 1;
3484 }
3485
3486 return deleted;
3487 }
3488 \f
3489 /* Like rtx_equal_p except that it considers two REGs as equal
3490 if they renumber to the same value and considers two commutative
3491 operations to be the same if the order of the operands has been
3492 reversed.
3493
3494 ??? Addition is not commutative on the PA due to the weird implicit
3495 space register selection rules for memory addresses. Therefore, we
3496 don't consider a + b == b + a.
3497
3498 We could/should make this test a little tighter. Possibly only
3499 disabling it on the PA via some backend macro or only disabling this
3500 case when the PLUS is inside a MEM. */
3501
3502 int
3503 rtx_renumbered_equal_p (x, y)
3504 rtx x, y;
3505 {
3506 register int i;
3507 register RTX_CODE code = GET_CODE (x);
3508 register const char *fmt;
3509
3510 if (x == y)
3511 return 1;
3512
3513 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3514 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3515 && GET_CODE (SUBREG_REG (y)) == REG)))
3516 {
3517 int reg_x = -1, reg_y = -1;
3518 int word_x = 0, word_y = 0;
3519
3520 if (GET_MODE (x) != GET_MODE (y))
3521 return 0;
3522
3523 /* If we haven't done any renumbering, don't
3524 make any assumptions. */
3525 if (reg_renumber == 0)
3526 return rtx_equal_p (x, y);
3527
3528 if (code == SUBREG)
3529 {
3530 reg_x = REGNO (SUBREG_REG (x));
3531 word_x = SUBREG_WORD (x);
3532
3533 if (reg_renumber[reg_x] >= 0)
3534 {
3535 reg_x = reg_renumber[reg_x] + word_x;
3536 word_x = 0;
3537 }
3538 }
3539
3540 else
3541 {
3542 reg_x = REGNO (x);
3543 if (reg_renumber[reg_x] >= 0)
3544 reg_x = reg_renumber[reg_x];
3545 }
3546
3547 if (GET_CODE (y) == SUBREG)
3548 {
3549 reg_y = REGNO (SUBREG_REG (y));
3550 word_y = SUBREG_WORD (y);
3551
3552 if (reg_renumber[reg_y] >= 0)
3553 {
3554 reg_y = reg_renumber[reg_y];
3555 word_y = 0;
3556 }
3557 }
3558
3559 else
3560 {
3561 reg_y = REGNO (y);
3562 if (reg_renumber[reg_y] >= 0)
3563 reg_y = reg_renumber[reg_y];
3564 }
3565
3566 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
3567 }
3568
3569 /* Now we have disposed of all the cases
3570 in which different rtx codes can match. */
3571 if (code != GET_CODE (y))
3572 return 0;
3573
3574 switch (code)
3575 {
3576 case PC:
3577 case CC0:
3578 case ADDR_VEC:
3579 case ADDR_DIFF_VEC:
3580 return 0;
3581
3582 case CONST_INT:
3583 return INTVAL (x) == INTVAL (y);
3584
3585 case LABEL_REF:
3586 /* We can't assume nonlocal labels have their following insns yet. */
3587 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3588 return XEXP (x, 0) == XEXP (y, 0);
3589
3590 /* Two label-refs are equivalent if they point at labels
3591 in the same position in the instruction stream. */
3592 return (next_real_insn (XEXP (x, 0))
3593 == next_real_insn (XEXP (y, 0)));
3594
3595 case SYMBOL_REF:
3596 return XSTR (x, 0) == XSTR (y, 0);
3597
3598 case CODE_LABEL:
3599 /* If we didn't match EQ equality above, they aren't the same. */
3600 return 0;
3601
3602 default:
3603 break;
3604 }
3605
3606 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3607
3608 if (GET_MODE (x) != GET_MODE (y))
3609 return 0;
3610
/* For commutative operations, the RTXs match if the operands match in
   either order.  Also handle the simple binary and unary cases without a loop.
3613
3614 ??? Don't consider PLUS a commutative operator; see comments above. */
3615 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3616 && code != PLUS)
3617 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3618 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
3619 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
3620 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
3621 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
3622 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3623 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
3624 else if (GET_RTX_CLASS (code) == '1')
3625 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
3626
/* Compare the elements.  If any pair of corresponding elements
   fails to match, return 0 for the whole thing. */
3629
3630 fmt = GET_RTX_FORMAT (code);
3631 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3632 {
3633 register int j;
3634 switch (fmt[i])
3635 {
3636 case 'w':
3637 if (XWINT (x, i) != XWINT (y, i))
3638 return 0;
3639 break;
3640
3641 case 'i':
3642 if (XINT (x, i) != XINT (y, i))
3643 return 0;
3644 break;
3645
3646 case 's':
3647 if (strcmp (XSTR (x, i), XSTR (y, i)))
3648 return 0;
3649 break;
3650
3651 case 'e':
3652 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
3653 return 0;
3654 break;
3655
3656 case 'u':
3657 if (XEXP (x, i) != XEXP (y, i))
3658 return 0;
3659 /* fall through. */
3660 case '0':
3661 break;
3662
3663 case 'E':
3664 if (XVECLEN (x, i) != XVECLEN (y, i))
3665 return 0;
3666 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3667 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
3668 return 0;
3669 break;
3670
3671 default:
3672 abort ();
3673 }
3674 }
3675 return 1;
3676 }
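
/* So, as a sketch: if pseudo 65 was renumbered to hard register 3,

	(eq (reg:SI 65) (const_int 0))  and
	(eq (const_int 0) (reg:SI 3))

   compare equal here -- same renumbered register, operands merely
   swapped.  The same operands under PLUS would not match, per the PA
   caveat above. */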
3677 \f
3678 /* If X is a hard register or equivalent to one or a subregister of one,
3679 return the hard register number. If X is a pseudo register that was not
3680 assigned a hard register, return the pseudo register number. Otherwise,
3681 return -1. Any rtx is valid for X. */
3682
3683 int
3684 true_regnum (x)
3685 rtx x;
3686 {
3687 if (GET_CODE (x) == REG)
3688 {
3689 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3690 return reg_renumber[REGNO (x)];
3691 return REGNO (x);
3692 }
3693 if (GET_CODE (x) == SUBREG)
3694 {
3695 int base = true_regnum (SUBREG_REG (x));
3696 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3697 return SUBREG_WORD (x) + base;
3698 }
3699 return -1;
3700 }
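
/* For instance, if reg_renumber assigned pseudo 101 to hard register 3,
   true_regnum on (reg:DI 101) is 3 and on (subreg:SI (reg:DI 101) 1)
   is 4; for a pseudo that stayed on the stack it simply returns the
   pseudo's own number. */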
3701 \f
3702 /* Optimize code of the form:
3703
3704 for (x = a[i]; x; ...)
3705 ...
3706 for (x = a[i]; x; ...)
3707 ...
3708 foo:
3709
3710 Loop optimize will change the above code into
3711
3712 if (x = a[i])
3713 for (;;)
3714 { ...; if (! (x = ...)) break; }
3715 if (x = a[i])
3716 for (;;)
3717 { ...; if (! (x = ...)) break; }
3718 foo:
3719
3720 In general, if the first test fails, the program can branch
3721 directly to `foo' and skip the second try which is doomed to fail.
3722 We run this after loop optimization and before flow analysis. */
3723
3724 /* When comparing the insn patterns, we track the fact that different
3725 pseudo-register numbers may have been used in each computation.
3726 The following array stores an equivalence -- same_regs[I] == J means
3727 that pseudo register I was used in the first set of tests in a context
where J was used in the second set.  We also count the number of such
pending equivalences; if the count is still nonzero when a comparison
finishes, the expressions really aren't the same. */
3731
3732 static int *same_regs;
3733
3734 static int num_same_regs;
3735
3736 /* Track any registers modified between the target of the first jump and
3737 the second jump. They never compare equal. */
3738
3739 static char *modified_regs;
3740
3741 /* Record if memory was modified. */
3742
3743 static int modified_mem;
3744
3745 /* Called via note_stores on each insn between the target of the first
3746 branch and the second branch. It marks any changed registers. */
3747
3748 static void
3749 mark_modified_reg (dest, x, data)
3750 rtx dest;
3751 rtx x ATTRIBUTE_UNUSED;
3752 void *data ATTRIBUTE_UNUSED;
3753 {
3754 int regno;
3755 unsigned int i;
3756
3757 if (GET_CODE (dest) == SUBREG)
3758 dest = SUBREG_REG (dest);
3759
3760 if (GET_CODE (dest) == MEM)
3761 modified_mem = 1;
3762
3763 if (GET_CODE (dest) != REG)
3764 return;
3765
3766 regno = REGNO (dest);
3767 if (regno >= FIRST_PSEUDO_REGISTER)
3768 modified_regs[regno] = 1;
3769 else
3770 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
3771 modified_regs[regno + i] = 1;
3772 }
3773
3774 /* F is the first insn in the chain of insns. */
3775
3776 void
3777 thread_jumps (f, max_reg, flag_before_loop)
3778 rtx f;
3779 int max_reg;
3780 int flag_before_loop;
3781 {
3782 /* Basic algorithm is to find a conditional branch,
3783 the label it may branch to, and the branch after
3784 that label. If the two branches test the same condition,
3785 walk back from both branch paths until the insn patterns
3786 differ, or code labels are hit. If we make it back to
3787 the target of the first branch, then we know that the first branch
3788 will either always succeed or always fail depending on the relative
3789 senses of the two branches. So adjust the first branch accordingly
3790 in this case. */
3791
3792 rtx label, b1, b2, t1, t2;
3793 enum rtx_code code1, code2;
3794 rtx b1op0, b1op1, b2op0, b2op1;
3795 int changed = 1;
3796 int i;
3797 int *all_reset;
3798
3799 /* Allocate register tables and quick-reset table. */
3800 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
3801 same_regs = (int *) xmalloc (max_reg * sizeof (int));
3802 all_reset = (int *) xmalloc (max_reg * sizeof (int));
3803 for (i = 0; i < max_reg; i++)
3804 all_reset[i] = -1;
3805
3806 while (changed)
3807 {
3808 changed = 0;
3809
3810 for (b1 = f; b1; b1 = NEXT_INSN (b1))
3811 {
3812 rtx set;
3813 rtx set2;
3814
3815 /* Get to a candidate branch insn. */
3816 if (GET_CODE (b1) != JUMP_INSN
3817 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
3818 continue;
3819
3820 bzero (modified_regs, max_reg * sizeof (char));
3821 modified_mem = 0;
3822
3823 bcopy ((char *) all_reset, (char *) same_regs,
3824 max_reg * sizeof (int));
3825 num_same_regs = 0;
3826
3827 label = JUMP_LABEL (b1);
3828
3829 /* Look for a branch after the target. Record any registers and
3830 memory modified between the target and the branch. Stop when we
3831 get to a label since we can't know what was changed there. */
3832 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
3833 {
3834 if (GET_CODE (b2) == CODE_LABEL)
3835 break;
3836
3837 else if (GET_CODE (b2) == JUMP_INSN)
3838 {
3839 /* If this is an unconditional jump and is the only use of
3840 its target label, we can follow it. */
3841 if (any_uncondjump_p (b2)
3842 && onlyjump_p (b2)
3843 && JUMP_LABEL (b2) != 0
3844 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
3845 {
3846 b2 = JUMP_LABEL (b2);
3847 continue;
3848 }
3849 else
3850 break;
3851 }
3852
3853 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
3854 continue;
3855
3856 if (GET_CODE (b2) == CALL_INSN)
3857 {
3858 modified_mem = 1;
3859 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3860 if (call_used_regs[i] && ! fixed_regs[i]
3861 && i != STACK_POINTER_REGNUM
3862 && i != FRAME_POINTER_REGNUM
3863 && i != HARD_FRAME_POINTER_REGNUM
3864 && i != ARG_POINTER_REGNUM)
3865 modified_regs[i] = 1;
3866 }
3867
3868 note_stores (PATTERN (b2), mark_modified_reg, NULL);
3869 }
3870
3871 /* Check that the insn reached from the first branch's label is a
3872 suitable candidate second branch. */
3873 if (b2 == 0
3874 || GET_CODE (b2) != JUMP_INSN
3875 || b2 == b1
3876 || !any_condjump_p (b2)
3877 || !onlyjump_p (b2))
3878 continue;
3879 set = pc_set (b1);
3880 set2 = pc_set (b2);
3881
3882 /* Get the comparison codes and operands, reversing the
3883 codes if appropriate. If we don't have comparison codes,
3884 we can't do anything. */
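/* A conditional jump has the form (set (pc) (if_then_else COND
(label_ref L) (pc))) or the same with the arms swapped; when the
THEN arm is (pc), the branch is taken when COND is false, so the
sense of the comparison must be reversed. */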
3885 b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
3886 b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
3887 code1 = GET_CODE (XEXP (SET_SRC (set), 0));
3888 if (XEXP (SET_SRC (set), 1) == pc_rtx)
3889 code1 = reverse_condition (code1);
3890
3891 b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
3892 b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
3893 code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
3894 if (XEXP (SET_SRC (set2), 1) == pc_rtx)
3895 code2 = reverse_condition (code2);
3896
3897 /* If they test the same operands, and knowing that B1 branches
3898 tells us whether or not B2 branches, check whether we can
3899 thread the first branch. */
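/* For example, comparison_dominates_p (EQ, LE) holds, since x == y
implies x <= y; and if CODE1 dominates the reverse of CODE2, a
taken B1 guarantees that B2 is not taken. */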
3900 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
3901 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
3902 && (comparison_dominates_p (code1, code2)
3903 || (can_reverse_comparison_p (XEXP (SET_SRC (set), 0), b1)
3904 && comparison_dominates_p (code1,
3905 reverse_condition (code2)))))
3907 {
3908 t1 = prev_nonnote_insn (b1);
3909 t2 = prev_nonnote_insn (b2);
3910
3911 while (t1 != 0 && t2 != 0)
3912 {
3913 if (t2 == label)
3914 {
3915 /* We have reached the target of the first branch.
3916 If there are no pending register equivalents,
3917 we know that this branch will either always
3918 succeed (if the senses of the two branches are
3919 the same) or always fail (if not). */
3920 rtx new_label;
3921
3922 if (num_same_regs != 0)
3923 break;
3924
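/* If B1's condition implies B2's, a taken B1 would also take B2,
so jump straight to B2's target; otherwise a taken B1 means B2
falls through, so jump to a label just after B2. */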
3925 if (comparison_dominates_p (code1, code2))
3926 new_label = JUMP_LABEL (b2);
3927 else
3928 new_label = get_label_after (b2);
3929
3930 if (JUMP_LABEL (b1) != new_label)
3931 {
3932 rtx prev = PREV_INSN (new_label);
3933
3934 if (flag_before_loop
3935 && GET_CODE (prev) == NOTE
3936 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
3937 {
3938 /* Don't thread to the loop label. If a loop
3939 label is reused, loop optimization will
3940 be disabled for that loop. */
3941 new_label = gen_label_rtx ();
3942 emit_label_after (new_label, PREV_INSN (prev));
3943 }
3944 changed |= redirect_jump (b1, new_label, 1);
3945 }
3946 break;
3947 }
3948
3949 /* If either of these is not a normal insn (it might be
3950 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
3951 have already been skipped above.) Similarly, fail
3952 if the insns are different. */
3953 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
3954 || recog_memoized (t1) != recog_memoized (t2)
3955 || ! rtx_equal_for_thread_p (PATTERN (t1),
3956 PATTERN (t2), t2))
3957 break;
3958
3959 t1 = prev_nonnote_insn (t1);
3960 t2 = prev_nonnote_insn (t2);
3961 }
3962 }
3963 }
3964 }
3965
3966 /* Clean up. */
3967 free (modified_regs);
3968 free (same_regs);
3969 free (all_reset);
3970 }
3971 \f
3972 /* This is like RTX_EQUAL_P except that it knows about our handling of
3973 possibly equivalent registers and knows to consider volatile and
3974 modified objects as not equal.
3975
3976 YINSN is the insn containing Y. */
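/* For example (register numbers invented): if (reg 100) on the X
side is first seen opposite (reg 105) on the Y side, the pair is
tentatively recorded in same_regs; later occurrences must pair up
the same way, and a matching pair of SETs cancels the entry. */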
3977
3978 int
3979 rtx_equal_for_thread_p (x, y, yinsn)
3980 rtx x, y;
3981 rtx yinsn;
3982 {
3983 register int i;
3984 register int j;
3985 register enum rtx_code code;
3986 register const char *fmt;
3987
3988 code = GET_CODE (x);
3989 /* Rtx's of different codes cannot be equal. */
3990 if (code != GET_CODE (y))
3991 return 0;
3992
3993 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
3994 (REG:SI x) and (REG:HI x) are NOT equivalent. */
3995
3996 if (GET_MODE (x) != GET_MODE (y))
3997 return 0;
3998
3999 /* For floating-point, consider everything unequal. This is a bit
4000 pessimistic, but this pass would only rarely do anything for FP
4001 anyway. */
4002 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
4003 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
4004 return 0;
4005
4006 /* For commutative operations, the RTXs match if the operands match in
4007 either order. Also handle simple binary and unary cases without a loop. */
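/* E.g. (plus:SI (reg 100) (const_int 4)) matches
(plus:SI (const_int 4) (reg 100)), since PLUS is commutative
(register number invented for illustration). */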
4008 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4009 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4010 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
4011 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
4012 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
4013 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4014 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4015 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
4016 else if (GET_RTX_CLASS (code) == '1')
4017 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4018
4019 /* Handle special cases first. */
4020 switch (code)
4021 {
4022 case REG:
4023 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
4024 return 1;
4025
4026 /* If neither is a user variable nor a hard register, check for a
4027 possible equivalence. */
4028 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
4029 || REGNO (x) < FIRST_PSEUDO_REGISTER
4030 || REGNO (y) < FIRST_PSEUDO_REGISTER)
4031 return 0;
4032
4033 if (same_regs[REGNO (x)] == -1)
4034 {
4035 same_regs[REGNO (x)] = REGNO (y);
4036 num_same_regs++;
4037
4038 /* If this is the first time we are seeing a register on the `Y'
4039 side, see if it is the last use. If not, we can't thread the
4040 jump, so mark it as not equivalent. */
4041 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
4042 return 0;
4043
4044 return 1;
4045 }
4046 else
4047 return (same_regs[REGNO (x)] == (int) REGNO (y));
4048
4049 break;
4050
4051 case MEM:
4052 /* If memory was modified, or either reference is volatile, they are
4053 not equivalent. Otherwise, compare the addresses. */
4054 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4055 return 0;
4056
4057 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4058
4059 case ASM_INPUT:
4060 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4061 return 0;
4062
4063 break;
4064
4065 case SET:
4066 /* Cancel a pending `same_regs' pair if these SETs store into the
4067 equivalenced registers; then compare the sources. */
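/* That is, if (reg 100) and (reg 105) were tentatively paired when
read, a pair of SETs storing into exactly those registers consumes
the pairing instead of failing the match (register numbers invented
for illustration). */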
4068 if (GET_CODE (SET_DEST (x)) == REG
4069 && GET_CODE (SET_DEST (y)) == REG)
4070 {
4071 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
4072 {
4073 same_regs[REGNO (SET_DEST (x))] = -1;
4074 num_same_regs--;
4075 }
4076 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
4077 return 0;
4078 }
4079 else
4080 {
4081 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
4082 return 0;
4083 }
4084
4085 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
4086
4087 case LABEL_REF:
4088 return XEXP (x, 0) == XEXP (y, 0);
4089
4090 case SYMBOL_REF:
4091 return XSTR (x, 0) == XSTR (y, 0);
4092
4093 default:
4094 break;
4095 }
4096
4097 if (x == y)
4098 return 1;
4099
4100 fmt = GET_RTX_FORMAT (code);
4101 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4102 {
4103 switch (fmt[i])
4104 {
4105 case 'w':
4106 if (XWINT (x, i) != XWINT (y, i))
4107 return 0;
4108 break;
4109
4110 case 'n':
4111 case 'i':
4112 if (XINT (x, i) != XINT (y, i))
4113 return 0;
4114 break;
4115
4116 case 'V':
4117 case 'E':
4118 /* Two vectors must have the same length. */
4119 if (XVECLEN (x, i) != XVECLEN (y, i))
4120 return 0;
4121
4122 /* And the corresponding elements must match. */
4123 for (j = 0; j < XVECLEN (x, i); j++)
4124 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
4125 XVECEXP (y, i, j), yinsn) == 0)
4126 return 0;
4127 break;
4128
4129 case 'e':
4130 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
4131 return 0;
4132 break;
4133
4134 case 'S':
4135 case 's':
4136 if (strcmp (XSTR (x, i), XSTR (y, i)))
4137 return 0;
4138 break;
4139
4140 case 'u':
4141 /* These are just backpointers, so they don't matter. */
4142 break;
4143
4144 case '0':
4145 case 't':
4146 break;
4147
4148 /* It is believed that rtx's at this level will never
4149 contain anything but integers and other rtx's,
4150 except for within LABEL_REFs and SYMBOL_REFs. */
4151 default:
4152 abort ();
4153 }
4154 }
4155 return 1;
4156 }