gcc/jump.c
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
25
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
30
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
38
39 Optionally, cross-jumping can be done. Currently it is done
40 only on the last run (after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
43
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
46
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
50
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
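
/* For illustration, a hypothetical example (not taken from real
   output) of the "jump to a jump" simplification described above,
   written at the C level although the pass itself works on RTL:

	goto L1;			goto L2;
	...			=>	...
     L1: goto L2;		     L1: goto L2;

   The first jump is tensioned to the final target; L1's use count
   drops, and the label and the jump after it are deleted later if
   nothing else refers to them.  */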
53
54 #include "config.h"
55 #include "system.h"
56 #include "rtl.h"
57 #include "tm_p.h"
58 #include "flags.h"
59 #include "hard-reg-set.h"
60 #include "regs.h"
61 #include "insn-config.h"
62 #include "insn-flags.h"
63 #include "insn-attr.h"
64 #include "recog.h"
65 #include "function.h"
66 #include "expr.h"
67 #include "real.h"
68 #include "except.h"
69 #include "toplev.h"
70
71 /* ??? Eventually must record somehow the labels used by jumps
72 from nested functions. */
73 /* Pre-record the next or previous real insn for each label?
74 No, this pass is very fast anyway. */
75 /* Condense consecutive labels?
76 This would make life analysis faster, maybe. */
77 /* Optimize jump y; x: ... y: jumpif... x?
78 Don't know if it is worth bothering with. */
79 /* Optimize two cases of conditional jump to conditional jump?
80 This can never delete any instruction or make anything dead,
81 or even change what is live at any point.
82 So perhaps let combiner do it. */
83
84 /* Vector indexed by uid.
85 For each CODE_LABEL, index by its uid to get first unconditional jump
86 that jumps to the label.
87 For each JUMP_INSN, index by its uid to get the next unconditional jump
88 that jumps to the same label.
89 Element 0 is the start of a chain of all return insns.
90 (It is safe to use element 0 because insn uid 0 is not used.) */
91
92 static rtx *jump_chain;
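
/* Illustrative sketch, assuming LAB is some CODE_LABEL in the
   function (hypothetical variable names): walking all unconditional
   jumps to LAB looks like

	rtx j;
	for (j = jump_chain[INSN_UID (LAB)];
	     j != 0;
	     j = jump_chain[INSN_UID (j)])
	  ...process unconditional jump J to LAB...

   and jump_chain[0] heads the analogous chain of return insns.  */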
93
94 /* Maximum index in jump_chain. */
95
96 static int max_jump_chain;
97
98 /* Indicates whether death notes are significant in cross jump analysis.
99 Normally they are not significant, because if A and B jump to C,
100 and R dies in A, it must die in B. But this might not be true after
101 stack register conversion, and we must compare death notes in that
102 case. */
103
104 static int cross_jump_death_matters = 0;
105
106 static int init_label_info PARAMS ((rtx));
107 static void delete_barrier_successors PARAMS ((rtx));
108 static void mark_all_labels PARAMS ((rtx, int));
109 static rtx delete_unreferenced_labels PARAMS ((rtx));
110 static void delete_noop_moves PARAMS ((rtx));
111 static int duplicate_loop_exit_test PARAMS ((rtx));
112 static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
113 static void do_cross_jump PARAMS ((rtx, rtx, rtx));
114 static int jump_back_p PARAMS ((rtx, rtx));
115 static int tension_vector_labels PARAMS ((rtx, int));
116 static void mark_jump_label PARAMS ((rtx, rtx, int, int));
117 static void delete_computation PARAMS ((rtx));
118 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
119 static int redirect_exp PARAMS ((rtx, rtx, rtx));
120 static void invert_exp_1 PARAMS ((rtx));
121 static int invert_exp PARAMS ((rtx));
122 static void delete_from_jump_chain PARAMS ((rtx));
123 static int delete_labelref_insn PARAMS ((rtx, rtx, int));
124 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
125 static void redirect_tablejump PARAMS ((rtx, rtx));
126 static void jump_optimize_1 PARAMS ((rtx, int, int, int, int, int));
127 static int returnjump_p_1 PARAMS ((rtx *, void *));
128 static void delete_prior_computation PARAMS ((rtx, rtx));
129 \f
130 /* Main external entry point into the jump optimizer. See comments before
131 jump_optimize_1 for descriptions of the arguments. */
132 void
133 jump_optimize (f, cross_jump, noop_moves, after_regscan)
134 rtx f;
135 int cross_jump;
136 int noop_moves;
137 int after_regscan;
138 {
139 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
140 }
141
142 /* Alternate entry into the jump optimizer. This entry point only rebuilds
143 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
144 instructions. */
145 void
146 rebuild_jump_labels (f)
147 rtx f;
148 {
149 jump_optimize_1 (f, 0, 0, 0, 1, 0);
150 }
151
152 /* Alternate entry into the jump optimizer. Do only trivial optimizations. */
153
154 void
155 jump_optimize_minimal (f)
156 rtx f;
157 {
158 jump_optimize_1 (f, 0, 0, 0, 0, 1);
159 }
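
/* A minimal sketch of hypothetical call sites for the three entry
   points above (the argument values mirror the wrappers; they are
   not prescriptive):

	jump_optimize (f, 2, 1, 1);	full pass: cross jump paying
					attention to death notes, delete
					no-op moves, just after regscan
	rebuild_jump_labels (f);	only recompute JUMP_LABEL fields
					and REG_LABEL notes
	jump_optimize_minimal (f);	only the trivial optimizations  */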
160 \f
161 /* Delete no-op jumps and optimize jumps to jumps
162 and jumps around jumps.
163 Delete unused labels and unreachable code.
164
165 If CROSS_JUMP is 1, detect matching code
166 before a jump and its destination and unify them.
167 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
168
169 If NOOP_MOVES is nonzero, delete no-op move insns.
170
171 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
172 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
173
174 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
175 and JUMP_LABEL field for jumping insns.
176
177 If `optimize' is zero, don't change any code,
178 just determine whether control drops off the end of the function.
179 This case occurs when we have -W and not -O.
180 It works because `delete_insn' checks the value of `optimize'
181 and refrains from actually deleting when that is 0.
182
183 If MINIMAL is nonzero, then we only perform trivial optimizations:
184
185 * Removal of unreachable code after BARRIERs.
186 * Removal of unreferenced CODE_LABELs.
187 * Removal of a jump to the next instruction.
188 * Removal of a conditional jump followed by an unconditional jump
189 to the same target as the conditional jump.
190 * Simplify a conditional jump around an unconditional jump.
191 * Simplify a jump to a jump.
192 * Delete extraneous line number notes.
193 */
194
195 static void
196 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
197 mark_labels_only, minimal)
198 rtx f;
199 int cross_jump;
200 int noop_moves;
201 int after_regscan;
202 int mark_labels_only;
203 int minimal;
204 {
205 register rtx insn, next;
206 int changed;
207 int old_max_reg;
208 int first = 1;
209 int max_uid = 0;
210 rtx last_insn;
211
212 cross_jump_death_matters = (cross_jump == 2);
213 max_uid = init_label_info (f) + 1;
214
215 /* If we are performing cross jump optimizations, then initialize
216 tables mapping UIDs to EH regions to avoid incorrect movement
217 of insns from one EH region to another. */
218 if (flag_exceptions && cross_jump)
219 init_insn_eh_region (f, max_uid);
220
221 if (! mark_labels_only)
222 delete_barrier_successors (f);
223
224 /* Leave some extra room for labels and duplicate exit test insns
225 we make. */
226 max_jump_chain = max_uid * 14 / 10;
227 jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));
228
229 mark_all_labels (f, cross_jump);
230
231 /* Keep track of labels used from static data; we don't track them
232 closely enough to delete them here, so make sure their reference
233 count doesn't drop to zero. */
234
235 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
236 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
237 LABEL_NUSES (XEXP (insn, 0))++;
238
239 check_exception_handler_labels ();
240
241 /* Keep track of labels used for marking handlers for exception
242 regions; they cannot usually be deleted. */
243
244 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
245 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
246 LABEL_NUSES (XEXP (insn, 0))++;
247
248 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
249 notes and recompute LABEL_NUSES. */
250 if (mark_labels_only)
251 goto end;
252
253 if (! minimal)
254 exception_optimize ();
255
256 last_insn = delete_unreferenced_labels (f);
257
258 if (noop_moves)
259 delete_noop_moves (f);
260
261 /* If we haven't yet gotten to reload and we have just run regscan,
262 delete any insn that sets a register that isn't used elsewhere.
263 This helps some of the optimizations below by having fewer insns
264 being jumped around. */
265
266 if (optimize && ! reload_completed && after_regscan)
267 for (insn = f; insn; insn = next)
268 {
269 rtx set = single_set (insn);
270
271 next = NEXT_INSN (insn);
272
273 if (set && GET_CODE (SET_DEST (set)) == REG
274 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
275 && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
276 /* We use regno_last_note_uid so as not to delete the setting
277 of a reg that's used in notes. A subsequent optimization
278 might arrange to use that reg for real. */
279 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
280 && ! side_effects_p (SET_SRC (set))
281 && ! find_reg_note (insn, REG_RETVAL, 0)
282 /* An ADDRESSOF expression can turn into a use of the internal arg
283 pointer, so do not delete the initialization of the internal
284 arg pointer yet. If it is truly dead, flow will delete the
285 initializing insn. */
286 && SET_DEST (set) != current_function_internal_arg_pointer)
287 delete_insn (insn);
288 }
289
290 /* Now iterate optimizing jumps until nothing changes over one pass. */
291 changed = 1;
292 old_max_reg = max_reg_num ();
293 while (changed)
294 {
295 changed = 0;
296
297 for (insn = f; insn; insn = next)
298 {
299 rtx reallabelprev;
300 rtx temp, temp1, temp2 = NULL_RTX;
301 rtx temp4 ATTRIBUTE_UNUSED;
302 rtx nlabel;
303 int this_is_any_uncondjump;
304 int this_is_any_condjump;
305 int this_is_onlyjump;
306
307 next = NEXT_INSN (insn);
308
309 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
310 jump. Try to optimize by duplicating the loop exit test if so.
311 This is only safe immediately after regscan, because it uses
312 the values of regno_first_uid and regno_last_uid. */
313 if (after_regscan && GET_CODE (insn) == NOTE
314 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
315 && (temp1 = next_nonnote_insn (insn)) != 0
316 && any_uncondjump_p (temp1)
317 && onlyjump_p (temp1))
318 {
319 temp = PREV_INSN (insn);
320 if (duplicate_loop_exit_test (insn))
321 {
322 changed = 1;
323 next = NEXT_INSN (temp);
324 continue;
325 }
326 }
327
328 if (GET_CODE (insn) != JUMP_INSN)
329 continue;
330
331 this_is_any_condjump = any_condjump_p (insn);
332 this_is_any_uncondjump = any_uncondjump_p (insn);
333 this_is_onlyjump = onlyjump_p (insn);
334
335 /* Tension the labels in dispatch tables. */
336
337 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
338 changed |= tension_vector_labels (PATTERN (insn), 0);
339 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
340 changed |= tension_vector_labels (PATTERN (insn), 1);
341
342 /* See if this jump goes to another jump and redirect if so. */
343 nlabel = follow_jumps (JUMP_LABEL (insn));
344 if (nlabel != JUMP_LABEL (insn))
345 changed |= redirect_jump (insn, nlabel, 1);
346
347 if (! optimize || minimal)
348 continue;
349
350 /* If a dispatch table always goes to the same place,
351 get rid of it and replace the insn that uses it. */
352
353 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
354 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
355 {
356 int i;
357 rtx pat = PATTERN (insn);
358 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
359 int len = XVECLEN (pat, diff_vec_p);
360 rtx dispatch = prev_real_insn (insn);
361 rtx set;
362
363 for (i = 0; i < len; i++)
364 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
365 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
366 break;
367
368 if (i == len
369 && dispatch != 0
370 && GET_CODE (dispatch) == JUMP_INSN
371 && JUMP_LABEL (dispatch) != 0
372 /* Don't mess with a casesi insn.
373 XXX according to the comment before computed_jump_p(),
374 all casesi insns should be a parallel of the jump
375 and a USE of a LABEL_REF. */
376 && ! ((set = single_set (dispatch)) != NULL
377 && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
378 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
379 {
380 redirect_tablejump (dispatch,
381 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
382 changed = 1;
383 }
384 }
385
386 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
387
388 /* Detect jump to following insn. */
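/* For instance (hypothetical):

	if (a) goto L1;	=>	(jump deleted)
     L1:		     L1:

   A jump, conditional or not, whose label follows immediately
   never changes the flow of control.  */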
389 if (reallabelprev == insn
390 && (this_is_any_condjump || this_is_any_uncondjump)
391 && this_is_onlyjump)
392 {
393 next = next_real_insn (JUMP_LABEL (insn));
394 delete_jump (insn);
395
396 /* Remove the "inactive" but "real" insns (i.e. uses and
397 clobbers) in between here and there. */
398 temp = insn;
399 while ((temp = next_real_insn (temp)) != next)
400 delete_insn (temp);
401
402 changed = 1;
403 continue;
404 }
405
406 /* Detect a conditional jump going to the same place
407 as an immediately following unconditional jump. */
408 else if (this_is_any_condjump && this_is_onlyjump
409 && (temp = next_active_insn (insn)) != 0
410 && simplejump_p (temp)
411 && (next_active_insn (JUMP_LABEL (insn))
412 == next_active_insn (JUMP_LABEL (temp))))
413 {
414 /* Don't mess up test coverage analysis. */
415 temp2 = temp;
416 if (flag_test_coverage && !reload_completed)
417 for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
418 if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
419 break;
420
421 if (temp2 == temp)
422 {
423 delete_jump (insn);
424 changed = 1;
425 continue;
426 }
427 }
428
429 /* Detect a conditional jump jumping over an unconditional jump. */
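/* For instance (hypothetical):

	if (a) goto L1;	=>	if (!a) goto L2;
	goto L2;	     L1:
     L1:

   The conditional jump is inverted onto the unconditional jump's
   target, and the unconditional jump is deleted.  */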
430
431 else if (this_is_any_condjump
432 && reallabelprev != 0
433 && GET_CODE (reallabelprev) == JUMP_INSN
434 && prev_active_insn (reallabelprev) == insn
435 && no_labels_between_p (insn, reallabelprev)
436 && any_uncondjump_p (reallabelprev)
437 && onlyjump_p (reallabelprev))
438 {
439 /* When we invert the unconditional jump, we will be
440 decrementing the usage count of its old label.
441 Make sure that we don't delete it now because that
442 might cause the following code to be deleted. */
443 rtx prev_uses = prev_nonnote_insn (reallabelprev);
444 rtx prev_label = JUMP_LABEL (insn);
445
446 if (prev_label)
447 ++LABEL_NUSES (prev_label);
448
449 if (invert_jump (insn, JUMP_LABEL (reallabelprev), 1))
450 {
451 /* It is very likely that if there are USE insns before
452 this jump, they hold REG_DEAD notes. These REG_DEAD
453 notes are no longer valid due to this optimization,
454 and will cause the life analysis done by following passes
455 (notably delayed-branch scheduling) to think that
456 these registers are dead when they are not.
457
458 To prevent this trouble, we just remove the USE insns
459 from the insn chain. */
460
461 while (prev_uses && GET_CODE (prev_uses) == INSN
462 && GET_CODE (PATTERN (prev_uses)) == USE)
463 {
464 rtx useless = prev_uses;
465 prev_uses = prev_nonnote_insn (prev_uses);
466 delete_insn (useless);
467 }
468
469 delete_insn (reallabelprev);
470 changed = 1;
471 }
472
473 /* We can now safely delete the label if it is unreferenced
474 since the delete_insn above has deleted the BARRIER. */
475 if (prev_label && --LABEL_NUSES (prev_label) == 0)
476 delete_insn (prev_label);
477
478 next = NEXT_INSN (insn);
479 }
480
481 /* If we have an unconditional jump preceded by a USE, try to put
482 the USE before the target and jump there. This simplifies many
483 of the optimizations below since we don't have to worry about
484 dealing with these USE insns. We only do this if the label
485 being branched to already has the identical USE or if code
486 never falls through to that label. */
487
488 else if (this_is_any_uncondjump
489 && (temp = prev_nonnote_insn (insn)) != 0
490 && GET_CODE (temp) == INSN
491 && GET_CODE (PATTERN (temp)) == USE
492 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
493 && (GET_CODE (temp1) == BARRIER
494 || (GET_CODE (temp1) == INSN
495 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
496 /* Don't do this optimization if we have a loop containing
497 only the USE instruction, and the loop start label has
498 a usage count of 1. This is because we will redo this
499 optimization every time through the outer loop, and jump
500 opt will never exit. */
501 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
502 && temp2 == JUMP_LABEL (insn)
503 && LABEL_NUSES (temp2) == 1))
504 {
505 if (GET_CODE (temp1) == BARRIER)
506 {
507 emit_insn_after (PATTERN (temp), temp1);
508 temp1 = NEXT_INSN (temp1);
509 }
510
511 delete_insn (temp);
512 redirect_jump (insn, get_label_before (temp1), 1);
513 reallabelprev = prev_real_insn (temp1);
514 changed = 1;
515 next = NEXT_INSN (insn);
516 }
517
518 #ifdef HAVE_trap
519 /* Detect a conditional jump jumping over an unconditional trap. */
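/* For instance (hypothetical):

	if (a) goto L1;	=>	if (!a) trap;
	trap;		     L1:
     L1:

   The jump and the unconditional trap fold into one conditional
   trap with the reversed condition.  */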
520 if (HAVE_trap
521 && this_is_any_condjump && this_is_onlyjump
522 && reallabelprev != 0
523 && GET_CODE (reallabelprev) == INSN
524 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
525 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
526 && prev_active_insn (reallabelprev) == insn
527 && no_labels_between_p (insn, reallabelprev)
528 && (temp2 = get_condition (insn, &temp4))
529 && can_reverse_comparison_p (temp2, insn))
530 {
531 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
532 XEXP (temp2, 0), XEXP (temp2, 1),
533 TRAP_CODE (PATTERN (reallabelprev)));
534
535 if (new)
536 {
537 emit_insn_before (new, temp4);
538 delete_insn (reallabelprev);
539 delete_jump (insn);
540 changed = 1;
541 continue;
542 }
543 }
544 /* Detect a jump jumping to an unconditional trap. */
545 else if (HAVE_trap && this_is_onlyjump
546 && (temp = next_active_insn (JUMP_LABEL (insn)))
547 && GET_CODE (temp) == INSN
548 && GET_CODE (PATTERN (temp)) == TRAP_IF
549 && (this_is_any_uncondjump
550 || (this_is_any_condjump
551 && (temp2 = get_condition (insn, &temp4)))))
552 {
553 rtx tc = TRAP_CONDITION (PATTERN (temp));
554
555 if (tc == const_true_rtx
556 || (! this_is_any_uncondjump && rtx_equal_p (temp2, tc)))
557 {
558 rtx new;
559 /* Replace an unconditional jump to a trap with a trap. */
560 if (this_is_any_uncondjump)
561 {
562 emit_barrier_after (emit_insn_before (gen_trap (), insn));
563 delete_jump (insn);
564 changed = 1;
565 continue;
566 }
567 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
568 XEXP (temp2, 1),
569 TRAP_CODE (PATTERN (temp)));
570 if (new)
571 {
572 emit_insn_before (new, temp4);
573 delete_jump (insn);
574 changed = 1;
575 continue;
576 }
577 }
578 /* If the trap condition and jump condition are mutually
579 exclusive, redirect the jump to the following insn. */
580 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
581 && this_is_any_condjump
582 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
583 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
584 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
585 && redirect_jump (insn, get_label_after (temp), 1))
586 {
587 changed = 1;
588 continue;
589 }
590 }
591 #endif
592 else
593 {
594 /* Now that the jump has been tensioned,
595 try cross jumping: check for identical code
596 before the jump and before its target label. */
597
598 /* First, cross jumping of conditional jumps: */
599
600 if (cross_jump && condjump_p (insn))
601 {
602 rtx newjpos, newlpos;
603 rtx x = prev_real_insn (JUMP_LABEL (insn));
604
605 /* A conditional jump may be crossjumped
606 only if the place it jumps to follows
607 an opposing jump that comes back here. */
608
609 if (x != 0 && ! jump_back_p (x, insn))
610 /* We have no opposing jump;
611 cannot cross jump this insn. */
612 x = 0;
613
614 newjpos = 0;
615 /* TARGET is nonzero if it is ok to cross jump
616 to code before TARGET. If so, see if the code matches. */
617 if (x != 0)
618 find_cross_jump (insn, x, 2,
619 &newjpos, &newlpos);
620
621 if (newjpos != 0)
622 {
623 do_cross_jump (insn, newjpos, newlpos);
624 /* Make the old conditional jump
625 into an unconditional one. */
626 SET_SRC (PATTERN (insn))
627 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
628 INSN_CODE (insn) = -1;
629 emit_barrier_after (insn);
630 /* Add to jump_chain unless this is a new label
631 whose UID is too large. */
632 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
633 {
634 jump_chain[INSN_UID (insn)]
635 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
636 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
637 }
638 changed = 1;
639 next = insn;
640 }
641 }
642
643 /* Cross jumping of unconditional jumps:
644 a few differences. */
645
646 if (cross_jump && simplejump_p (insn))
647 {
648 rtx newjpos, newlpos;
649 rtx target;
650
651 newjpos = 0;
652
653 /* TARGET is nonzero if it is ok to cross jump
654 to code before TARGET. If so, see if the code matches. */
655 find_cross_jump (insn, JUMP_LABEL (insn), 1,
656 &newjpos, &newlpos);
657
658 /* If we cannot cross jump to code before the label,
659 see if we can cross jump to another jump to
660 the same label. */
661 /* Try each other jump to this label. */
662 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
663 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
664 target != 0 && newjpos == 0;
665 target = jump_chain[INSN_UID (target)])
666 if (target != insn
667 && JUMP_LABEL (target) == JUMP_LABEL (insn)
668 /* Ignore TARGET if it's deleted. */
669 && ! INSN_DELETED_P (target))
670 find_cross_jump (insn, target, 2,
671 &newjpos, &newlpos);
672
673 if (newjpos != 0)
674 {
675 do_cross_jump (insn, newjpos, newlpos);
676 changed = 1;
677 next = insn;
678 }
679 }
680
681 /* This code was dead in the previous jump.c! */
682 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
683 {
684 /* Return insns all "jump to the same place"
685 so we can cross-jump between any two of them. */
686
687 rtx newjpos, newlpos, target;
688
689 newjpos = 0;
690
691 /* If we cannot cross jump to code before the label,
692 see if we can cross jump to another jump to
693 the same label. */
694 /* Try each other jump to this label. */
695 for (target = jump_chain[0];
696 target != 0 && newjpos == 0;
697 target = jump_chain[INSN_UID (target)])
698 if (target != insn
699 && ! INSN_DELETED_P (target)
700 && GET_CODE (PATTERN (target)) == RETURN)
701 find_cross_jump (insn, target, 2,
702 &newjpos, &newlpos);
703
704 if (newjpos != 0)
705 {
706 do_cross_jump (insn, newjpos, newlpos);
707 changed = 1;
708 next = insn;
709 }
710 }
711 }
712 }
713
714 first = 0;
715 }
716
717 /* Delete extraneous line number notes.
718 Note that two consecutive notes for different lines are not really
719 extraneous. There should be some indication where that line belonged,
720 even if it became empty. */
721
722 {
723 rtx last_note = 0;
724
725 for (insn = f; insn; insn = NEXT_INSN (insn))
726 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
727 {
728 /* Delete this note if it is identical to previous note. */
729 if (last_note
730 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
731 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
732 {
733 delete_insn (insn);
734 continue;
735 }
736
737 last_note = insn;
738 }
739 }
740
741 end:
742 /* Clean up. */
743 free (jump_chain);
744 jump_chain = 0;
745 }
746 \f
747 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
748 notes whose labels don't occur in the insn any more. Returns the
749 largest INSN_UID found. */
750 static int
751 init_label_info (f)
752 rtx f;
753 {
754 int largest_uid = 0;
755 rtx insn;
756
757 for (insn = f; insn; insn = NEXT_INSN (insn))
758 {
759 if (GET_CODE (insn) == CODE_LABEL)
760 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
761 else if (GET_CODE (insn) == JUMP_INSN)
762 JUMP_LABEL (insn) = 0;
763 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
764 {
765 rtx note, next;
766
767 for (note = REG_NOTES (insn); note; note = next)
768 {
769 next = XEXP (note, 1);
770 if (REG_NOTE_KIND (note) == REG_LABEL
771 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
772 remove_note (insn, note);
773 }
774 }
775 if (INSN_UID (insn) > largest_uid)
776 largest_uid = INSN_UID (insn);
777 }
778
779 return largest_uid;
780 }
781
782 /* Delete insns following barriers, up to next label.
783
784 Also delete no-op jumps created by gcse. */
785
786 static void
787 delete_barrier_successors (f)
788 rtx f;
789 {
790 rtx insn;
791 rtx set;
792
793 for (insn = f; insn;)
794 {
795 if (GET_CODE (insn) == BARRIER)
796 {
797 insn = NEXT_INSN (insn);
798
799 never_reached_warning (insn);
800
801 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
802 {
803 if (GET_CODE (insn) == NOTE
804 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
805 insn = NEXT_INSN (insn);
806 else
807 insn = delete_insn (insn);
808 }
809 /* INSN is now the code_label. */
810 }
811
812 /* Also remove (set (pc) (pc)) insns which can be created by
813 gcse. We eliminate such insns now to avoid having them
814 cause problems later. */
815 else if (GET_CODE (insn) == JUMP_INSN
816 && (set = pc_set (insn)) != NULL
817 && SET_SRC (set) == pc_rtx
818 && SET_DEST (set) == pc_rtx
819 && onlyjump_p (insn))
820 insn = delete_insn (insn);
821
822 else
823 insn = NEXT_INSN (insn);
824 }
825 }
826
827 /* Mark the label each jump jumps to.
828 Combine consecutive labels, and count uses of labels.
829
830 For each label, make a chain (using `jump_chain')
831 of all the *unconditional* jumps that jump to it;
832 also make a chain of all returns.
833
834 CROSS_JUMP indicates whether we are doing cross jumping
835 and, if we are, whether we will be paying attention to
836 death notes or not. */
837
838 static void
839 mark_all_labels (f, cross_jump)
840 rtx f;
841 int cross_jump;
842 {
843 rtx insn;
844
845 for (insn = f; insn; insn = NEXT_INSN (insn))
846 if (INSN_P (insn))
847 {
848 if (GET_CODE (insn) == CALL_INSN
849 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
850 {
851 mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
852 mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
853 mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);
854 continue;
855 }
856
857 mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
858 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
859 {
860 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
861 {
862 jump_chain[INSN_UID (insn)]
863 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
864 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
865 }
866 if (GET_CODE (PATTERN (insn)) == RETURN)
867 {
868 jump_chain[INSN_UID (insn)] = jump_chain[0];
869 jump_chain[0] = insn;
870 }
871 }
872 }
873 }
874
875 /* Delete all labels that are no longer referenced.
876 Also find and return the last insn. */
877
878 static rtx
879 delete_unreferenced_labels (f)
880 rtx f;
881 {
882 rtx final = NULL_RTX;
883 rtx insn;
884
885 for (insn = f; insn;)
886 {
887 if (GET_CODE (insn) == CODE_LABEL
888 && LABEL_NUSES (insn) == 0
889 && LABEL_ALTERNATE_NAME (insn) == NULL)
890 insn = delete_insn (insn);
891 else
892 {
893 final = insn;
894 insn = NEXT_INSN (insn);
895 }
896 }
897
898 return final;
899 }
900
901 /* Delete various simple forms of moves which have no necessary
902 side effect. */
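
/* Illustrative examples (hypothetical RTL) of the forms removed
   below:

	(set (reg 65) (reg 65))		trivial self-move
	(set (reg 66) (reg 67))		when 66 and 67 were given the
					same hard register
	(set (mem X) (mem X))		non-volatile memory only

   plus the analogous PARALLEL and no-op bit-field store forms
   handled at the end of the function.  */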
903
904 static void
905 delete_noop_moves (f)
906 rtx f;
907 {
908 rtx insn, next;
909
910 for (insn = f; insn;)
911 {
912 next = NEXT_INSN (insn);
913
914 if (GET_CODE (insn) == INSN)
915 {
916 register rtx body = PATTERN (insn);
917
918 /* Detect and delete no-op move instructions
919 resulting from not allocating a parameter in a register. */
920
921 if (GET_CODE (body) == SET
922 && (SET_DEST (body) == SET_SRC (body)
923 || (GET_CODE (SET_DEST (body)) == MEM
924 && GET_CODE (SET_SRC (body)) == MEM
925 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
926 && ! (GET_CODE (SET_DEST (body)) == MEM
927 && MEM_VOLATILE_P (SET_DEST (body)))
928 && ! (GET_CODE (SET_SRC (body)) == MEM
929 && MEM_VOLATILE_P (SET_SRC (body))))
930 delete_computation (insn);
931
932 /* Detect and ignore no-op move instructions
933 resulting from smart or fortuitous register allocation. */
934
935 else if (GET_CODE (body) == SET)
936 {
937 int sreg = true_regnum (SET_SRC (body));
938 int dreg = true_regnum (SET_DEST (body));
939
940 if (sreg == dreg && sreg >= 0)
941 delete_insn (insn);
942 else if (sreg >= 0 && dreg >= 0)
943 {
944 rtx trial;
945 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
946 sreg, NULL_PTR, dreg,
947 GET_MODE (SET_SRC (body)));
948
949 if (tem != 0
950 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
951 {
952 /* DREG may have been the target of a REG_DEAD note in
953 the insn which makes INSN redundant. If so, reorg
954 would still think it is dead. So search for such a
955 note and delete it if we find it. */
956 if (! find_regno_note (insn, REG_UNUSED, dreg))
957 for (trial = prev_nonnote_insn (insn);
958 trial && GET_CODE (trial) != CODE_LABEL;
959 trial = prev_nonnote_insn (trial))
960 if (find_regno_note (trial, REG_DEAD, dreg))
961 {
962 remove_death (dreg, trial);
963 break;
964 }
965
966 /* Deleting insn could lose a death-note for SREG. */
967 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
968 {
969 /* Change this into a USE so that we won't emit
970 code for it, but still can keep the note. */
971 PATTERN (insn)
972 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
973 INSN_CODE (insn) = -1;
974 /* Remove all reg notes but the REG_DEAD one. */
975 REG_NOTES (insn) = trial;
976 XEXP (trial, 1) = NULL_RTX;
977 }
978 else
979 delete_insn (insn);
980 }
981 }
982 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
983 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
984 NULL_PTR, 0,
985 GET_MODE (SET_DEST (body))))
986 {
987 /* This handles the case where we have two consecutive
988 assignments of the same constant to pseudos that didn't
989 get a hard reg. Each SET from the constant will be
990 converted into a SET of the spill register and an
991 output reload will be made following it. This produces
992 two loads of the same constant into the same spill
993 register. */
994
995 rtx in_insn = insn;
996
997 /* Look back for a death note for the first reg.
998 If there is one, it is no longer accurate. */
999 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
1000 {
1001 if ((GET_CODE (in_insn) == INSN
1002 || GET_CODE (in_insn) == JUMP_INSN)
1003 && find_regno_note (in_insn, REG_DEAD, dreg))
1004 {
1005 remove_death (dreg, in_insn);
1006 break;
1007 }
1008 in_insn = PREV_INSN (in_insn);
1009 }
1010
1011 /* Delete the second load of the value. */
1012 delete_insn (insn);
1013 }
1014 }
1015 else if (GET_CODE (body) == PARALLEL)
1016 {
1017 /* If each part is a set between two identical registers or
1018 a USE or CLOBBER, delete the insn. */
1019 int i, sreg, dreg;
1020 rtx tem;
1021
1022 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1023 {
1024 tem = XVECEXP (body, 0, i);
1025 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
1026 continue;
1027
1028 if (GET_CODE (tem) != SET
1029 || (sreg = true_regnum (SET_SRC (tem))) < 0
1030 || (dreg = true_regnum (SET_DEST (tem))) < 0
1031 || dreg != sreg)
1032 break;
1033 }
1034
1035 if (i < 0)
1036 delete_insn (insn);
1037 }
1038 /* Also delete insns to store bit fields if they are no-ops. */
1039 /* Not worth the hair to detect this in the big-endian case. */
1040 else if (! BYTES_BIG_ENDIAN
1041 && GET_CODE (body) == SET
1042 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
1043 && XEXP (SET_DEST (body), 2) == const0_rtx
1044 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
1045 && ! (GET_CODE (SET_SRC (body)) == MEM
1046 && MEM_VOLATILE_P (SET_SRC (body))))
1047 delete_insn (insn);
1048 }
1049 insn = next;
1050 }
1051 }
1052
1053 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
1054 jump. Assume that this unconditional jump is to the exit test code. If
1055 the code is sufficiently simple, make a copy of it before INSN,
1056 followed by a jump to the exit of the loop. Then delete the unconditional
1057 jump after INSN.
1058
1059 Return 1 if we made the change, else 0.
1060
1061 This is only safe immediately after a regscan pass because it uses the
1062 values of regno_first_uid and regno_last_uid. */
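
/* Illustrative sketch (hypothetical, shown at the C level): a loop
   laid out as

	LOOP_BEG			if (cond) goto top;
	goto test;			goto exit;
     top: body			=>	LOOP_BEG
     test: if (cond) goto top;	     top: body
	LOOP_END		     test: if (cond) goto top;
     exit:				LOOP_END
				     exit:

   A copy of the exit test decides up front whether the loop runs at
   all, and the body is then entered directly, in do-while form.  */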
1063
1064 static int
1065 duplicate_loop_exit_test (loop_start)
1066 rtx loop_start;
1067 {
1068 rtx insn, set, reg, p, link;
1069 rtx copy = 0, first_copy = 0;
1070 int num_insns = 0;
1071 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
1072 rtx lastexit;
1073 int max_reg = max_reg_num ();
1074 rtx *reg_map = 0;
1075
1076 /* Scan the exit code. We do not perform this optimization if any insn:
1077
1078 is a CALL_INSN
1079 is a CODE_LABEL
1080 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
1081 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
1082 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
1083 is not valid.
1084
1085 We also do not do this if we find an insn with ASM_OPERANDS. While
1086 this restriction should not be necessary, copying an insn with
1087 ASM_OPERANDS can confuse asm_noperands in some cases.
1088
1089 Also, don't do this if the exit code is more than 20 insns. */
1090
1091 for (insn = exitcode;
1092 insn
1093 && ! (GET_CODE (insn) == NOTE
1094 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
1095 insn = NEXT_INSN (insn))
1096 {
1097 switch (GET_CODE (insn))
1098 {
1099 case CODE_LABEL:
1100 case CALL_INSN:
1101 return 0;
1102 case NOTE:
1103 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
1104 a jump immediately after the loop start that branches outside
1105 the loop but within an outer loop, near the exit test.
1106 If we copied this exit test and created a phony
1107 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
1108 before the exit test look like they could be safely moved
1109 out of the loop even though they might never actually be executed.
1110 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
1111
1112 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1113 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
1114 return 0;
1115
1116 if (optimize < 2
1117 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1118 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1119 /* If we were to duplicate this code, we would not move
1120 the BLOCK notes, and so debugging the moved code would
1121 be difficult. Thus, we only move the code with -O2 or
1122 higher. */
1123 return 0;
1124
1125 break;
1126 case JUMP_INSN:
1127 case INSN:
1128 /* The code below would grossly mishandle REG_WAS_0 notes,
1129 so get rid of them here. */
1130 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
1131 remove_note (insn, p);
1132 if (++num_insns > 20
1133 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1134 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
1135 return 0;
1136 break;
1137 default:
1138 break;
1139 }
1140 }
1141
1142 /* Unless INSN is zero, we can do the optimization. */
1143 if (insn == 0)
1144 return 0;
1145
1146 lastexit = insn;
1147
1148 /* See if any insn sets a register only used in the loop exit code and
1149 not a user variable. If so, replace it with a new register. */
1150 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1151 if (GET_CODE (insn) == INSN
1152 && (set = single_set (insn)) != 0
1153 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
1154 || (GET_CODE (reg) == SUBREG
1155 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
1156 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1157 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
1158 {
1159 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
1160 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
1161 break;
1162
1163 if (p != lastexit)
1164 {
1165 /* We can do the replacement. Allocate reg_map if this is the
1166 first replacement we found. */
1167 if (reg_map == 0)
1168 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
1169
1170 REG_LOOP_TEST_P (reg) = 1;
1171
1172 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
1173 }
1174 }
1175
1176 /* Now copy each insn. */
1177 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1178 {
1179 switch (GET_CODE (insn))
1180 {
1181 case BARRIER:
1182 copy = emit_barrier_before (loop_start);
1183 break;
1184 case NOTE:
1185 /* Only copy line-number notes. */
1186 if (NOTE_LINE_NUMBER (insn) >= 0)
1187 {
1188 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
1189 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
1190 }
1191 break;
1192
1193 case INSN:
1194 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
1195 if (reg_map)
1196 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1197
1198 mark_jump_label (PATTERN (copy), copy, 0, 0);
1199
1200 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
1201 make them. */
1202 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1203 if (REG_NOTE_KIND (link) != REG_LABEL)
1204 {
1205 if (GET_CODE (link) == EXPR_LIST)
1206 REG_NOTES (copy)
1207 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
1208 XEXP (link, 0),
1209 REG_NOTES (copy)));
1210 else
1211 REG_NOTES (copy)
1212 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
1213 XEXP (link, 0),
1214 REG_NOTES (copy)));
1215 }
1216
1217 if (reg_map && REG_NOTES (copy))
1218 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1219 break;
1220
1221 case JUMP_INSN:
1222 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
1223 loop_start);
1224 if (reg_map)
1225 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1226 mark_jump_label (PATTERN (copy), copy, 0, 0);
1227 if (REG_NOTES (insn))
1228 {
1229 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
1230 if (reg_map)
1231 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1232 }
1233
1234 /* If this is a simple jump, add it to the jump chain. */
1235
1236 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
1237 && simplejump_p (copy))
1238 {
1239 jump_chain[INSN_UID (copy)]
1240 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1241 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1242 }
1243 break;
1244
1245 default:
1246 abort ();
1247 }
1248
1249 /* Record the first insn we copied. We need it so that we can
1250 scan the copied insns for new pseudo registers. */
1251 if (! first_copy)
1252 first_copy = copy;
1253 }
1254
1255 /* Now clean up by emitting a jump to the end label and deleting the jump
1256 at the start of the loop. */
1257 if (! copy || GET_CODE (copy) != BARRIER)
1258 {
1259 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
1260 loop_start);
1261
1262 /* Record the first insn we copied. We need it so that we can
1263 scan the copied insns for new pseudo registers. This may not
1264 be strictly necessary since we should have copied at least one
1265 insn above. But I am going to be safe. */
1266 if (! first_copy)
1267 first_copy = copy;
1268
1269 mark_jump_label (PATTERN (copy), copy, 0, 0);
1270 if (INSN_UID (copy) < max_jump_chain
1271 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
1272 {
1273 jump_chain[INSN_UID (copy)]
1274 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1275 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1276 }
1277 emit_barrier_before (loop_start);
1278 }
1279
1280 /* Now scan from the first insn we copied to the last insn we copied
1281 (copy) for new pseudo registers. Do this after the code to jump to
1282 the end label since that might create a new pseudo too. */
1283 reg_scan_update (first_copy, copy, max_reg);
1284
1285 /* Mark the exit code as the virtual top of the converted loop. */
1286 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
1287
1288 delete_insn (next_nonnote_insn (loop_start));
1289
1290 /* Clean up. */
1291 if (reg_map)
1292 free (reg_map);
1293
1294 return 1;
1295 }
1296 \f
1297 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
1298 eh-beg, eh-end notes between START and END out before START. Assume that
1299 END is not such a note. START may be such a note. Returns the value
1300 of the new starting insn, which may be different if the original start
1301 was such a note. */
1302
1303 rtx
1304 squeeze_notes (start, end)
1305 rtx start, end;
1306 {
1307 rtx insn;
1308 rtx next;
1309
1310 for (insn = start; insn != end; insn = next)
1311 {
1312 next = NEXT_INSN (insn);
1313 if (GET_CODE (insn) == NOTE
1314 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
1315 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1316 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1317 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
1318 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
1319 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP
1320 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1321 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1322 {
1323 if (insn == start)
1324 start = next;
1325 else
1326 {
1327 rtx prev = PREV_INSN (insn);
1328 PREV_INSN (insn) = PREV_INSN (start);
1329 NEXT_INSN (insn) = start;
1330 NEXT_INSN (PREV_INSN (insn)) = insn;
1331 PREV_INSN (NEXT_INSN (insn)) = insn;
1332 NEXT_INSN (prev) = next;
1333 PREV_INSN (next) = prev;
1334 }
1335 }
1336 }
1337
1338 return start;
1339 }
1340 \f
1341 /* Compare the instructions before insn E1 with those before E2
1342 to find an opportunity for cross jumping.
1343 (This means detecting identical sequences of insns followed by
1344 jumps to the same place, or followed by a label and a jump
1345 to that label, and replacing one with a jump to the other.)
1346
1347 Assume E1 is a jump that jumps to label E2
1348 (that is not always true but it might as well be).
1349 Find the longest possible equivalent sequences
1350 and store the first insns of those sequences into *F1 and *F2.
1351 Store zero there if no equivalent preceding instructions are found.
1352
1353 We give up if we find a label in stream 1.
1354 Actually we could transfer that label into stream 2. */
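
/* For illustration (hypothetical): given matching insns A; B before
   a jump to L and before L itself,

	A; B; goto L;			goto L2;
	...			=>	...
	A; B;  L: ...		     L2: A; B;  L: ...

   the copy of A; B before the jump is deleted and the jump is
   retargeted to a label placed before the surviving copy.  */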
1355
1356 static void
1357 find_cross_jump (e1, e2, minimum, f1, f2)
1358 rtx e1, e2;
1359 int minimum;
1360 rtx *f1, *f2;
1361 {
1362 register rtx i1 = e1, i2 = e2;
1363 register rtx p1, p2;
1364 int lose = 0;
1365
1366 rtx last1 = 0, last2 = 0;
1367 rtx afterlast1 = 0, afterlast2 = 0;
1368
1369 *f1 = 0;
1370 *f2 = 0;
1371
1372 while (1)
1373 {
1374 i1 = prev_nonnote_insn (i1);
1375
1376 i2 = PREV_INSN (i2);
1377 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
1378 i2 = PREV_INSN (i2);
1379
1380 if (i1 == 0)
1381 break;
1382
1383 /* Don't allow the range of insns preceding E1 or E2
1384 to include the other (E2 or E1). */
1385 if (i2 == e1 || i1 == e2)
1386 break;
1387
1388 /* If we will get to this code by jumping, those jumps will be
1389 tensioned to go directly to the new label (before I2),
1390 so this cross-jumping won't cost extra. So reduce the minimum. */
1391 if (GET_CODE (i1) == CODE_LABEL)
1392 {
1393 --minimum;
1394 break;
1395 }
1396
1397 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
1398 break;
1399
1400 /* Avoid moving insns across EH regions if either of the insns
1401 can throw. */
1402 if (flag_exceptions
1403 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
1404 && !in_same_eh_region (i1, i2))
1405 break;
1406
1407 p1 = PATTERN (i1);
1408 p2 = PATTERN (i2);
1409
1410 /* If this is a CALL_INSN, compare register usage information.
1411 If we don't check this on stack register machines, the two
1412 CALL_INSNs might be merged leaving reg-stack.c with mismatching
1413 numbers of stack registers in the same basic block.
1414 If we don't check this on machines with delay slots, a delay slot may
1415 be filled that clobbers a parameter expected by the subroutine.
1416
1417 ??? We take the simple route for now and assume that if they're
1418 equal, they were constructed identically. */
1419
1420 if (GET_CODE (i1) == CALL_INSN
1421 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
1422 CALL_INSN_FUNCTION_USAGE (i2)))
1423 lose = 1;
1424
1425 #ifdef STACK_REGS
1426 /* If cross_jump_death_matters is not 0, the insn's mode
1427 indicates whether or not the insn contains any stack-like
1428 regs. */
1429
1430 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
1431 {
1432 /* If register stack conversion has already been done, then
1433 death notes must also be compared before it is certain that
1434 the two instruction streams match. */
1435
1436 rtx note;
1437 HARD_REG_SET i1_regset, i2_regset;
1438
1439 CLEAR_HARD_REG_SET (i1_regset);
1440 CLEAR_HARD_REG_SET (i2_regset);
1441
1442 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
1443 if (REG_NOTE_KIND (note) == REG_DEAD
1444 && STACK_REG_P (XEXP (note, 0)))
1445 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
1446
1447 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
1448 if (REG_NOTE_KIND (note) == REG_DEAD
1449 && STACK_REG_P (XEXP (note, 0)))
1450 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
1451
1452 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
1453
1454 lose = 1;
1455
1456 done:
1457 ;
1458 }
1459 #endif
1460
1461 /* Don't allow old-style asm or volatile extended asms to be accepted
1462 for cross jumping purposes. It is conceptually correct to allow
1463 them, since cross-jumping preserves the dynamic instruction order
1464 even though it is changing the static instruction order. However,
1465 if an asm is being used to emit an assembler pseudo-op, such as
1466 the MIPS `.set reorder' pseudo-op, then the static instruction order
1467 matters and it must be preserved. */
1468 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
1469 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
1470 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
1471 lose = 1;
1472
1473 if (lose || GET_CODE (p1) != GET_CODE (p2)
1474 || ! rtx_renumbered_equal_p (p1, p2))
1475 {
1476 /* The following code helps take care of G++ cleanups. */
1477 rtx equiv1;
1478 rtx equiv2;
1479
1480 if (!lose && GET_CODE (p1) == GET_CODE (p2)
1481 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
1482 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
1483 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
1484 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
1485 /* If the equivalences are not to a constant, they may
1486 reference pseudos that no longer exist, so we can't
1487 use them. */
1488 && CONSTANT_P (XEXP (equiv1, 0))
1489 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
1490 {
1491 rtx s1 = single_set (i1);
1492 rtx s2 = single_set (i2);
1493 if (s1 != 0 && s2 != 0
1494 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
1495 {
1496 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
1497 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
1498 if (! rtx_renumbered_equal_p (p1, p2))
1499 cancel_changes (0);
1500 else if (apply_change_group ())
1501 goto win;
1502 }
1503 }
1504
1505 /* Insns fail to match; cross jumping is limited to the following
1506 insns. */
1507
1508 #ifdef HAVE_cc0
1509 /* Don't allow the insn after a compare to be shared by
1510 cross-jumping unless the compare is also shared.
1511 Here, if either of these non-matching insns is a compare,
1512 exclude the following insn from possible cross-jumping. */
1513 if (sets_cc0_p (p1) || sets_cc0_p (p2))
1514 last1 = afterlast1, last2 = afterlast2, ++minimum;
1515 #endif
1516
1517 /* If cross-jumping here will feed a jump-around-jump
1518 optimization, this jump won't cost extra, so reduce
1519 the minimum. */
1520 if (GET_CODE (i1) == JUMP_INSN
1521 && JUMP_LABEL (i1)
1522 && prev_real_insn (JUMP_LABEL (i1)) == e1)
1523 --minimum;
1524 break;
1525 }
1526
1527 win:
1528 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
1529 {
1530 /* Ok, this insn is potentially includable in a cross-jump here. */
1531 afterlast1 = last1, afterlast2 = last2;
1532 last1 = i1, last2 = i2, --minimum;
1533 }
1534 }
1535
1536 if (minimum <= 0 && last1 != 0 && last1 != e1)
1537 *f1 = last1, *f2 = last2;
1538 }
1539
1540 static void
1541 do_cross_jump (insn, newjpos, newlpos)
1542 rtx insn, newjpos, newlpos;
1543 {
1544 /* Find an existing label at this point
1545 or make a new one if there is none. */
1546 register rtx label = get_label_before (newlpos);
1547
1548 /* Make the same jump insn jump to the new point. */
1549 if (GET_CODE (PATTERN (insn)) == RETURN)
1550 {
1551 /* Remove from jump chain of returns. */
1552 delete_from_jump_chain (insn);
1553 /* Change the insn. */
1554 PATTERN (insn) = gen_jump (label);
1555 INSN_CODE (insn) = -1;
1556 JUMP_LABEL (insn) = label;
1557 LABEL_NUSES (label)++;
1558 /* Add to new the jump chain. */
1559 if (INSN_UID (label) < max_jump_chain
1560 && INSN_UID (insn) < max_jump_chain)
1561 {
1562 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
1563 jump_chain[INSN_UID (label)] = insn;
1564 }
1565 }
1566 else
1567 redirect_jump (insn, label, 1);
1568
1569 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
1570 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
1571 the NEWJPOS stream. */
1572
1573 while (newjpos != insn)
1574 {
1575 rtx lnote;
1576
1577 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
1578 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
1579 || REG_NOTE_KIND (lnote) == REG_EQUIV)
1580 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
1581 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
1582 remove_note (newlpos, lnote);
1583
1584 delete_insn (newjpos);
1585 newjpos = next_real_insn (newjpos);
1586 newlpos = next_real_insn (newlpos);
1587 }
1588 }
1589 \f
1590 /* Return the label before INSN, or put a new label there. */
1591
1592 rtx
1593 get_label_before (insn)
1594 rtx insn;
1595 {
1596 rtx label;
1597
1598 /* Find an existing label at this point
1599 or make a new one if there is none. */
1600 label = prev_nonnote_insn (insn);
1601
1602 if (label == 0 || GET_CODE (label) != CODE_LABEL)
1603 {
1604 rtx prev = PREV_INSN (insn);
1605
1606 label = gen_label_rtx ();
1607 emit_label_after (label, prev);
1608 LABEL_NUSES (label) = 0;
1609 }
1610 return label;
1611 }
1612
1613 /* Return the label after INSN, or put a new label there. */
1614
1615 rtx
1616 get_label_after (insn)
1617 rtx insn;
1618 {
1619 rtx label;
1620
1621 /* Find an existing label at this point
1622 or make a new one if there is none. */
1623 label = next_nonnote_insn (insn);
1624
1625 if (label == 0 || GET_CODE (label) != CODE_LABEL)
1626 {
1627 label = gen_label_rtx ();
1628 emit_label_after (label, insn);
1629 LABEL_NUSES (label) = 0;
1630 }
1631 return label;
1632 }
1633 \f
1634 /* Return 1 if INSN is a jump that jumps to right after TARGET
1635 only on the condition that TARGET itself would drop through.
1636 Assumes that TARGET is a conditional jump. */
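
/* For instance (hypothetical):

     TARGET: if (a) goto L1;
     L2:  ...
	  ...
     INSN:   if (!a) goto L2;

   where L2 labels the insn just after TARGET: INSN branches back
   exactly when TARGET would have dropped through.  */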
1637
1638 static int
1639 jump_back_p (insn, target)
1640 rtx insn, target;
1641 {
1642 rtx cinsn, ctarget;
1643 enum rtx_code codei, codet;
1644 rtx set, tset;
1645
1646 if (! any_condjump_p (insn)
1647 || any_uncondjump_p (target)
1648 || target != prev_real_insn (JUMP_LABEL (insn)))
1649 return 0;
1650 set = pc_set (insn);
1651 tset = pc_set (target);
1652
1653 cinsn = XEXP (SET_SRC (set), 0);
1654 ctarget = XEXP (SET_SRC (tset), 0);
1655
1656 codei = GET_CODE (cinsn);
1657 codet = GET_CODE (ctarget);
1658
1659 if (XEXP (SET_SRC (set), 1) == pc_rtx)
1660 {
1661 if (! can_reverse_comparison_p (cinsn, insn))
1662 return 0;
1663 codei = reverse_condition (codei);
1664 }
1665
1666 if (XEXP (SET_SRC (tset), 2) == pc_rtx)
1667 {
1668 if (! can_reverse_comparison_p (ctarget, target))
1669 return 0;
1670 codet = reverse_condition (codet);
1671 }
1672
1673 return (codei == codet
1674 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
1675 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
1676 }
1677 \f
1678 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
1679 return non-zero if it is safe to reverse this comparison. It is safe
1680 if our floating-point format is not IEEE, if this is an NE or EQ
1681 comparison, or if this is known to be an integer comparison. */
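
/* Worked example: under IEEE arithmetic, (a > b) is not the negation
   of (a <= b), since both are false when either operand is a NaN, so
   a GT test on floats may not be reversed to LE. EQ and NE remain
   each other's negation even with NaNs, so they are always safe.  */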
1682
1683 int
1684 can_reverse_comparison_p (comparison, insn)
1685 rtx comparison;
1686 rtx insn;
1687 {
1688 rtx arg0;
1689
1690 /* If this is not actually a comparison, we can't reverse it. */
1691 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
1692 return 0;
1693
1694 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
1695 /* If this is an NE comparison, it is safe to reverse it to an EQ
1696 comparison and vice versa, even for floating point. If no operands
1697 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
1698 always false and NE is always true, so the reversal is also valid. */
1699 || flag_fast_math
1700 || GET_CODE (comparison) == NE
1701 || GET_CODE (comparison) == EQ)
1702 return 1;
1703
1704 arg0 = XEXP (comparison, 0);
1705
1706 /* Make sure ARG0 is one of the actual objects being compared. If we
1707 can't do this, we can't be sure the comparison can be reversed.
1708
1709 Handle cc0 and a MODE_CC register. */
1710 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
1711 #ifdef HAVE_cc0
1712 || arg0 == cc0_rtx
1713 #endif
1714 )
1715 {
1716 rtx prev, set;
1717
1718 /* First see if the condition code mode alone is enough to say we can
1719 reverse the condition. If not, then search backwards for a set of
1720 ARG0. We do not need to check for an insn clobbering it since valid
1721 code will contain a set with no intervening clobber. But
1722 stop when we reach a label. */
1723 #ifdef REVERSIBLE_CC_MODE
1724 if (GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC
1725 && REVERSIBLE_CC_MODE (GET_MODE (arg0)))
1726 return 1;
1727 #endif
1728
1729 if (! insn)
1730 return 0;
1731
1732 for (prev = prev_nonnote_insn (insn);
1733 prev != 0 && GET_CODE (prev) != CODE_LABEL;
1734 prev = prev_nonnote_insn (prev))
1735 if ((set = single_set (prev)) != 0
1736 && rtx_equal_p (SET_DEST (set), arg0))
1737 {
1738 arg0 = SET_SRC (set);
1739
1740 if (GET_CODE (arg0) == COMPARE)
1741 arg0 = XEXP (arg0, 0);
1742 break;
1743 }
1744 }
1745
1746 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
1747 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
1748 return (GET_CODE (arg0) == CONST_INT
1749 || (GET_MODE (arg0) != VOIDmode
1750 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
1751 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
1752 }
1753
1754 /* Given an rtx-code for a comparison, return the code for the negated
1755 comparison. If no such code exists, return UNKNOWN.
1756
1757 WATCH OUT! reverse_condition is not safe to use on a jump that might
1758 be acting on the results of an IEEE floating point comparison, because
1759 of the special treatment of non-signaling nans in comparisons.
1760 Use can_reverse_comparison_p to be sure. */
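
/* Examples: reverse_condition (GT) == LE and reverse_condition (LTU)
   == GEU, while the unordered codes (UNLT, UNEQ, LTGT, ...) yield
   UNKNOWN here; reverse_condition_maybe_unordered below is the
   variant to use when unordered results must be expressible.  */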
1761
1762 enum rtx_code
1763 reverse_condition (code)
1764 enum rtx_code code;
1765 {
1766 switch (code)
1767 {
1768 case EQ:
1769 return NE;
1770 case NE:
1771 return EQ;
1772 case GT:
1773 return LE;
1774 case GE:
1775 return LT;
1776 case LT:
1777 return GE;
1778 case LE:
1779 return GT;
1780 case GTU:
1781 return LEU;
1782 case GEU:
1783 return LTU;
1784 case LTU:
1785 return GEU;
1786 case LEU:
1787 return GTU;
1788 case UNORDERED:
1789 return ORDERED;
1790 case ORDERED:
1791 return UNORDERED;
1792
1793 case UNLT:
1794 case UNLE:
1795 case UNGT:
1796 case UNGE:
1797 case UNEQ:
1798 case LTGT:
1799 return UNKNOWN;
1800
1801 default:
1802 abort ();
1803 }
1804 }
1805
1806 /* Similar, but we're allowed to generate unordered comparisons, which
1807 makes it safe for IEEE floating-point. Of course, we have to recognize
1808 that the target will support them too... */
1809
1810 enum rtx_code
1811 reverse_condition_maybe_unordered (code)
1812 enum rtx_code code;
1813 {
1814 /* Non-IEEE formats don't have unordered conditions. */
1815 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
1816 return reverse_condition (code);
1817
1818 switch (code)
1819 {
1820 case EQ:
1821 return NE;
1822 case NE:
1823 return EQ;
1824 case GT:
1825 return UNLE;
1826 case GE:
1827 return UNLT;
1828 case LT:
1829 return UNGE;
1830 case LE:
1831 return UNGT;
1832 case LTGT:
1833 return UNEQ;
1834 case GTU:
1835 return LEU;
1836 case GEU:
1837 return LTU;
1838 case LTU:
1839 return GEU;
1840 case LEU:
1841 return GTU;
1842 case UNORDERED:
1843 return ORDERED;
1844 case ORDERED:
1845 return UNORDERED;
1846 case UNLT:
1847 return GE;
1848 case UNLE:
1849 return GT;
1850 case UNGT:
1851 return LE;
1852 case UNGE:
1853 return LT;
1854 case UNEQ:
1855 return LTGT;
1856
1857 default:
1858 abort ();
1859 }
1860 }
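
/* Worked example of why the unordered codes are needed: under IEEE
 semantics the negation of (a > b) must be true when a and b are
 unordered, so GT reverses to UNLE above, whereas plain
 reverse_condition would give LE, which is false on a NaN operand.
 Conversely the UN* codes reverse to the plain signed codes,
 e.g. UNLT to GE. */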
1861
1862 /* Similar, but return the code when two operands of a comparison are swapped.
1863 This IS safe for IEEE floating-point. */
1864
1865 enum rtx_code
1866 swap_condition (code)
1867 enum rtx_code code;
1868 {
1869 switch (code)
1870 {
1871 case EQ:
1872 case NE:
1873 case UNORDERED:
1874 case ORDERED:
1875 case UNEQ:
1876 case LTGT:
1877 return code;
1878
1879 case GT:
1880 return LT;
1881 case GE:
1882 return LE;
1883 case LT:
1884 return GT;
1885 case LE:
1886 return GE;
1887 case GTU:
1888 return LTU;
1889 case GEU:
1890 return LEU;
1891 case LTU:
1892 return GTU;
1893 case LEU:
1894 return GEU;
1895 case UNLT:
1896 return UNGT;
1897 case UNLE:
1898 return UNGE;
1899 case UNGT:
1900 return UNLT;
1901 case UNGE:
1902 return UNLE;
1903
1904 default:
1905 abort ();
1906 }
1907 }
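
/* A typical use (an assumed caller, not from this file) is
 canonicalizing operand order; swapping the operands together with the
 code is always valid, even for IEEE floating point:

 if (CONSTANT_P (op0))
 {
 rtx tem = op0;
 op0 = op1, op1 = tem;
 code = swap_condition (code);
 }
*/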
1908
1909 /* Given a comparison CODE, return the corresponding unsigned comparison.
1910 If CODE is an equality comparison or already an unsigned comparison,
1911 CODE is returned. */
1912
1913 enum rtx_code
1914 unsigned_condition (code)
1915 enum rtx_code code;
1916 {
1917 switch (code)
1918 {
1919 case EQ:
1920 case NE:
1921 case GTU:
1922 case GEU:
1923 case LTU:
1924 case LEU:
1925 return code;
1926
1927 case GT:
1928 return GTU;
1929 case GE:
1930 return GEU;
1931 case LT:
1932 return LTU;
1933 case LE:
1934 return LEU;
1935
1936 default:
1937 abort ();
1938 }
1939 }
1940
1941 /* Similarly, return the signed version of a comparison. */
1942
1943 enum rtx_code
1944 signed_condition (code)
1945 enum rtx_code code;
1946 {
1947 switch (code)
1948 {
1949 case EQ:
1950 case NE:
1951 case GT:
1952 case GE:
1953 case LT:
1954 case LE:
1955 return code;
1956
1957 case GTU:
1958 return GT;
1959 case GEU:
1960 return GE;
1961 case LTU:
1962 return LT;
1963 case LEU:
1964 return LE;
1965
1966 default:
1967 abort ();
1968 }
1969 }
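
/* For example, a pass that proves both operands of a signed comparison
 to be nonnegative may rewrite it via unsigned_condition, which maps
 LT to LTU while passing EQ, NE and the already-unsigned codes through
 unchanged; signed_condition is the exact inverse mapping. Both abort
 on the floating-point-only codes, which have no signedness. */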
1970 \f
1971 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
1972 truth of CODE1 implies the truth of CODE2. */
1973
1974 int
1975 comparison_dominates_p (code1, code2)
1976 enum rtx_code code1, code2;
1977 {
1978 if (code1 == code2)
1979 return 1;
1980
1981 switch (code1)
1982 {
1983 case EQ:
1984 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1985 || code2 == ORDERED)
1986 return 1;
1987 break;
1988
1989 case LT:
1990 if (code2 == LE || code2 == NE || code2 == ORDERED)
1991 return 1;
1992 break;
1993
1994 case GT:
1995 if (code2 == GE || code2 == NE || code2 == ORDERED)
1996 return 1;
1997 break;
1998
1999 case GE:
2000 case LE:
2001 if (code2 == ORDERED)
2002 return 1;
2003 break;
2004
2005 case LTGT:
2006 if (code2 == NE || code2 == ORDERED)
2007 return 1;
2008 break;
2009
2010 case LTU:
2011 if (code2 == LEU || code2 == NE)
2012 return 1;
2013 break;
2014
2015 case GTU:
2016 if (code2 == GEU || code2 == NE)
2017 return 1;
2018 break;
2019
2020 case UNORDERED:
2021 if (code2 == NE)
2022 return 1;
2023 break;
2024
2025 default:
2026 break;
2027 }
2028
2029 return 0;
2030 }
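
/* Worked example: comparison_dominates_p (EQ, GE) is 1, because a == b
 implies a >= b; whenever a branch on EQ is taken, a later branch on
 GE of the same operands must be taken too. The converse call
 comparison_dominates_p (GE, EQ) is 0, since a >= b decides nothing
 about a == b. thread_jumps below relies on exactly this relation. */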
2031 \f
2032 /* Return 1 if INSN is an unconditional jump and nothing else. */
2033
2034 int
2035 simplejump_p (insn)
2036 rtx insn;
2037 {
2038 return (GET_CODE (insn) == JUMP_INSN
2039 && GET_CODE (PATTERN (insn)) == SET
2040 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2041 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2042 }
2043
2044 /* Return nonzero if INSN is a (possibly) conditional jump
2045 and nothing more.
2046
2047 Use of this function is deprecated, since we need to support combined
2048 branch and compare insns. Use any_condjump_p instead whenever possible. */
2049
2050 int
2051 condjump_p (insn)
2052 rtx insn;
2053 {
2054 register rtx x = PATTERN (insn);
2055
2056 if (GET_CODE (x) != SET
2057 || GET_CODE (SET_DEST (x)) != PC)
2058 return 0;
2059
2060 x = SET_SRC (x);
2061 if (GET_CODE (x) == LABEL_REF)
2062 return 1;
2063 else
2064 return (GET_CODE (x) == IF_THEN_ELSE
2065 && ((GET_CODE (XEXP (x, 2)) == PC
2066 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
2067 || GET_CODE (XEXP (x, 1)) == RETURN))
2068 || (GET_CODE (XEXP (x, 1)) == PC
2069 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
2070 || GET_CODE (XEXP (x, 2)) == RETURN))));
2073 }
2074
2075 /* Return nonzero if INSN is a (possibly) conditional jump inside a
2076 PARALLEL.
2077
2078 Use of this function is deprecated, since we need to support combined
2079 branch and compare insns. Use any_condjump_p instead whenever possible. */
2080
2081 int
2082 condjump_in_parallel_p (insn)
2083 rtx insn;
2084 {
2085 register rtx x = PATTERN (insn);
2086
2087 if (GET_CODE (x) != PARALLEL)
2088 return 0;
2089 else
2090 x = XVECEXP (x, 0, 0);
2091
2092 if (GET_CODE (x) != SET)
2093 return 0;
2094 if (GET_CODE (SET_DEST (x)) != PC)
2095 return 0;
2096 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2097 return 1;
2098 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2099 return 0;
2100 if (XEXP (SET_SRC (x), 2) == pc_rtx
2101 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2102 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2103 return 1;
2104 if (XEXP (SET_SRC (x), 1) == pc_rtx
2105 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2106 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2107 return 1;
2108 return 0;
2109 }
2110
2111 /* Return the SET that sets the pc in INSN, or NULL_RTX if there is none. */
2112
2113 rtx
2114 pc_set (insn)
2115 rtx insn;
2116 {
2117 rtx pat;
2118 if (GET_CODE (insn) != JUMP_INSN)
2119 return NULL_RTX;
2120 pat = PATTERN (insn);
2121
2122 /* The set is allowed to appear either as the insn pattern or
2123 the first set in a PARALLEL. */
2124 if (GET_CODE (pat) == PARALLEL)
2125 pat = XVECEXP (pat, 0, 0);
2126 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
2127 return pat;
2128
2129 return NULL_RTX;
2130 }
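
/* As a sketch, the two shapes accepted above are

 (set (pc) (label_ref L))
 (parallel [(set (pc) ...) (clobber ...) ...])

 where only the first element of the PARALLEL is examined; a pc set
 buried deeper in the vector yields NULL_RTX. */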
2131
2132 /* Return true when insn is an unconditional direct jump,
2133 possibly bundled inside a PARALLEL. */
2134
2135 int
2136 any_uncondjump_p (insn)
2137 rtx insn;
2138 {
2139 rtx x = pc_set (insn);
2140 if (!x)
2141 return 0;
2142 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
2143 return 0;
2144 return 1;
2145 }
2146
2147 /* Return true when insn is a conditional jump. This function works for
2148 instructions containing PC sets in PARALLELs. The instruction may have
2149 various other effects, so before removing the jump you must verify
2150 onlyjump_p.
2151
2152 Note that unlike condjump_p it returns false for unconditional jumps. */
2153
2154 int
2155 any_condjump_p (insn)
2156 rtx insn;
2157 {
2158 rtx x = pc_set (insn);
2159 enum rtx_code a, b;
2160
2161 if (!x)
2162 return 0;
2163 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2164 return 0;
2165
2166 a = GET_CODE (XEXP (SET_SRC (x), 1));
2167 b = GET_CODE (XEXP (SET_SRC (x), 2));
2168
2169 return ((b == PC && (a == LABEL_REF || a == RETURN))
2170 || (a == PC && (b == LABEL_REF || b == RETURN)));
2171 }
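
/* For reference, the shape recognized above is

 (set (pc) (if_then_else (cond) (label_ref L) (pc)))

 with the LABEL_REF or RETURN allowed in either arm and the whole SET
 possibly wrapped in a PARALLEL. A plain (set (pc) (label_ref L)) is
 deliberately rejected; that case is any_uncondjump_p's job. */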
2172
2173 /* Return the label of a conditional jump. */
2174
2175 rtx
2176 condjump_label (insn)
2177 rtx insn;
2178 {
2179 rtx x = pc_set (insn);
2180
2181 if (!x)
2182 return NULL_RTX;
2183 x = SET_SRC (x);
2184 if (GET_CODE (x) == LABEL_REF)
2185 return x;
2186 if (GET_CODE (x) != IF_THEN_ELSE)
2187 return NULL_RTX;
2188 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
2189 return XEXP (x, 1);
2190 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
2191 return XEXP (x, 2);
2192 return NULL_RTX;
2193 }
2194
2195 /* Return true if INSN is a (possibly conditional) return insn. */
2196
2197 static int
2198 returnjump_p_1 (loc, data)
2199 rtx *loc;
2200 void *data ATTRIBUTE_UNUSED;
2201 {
2202 rtx x = *loc;
2203 return x && GET_CODE (x) == RETURN;
2204 }
2205
2206 int
2207 returnjump_p (insn)
2208 rtx insn;
2209 {
2210 if (GET_CODE (insn) != JUMP_INSN)
2211 return 0;
2212 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
2213 }
2214
2215 /* Return true if INSN is a jump that only transfers control and
2216 nothing more. */
2217
2218 int
2219 onlyjump_p (insn)
2220 rtx insn;
2221 {
2222 rtx set;
2223
2224 if (GET_CODE (insn) != JUMP_INSN)
2225 return 0;
2226
2227 set = single_set (insn);
2228 if (set == NULL)
2229 return 0;
2230 if (GET_CODE (SET_DEST (set)) != PC)
2231 return 0;
2232 if (side_effects_p (SET_SRC (set)))
2233 return 0;
2234
2235 return 1;
2236 }
2237
2238 #ifdef HAVE_cc0
2239
2240 /* Return 1 if X is an RTX that does nothing but set the condition codes
2241 and CLOBBER or USE registers.
2242 Return -1 if X does explicitly set the condition codes,
2243 but also does other things. */
2244
2245 int
2246 sets_cc0_p (x)
2247 rtx x;
2248 {
2249 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
2250 return 1;
2251 if (GET_CODE (x) == PARALLEL)
2252 {
2253 int i;
2254 int sets_cc0 = 0;
2255 int other_things = 0;
2256 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2257 {
2258 if (GET_CODE (XVECEXP (x, 0, i)) == SET
2259 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
2260 sets_cc0 = 1;
2261 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
2262 other_things = 1;
2263 }
2264 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
2265 }
2266 return 0;
2267 }
2268 #endif
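
/* Examples of the return convention (a sketch; patterns abbreviated):

 (set (cc0) (compare ...)) returns 1
 (parallel [(set (cc0) ...) (set (reg) ...)]) returns -1
 (set (reg) ...) returns 0
*/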
2269 \f
2270 /* Follow any unconditional jump at LABEL;
2271 return the ultimate label reached by any such chain of jumps.
2272 If LABEL is not followed by a jump, return LABEL.
2273 If the chain loops or we can't find end, return LABEL,
2274 since that tells caller to avoid changing the insn.
2275
2276 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2277 a USE or CLOBBER. */
2278
2279 rtx
2280 follow_jumps (label)
2281 rtx label;
2282 {
2283 register rtx insn;
2284 register rtx next;
2285 register rtx value = label;
2286 register int depth;
2287
2288 for (depth = 0;
2289 (depth < 10
2290 && (insn = next_active_insn (value)) != 0
2291 && GET_CODE (insn) == JUMP_INSN
2292 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
2293 && onlyjump_p (insn))
2294 || GET_CODE (PATTERN (insn)) == RETURN)
2295 && (next = NEXT_INSN (insn))
2296 && GET_CODE (next) == BARRIER);
2297 depth++)
2298 {
2299 /* Don't chain through the insn that jumps into a loop
2300 from outside the loop,
2301 since that would create multiple loop entry jumps
2302 and prevent loop optimization. */
2303 rtx tem;
2304 if (!reload_completed)
2305 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
2306 if (GET_CODE (tem) == NOTE
2307 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
2308 /* ??? Optional. Disables some optimizations, but makes
2309 gcov output more accurate with -O. */
2310 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
2311 return value;
2312
2313 /* If we have found a cycle, make the insn jump to itself. */
2314 if (JUMP_LABEL (insn) == label)
2315 return label;
2316
2317 tem = next_active_insn (JUMP_LABEL (insn));
2318 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2319 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2320 break;
2321
2322 value = JUMP_LABEL (insn);
2323 }
2324 if (depth == 10)
2325 return label;
2326 return value;
2327 }
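
/* Example of the tensioning this enables (L1, L2 and L3 are assumed
 labels): given

 L1: jump L2 ... L2: jump L3

 follow_jumps (L1) returns L3, so a reference to L1 can be retargeted
 straight at L3. The depth limit of 10 bounds pathological chains,
 and the JUMP_LABEL check keeps a self-loop from cycling forever. */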
2328
2329 /* Assuming that field IDX of X is a vector of label_refs,
2330 replace each of them by the ultimate label reached by it.
2331 Return nonzero if a change is made.
2332 Whether we may chain across a NOTE_INSN_LOOP_BEG is decided in follow_jumps. */
2333
2334 static int
2335 tension_vector_labels (x, idx)
2336 register rtx x;
2337 register int idx;
2338 {
2339 int changed = 0;
2340 register int i;
2341 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
2342 {
2343 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
2344 register rtx nlabel = follow_jumps (olabel);
2345 if (nlabel && nlabel != olabel)
2346 {
2347 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
2348 ++LABEL_NUSES (nlabel);
2349 if (--LABEL_NUSES (olabel) == 0)
2350 delete_insn (olabel);
2351 changed = 1;
2352 }
2353 }
2354 return changed;
2355 }
2356 \f
2357 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2358 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2359 in INSN, then store one of them in JUMP_LABEL (INSN).
2360 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2361 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2362 Also, when there are consecutive labels, canonicalize on the last of them.
2363
2364 Note that two labels separated by a loop-beginning note
2365 must be kept distinct if we have not yet done loop-optimization,
2366 because the gap between them is where loop-optimize
2367 will want to move invariant code to. CROSS_JUMP tells us
2368 that loop-optimization has been done.
2369
2370 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2371 two labels distinct if they are separated by only USE or CLOBBER insns. */
2372
2373 static void
2374 mark_jump_label (x, insn, cross_jump, in_mem)
2375 register rtx x;
2376 rtx insn;
2377 int cross_jump;
2378 int in_mem;
2379 {
2380 register RTX_CODE code = GET_CODE (x);
2381 register int i;
2382 register const char *fmt;
2383
2384 switch (code)
2385 {
2386 case PC:
2387 case CC0:
2388 case REG:
2389 case SUBREG:
2390 case CONST_INT:
2391 case CONST_DOUBLE:
2392 case CLOBBER:
2393 case CALL:
2394 return;
2395
2396 case MEM:
2397 in_mem = 1;
2398 break;
2399
2400 case SYMBOL_REF:
2401 if (!in_mem)
2402 return;
2403
2404 /* If this is a constant-pool reference, see if it is a label. */
2405 if (CONSTANT_POOL_ADDRESS_P (x))
2406 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
2407 break;
2408
2409 case LABEL_REF:
2410 {
2411 rtx label = XEXP (x, 0);
2412 rtx olabel = label;
2413 rtx note;
2414 rtx next;
2415
2416 /* Ignore remaining references to unreachable labels that
2417 have been deleted. */
2418 if (GET_CODE (label) == NOTE
2419 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
2420 break;
2421
2422 if (GET_CODE (label) != CODE_LABEL)
2423 abort ();
2424
2425 /* Ignore references to labels of containing functions. */
2426 if (LABEL_REF_NONLOCAL_P (x))
2427 break;
2428
2429 /* If there are other labels following this one,
2430 replace it with the last of the consecutive labels. */
2431 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
2432 {
2433 if (GET_CODE (next) == CODE_LABEL)
2434 label = next;
2435 else if (cross_jump && GET_CODE (next) == INSN
2436 && (GET_CODE (PATTERN (next)) == USE
2437 || GET_CODE (PATTERN (next)) == CLOBBER))
2438 continue;
2439 else if (GET_CODE (next) != NOTE)
2440 break;
2441 else if (! cross_jump
2442 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
2443 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
2444 /* ??? Optional. Disables some optimizations, but
2445 makes gcov output more accurate with -O. */
2446 || (flag_test_coverage
2447 && NOTE_LINE_NUMBER (next) > 0)))
2448 break;
2449 }
2450
2451 XEXP (x, 0) = label;
2452 if (! insn || ! INSN_DELETED_P (insn))
2453 ++LABEL_NUSES (label);
2454
2455 if (insn)
2456 {
2457 if (GET_CODE (insn) == JUMP_INSN)
2458 JUMP_LABEL (insn) = label;
2459
2460 /* If we've changed OLABEL and we had a REG_LABEL note
2461 for it, update it as well. */
2462 else if (label != olabel
2463 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
2464 XEXP (note, 0) = label;
2465
2466 /* Otherwise, add a REG_LABEL note for LABEL unless there already
2467 is one. */
2468 else if (! find_reg_note (insn, REG_LABEL, label))
2469 {
2470 /* This code used to ignore labels which referred to dispatch
2471 tables to avoid flow.c generating worse code.
2472
2473 However, in the presence of global optimizations like
2474 gcse which call find_basic_blocks without calling
2475 life_analysis, not recording such labels will lead
2476 to compiler aborts because of inconsistencies in the
2477 flow graph. So we go ahead and record the label.
2478
2479 It may also be the case that the optimization argument
2480 is no longer valid because of the more accurate cfg
2481 we build in find_basic_blocks -- it no longer pessimizes
2482 code when it finds a REG_LABEL note. */
2483 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
2484 REG_NOTES (insn));
2485 }
2486 }
2487 return;
2488 }
2489
2490 /* Do walk the labels in a vector, but not the first operand of an
2491 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
2492 case ADDR_VEC:
2493 case ADDR_DIFF_VEC:
2494 if (! INSN_DELETED_P (insn))
2495 {
2496 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
2497
2498 for (i = 0; i < XVECLEN (x, eltnum); i++)
2499 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
2500 cross_jump, in_mem);
2501 }
2502 return;
2503
2504 default:
2505 break;
2506 }
2507
2508 fmt = GET_RTX_FORMAT (code);
2509 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2510 {
2511 if (fmt[i] == 'e')
2512 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
2513 else if (fmt[i] == 'E')
2514 {
2515 register int j;
2516 for (j = 0; j < XVECLEN (x, i); j++)
2517 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
2518 }
2519 }
2520 }
2521
2522 /* If all INSN does is set the pc, delete it,
2523 and delete the insn that set the condition codes for it
2524 if that's what the previous thing was. */
2525
2526 void
2527 delete_jump (insn)
2528 rtx insn;
2529 {
2530 register rtx set = single_set (insn);
2531
2532 if (set && GET_CODE (SET_DEST (set)) == PC)
2533 delete_computation (insn);
2534 }
2535
2536 /* Verify INSN is a BARRIER and delete it. */
2537
2538 void
2539 delete_barrier (insn)
2540 rtx insn;
2541 {
2542 if (GET_CODE (insn) != BARRIER)
2543 abort ();
2544
2545 delete_insn (insn);
2546 }
2547
2548 /* Recursively delete prior insns that compute the value (used only by INSN
2549 which the caller is deleting) stored in the register mentioned by NOTE
2550 which is a REG_DEAD note associated with INSN. */
2551
2552 static void
2553 delete_prior_computation (note, insn)
2554 rtx note;
2555 rtx insn;
2556 {
2557 rtx our_prev;
2558 rtx reg = XEXP (note, 0);
2559
2560 for (our_prev = prev_nonnote_insn (insn);
2561 our_prev && (GET_CODE (our_prev) == INSN
2562 || GET_CODE (our_prev) == CALL_INSN);
2563 our_prev = prev_nonnote_insn (our_prev))
2564 {
2565 rtx pat = PATTERN (our_prev);
2566
2567 /* If we reach a CALL which is not calling a const function
2568 or the callee pops the arguments, then give up. */
2569 if (GET_CODE (our_prev) == CALL_INSN
2570 && (! CONST_CALL_P (our_prev)
2571 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
2572 break;
2573
2574 /* If we reach a SEQUENCE, it is too complex to try to
2575 do anything with it, so give up. */
2576 if (GET_CODE (pat) == SEQUENCE)
2577 break;
2578
2579 if (GET_CODE (pat) == USE
2580 && GET_CODE (XEXP (pat, 0)) == INSN)
2581 /* reorg creates USEs that look like this. We leave them
2582 alone because reorg needs them for its own purposes. */
2583 break;
2584
2585 if (reg_set_p (reg, pat))
2586 {
2587 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
2588 break;
2589
2590 if (GET_CODE (pat) == PARALLEL)
2591 {
2592 /* If we find a SET of something else, we can't
2593 delete the insn. */
2594
2595 int i;
2596
2597 for (i = 0; i < XVECLEN (pat, 0); i++)
2598 {
2599 rtx part = XVECEXP (pat, 0, i);
2600
2601 if (GET_CODE (part) == SET
2602 && SET_DEST (part) != reg)
2603 break;
2604 }
2605
2606 if (i == XVECLEN (pat, 0))
2607 delete_computation (our_prev);
2608 }
2609 else if (GET_CODE (pat) == SET
2610 && GET_CODE (SET_DEST (pat)) == REG)
2611 {
2612 int dest_regno = REGNO (SET_DEST (pat));
2613 int dest_endregno
2614 = (dest_regno
2615 + (dest_regno < FIRST_PSEUDO_REGISTER
2616 ? HARD_REGNO_NREGS (dest_regno,
2617 GET_MODE (SET_DEST (pat))) : 1));
2618 int regno = REGNO (reg);
2619 int endregno
2620 = (regno
2621 + (regno < FIRST_PSEUDO_REGISTER
2622 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
2623
2624 if (dest_regno >= regno
2625 && dest_endregno <= endregno)
2626 delete_computation (our_prev);
2627
2628 /* We may have a multi-word hard register and some, but not
2629 all, of the words of the register are needed in subsequent
2630 insns. Write REG_UNUSED notes for those parts that were not
2631 needed. */
2632 else if (dest_regno <= regno
2633 && dest_endregno >= endregno)
2634 {
2635 int i;
2636
2637 REG_NOTES (our_prev)
2638 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
2639 REG_NOTES (our_prev));
2640
2641 for (i = dest_regno; i < dest_endregno; i++)
2642 if (! find_regno_note (our_prev, REG_UNUSED, i))
2643 break;
2644
2645 if (i == dest_endregno)
2646 delete_computation (our_prev);
2647 }
2648 }
2649
2650 break;
2651 }
2652
2653 /* If PAT references the register that dies here, it is an
2654 additional use. Hence any prior SET isn't dead. However, this
2655 insn becomes the new place for the REG_DEAD note. */
2656 if (reg_overlap_mentioned_p (reg, pat))
2657 {
2658 XEXP (note, 1) = REG_NOTES (our_prev);
2659 REG_NOTES (our_prev) = note;
2660 break;
2661 }
2662 }
2663 }
2664
2665 /* Delete INSN and recursively delete insns that compute values used only
2666 by INSN. This uses the REG_DEAD notes computed during flow analysis.
2667 If we are running before flow.c, we need do nothing since flow.c will
2668 delete dead code. We also can't know if the registers being used are
2669 dead or not at this point.
2670
2671 Otherwise, look at all our REG_DEAD notes. If a previous insn does
2672 nothing other than set a register that dies in this insn, we can delete
2673 that insn as well.
2674
2675 On machines with CC0, if CC0 is used in this insn, we may be able to
2676 delete the insn that set it. */
2677
2678 static void
2679 delete_computation (insn)
2680 rtx insn;
2681 {
2682 rtx note, next;
2683 rtx set;
2684
2685 #ifdef HAVE_cc0
2686 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2687 {
2688 rtx prev = prev_nonnote_insn (insn);
2689 /* We assume that at this stage
2690 CC's are always set explicitly
2691 and always immediately before the jump that
2692 will use them. So if the previous insn
2693 exists to set the CC's, delete it
2694 (unless it performs auto-increments, etc.). */
2695 if (prev && GET_CODE (prev) == INSN
2696 && sets_cc0_p (PATTERN (prev)))
2697 {
2698 if (sets_cc0_p (PATTERN (prev)) > 0
2699 && ! side_effects_p (PATTERN (prev)))
2700 delete_computation (prev);
2701 else
2702 /* Otherwise, show that cc0 won't be used. */
2703 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
2704 cc0_rtx, REG_NOTES (prev));
2705 }
2706 }
2707 #endif
2708
2709 #ifdef INSN_SCHEDULING
2710 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
2711 reload has completed. The schedulers need to be fixed. Until
2712 they are, we must not rely on the death notes here. */
2713 if (reload_completed && flag_schedule_insns_after_reload)
2714 {
2715 delete_insn (insn);
2716 return;
2717 }
2718 #endif
2719
2720 /* The REG_DEAD note may have been omitted for a register
2721 which is both set and used by the insn. */
2722 set = single_set (insn);
2723 if (set && GET_CODE (SET_DEST (set)) == REG)
2724 {
2725 int dest_regno = REGNO (SET_DEST (set));
2726 int dest_endregno
2727 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
2728 ? HARD_REGNO_NREGS (dest_regno,
2729 GET_MODE (SET_DEST (set))) : 1);
2730 int i;
2731
2732 for (i = dest_regno; i < dest_endregno; i++)
2733 {
2734 if (! refers_to_regno_p (i, i + 1, SET_SRC (set), NULL_PTR)
2735 || find_regno_note (insn, REG_DEAD, i))
2736 continue;
2737
2738 note = gen_rtx_EXPR_LIST (REG_DEAD,
2739 (i < FIRST_PSEUDO_REGISTER
2740 ? gen_rtx_REG (reg_raw_mode[i], i)
2741 : SET_DEST (set)), NULL_RTX);
2742 delete_prior_computation (note, insn);
2743 }
2744 }
2745
2746 for (note = REG_NOTES (insn); note; note = next)
2747 {
2748 next = XEXP (note, 1);
2749
2750 if (REG_NOTE_KIND (note) != REG_DEAD
2751 /* Verify that the REG_NOTE is legitimate. */
2752 || GET_CODE (XEXP (note, 0)) != REG)
2753 continue;
2754
2755 delete_prior_computation (note, insn);
2756 }
2757
2758 delete_insn (insn);
2759 }
2760 \f
2761 /* Delete insn INSN from the chain of insns and update label ref counts.
2762 May delete some following insns as a consequence; may even delete
2763 a label elsewhere and insns that follow it.
2764
2765 Returns the first insn after INSN that was not deleted. */
2766
2767 rtx
2768 delete_insn (insn)
2769 register rtx insn;
2770 {
2771 register rtx next = NEXT_INSN (insn);
2772 register rtx prev = PREV_INSN (insn);
2773 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
2774 register int dont_really_delete = 0;
2775
2776 while (next && INSN_DELETED_P (next))
2777 next = NEXT_INSN (next);
2778
2779 /* This insn is already deleted => return first following nondeleted. */
2780 if (INSN_DELETED_P (insn))
2781 return next;
2782
2783 if (was_code_label)
2784 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2785
2786 /* Don't delete user-declared labels. When optimizing, convert them
2787 to special NOTEs instead. When not optimizing, leave them alone. */
2788 if (was_code_label && LABEL_NAME (insn) != 0)
2789 {
2790 if (! optimize)
2791 dont_really_delete = 1;
2792 else if (! dont_really_delete)
2793 {
2794 const char *name = LABEL_NAME (insn);
2795 PUT_CODE (insn, NOTE);
2796 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
2797 NOTE_SOURCE_FILE (insn) = name;
2798 dont_really_delete = 1;
2799 }
2800 }
2801 else
2802 /* Mark this insn as deleted. */
2803 INSN_DELETED_P (insn) = 1;
2804
2805 /* If this is an unconditional jump, delete it from the jump chain. */
2806 if (simplejump_p (insn))
2807 delete_from_jump_chain (insn);
2808
2809 /* If instruction is followed by a barrier,
2810 delete the barrier too. */
2811
2812 if (next != 0 && GET_CODE (next) == BARRIER)
2813 {
2814 INSN_DELETED_P (next) = 1;
2815 next = NEXT_INSN (next);
2816 }
2817
2818 /* Patch out INSN (and the barrier if any). */
2819
2820 if (! dont_really_delete)
2821 {
2822 if (prev)
2823 {
2824 NEXT_INSN (prev) = next;
2825 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2826 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
2827 XVECLEN (PATTERN (prev), 0) - 1)) = next;
2828 }
2829
2830 if (next)
2831 {
2832 PREV_INSN (next) = prev;
2833 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2834 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2835 }
2836
2837 if (prev && NEXT_INSN (prev) == 0)
2838 set_last_insn (prev);
2839 }
2840
2841 /* If deleting a jump, decrement the count of the label,
2842 and delete the label if it is now unused. */
2843
2844 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2845 {
2846 rtx lab = JUMP_LABEL (insn), lab_next;
2847
2848 if (--LABEL_NUSES (lab) == 0)
2849 {
2850 /* This can delete NEXT or PREV,
2851 either directly if NEXT is JUMP_LABEL (INSN),
2852 or indirectly through more levels of jumps. */
2853 delete_insn (lab);
2854
2855 /* I feel a little doubtful about this loop,
2856 but I see no clean and sure alternative way
2857 to find the first insn after INSN that is not now deleted.
2858 I hope this works. */
2859 while (next && INSN_DELETED_P (next))
2860 next = NEXT_INSN (next);
2861 return next;
2862 }
2863 else if ((lab_next = next_nonnote_insn (lab)) != NULL
2864 && GET_CODE (lab_next) == JUMP_INSN
2865 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
2866 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
2867 {
2868 /* If we're deleting the tablejump, delete the dispatch table.
2869 We may not be able to kill the label immediately preceding
2870 just yet, as it might be referenced in code leading up to
2871 the tablejump. */
2872 delete_insn (lab_next);
2873 }
2874 }
2875
2876 /* Likewise if we're deleting a dispatch table. */
2877
2878 if (GET_CODE (insn) == JUMP_INSN
2879 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2880 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2881 {
2882 rtx pat = PATTERN (insn);
2883 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
2884 int len = XVECLEN (pat, diff_vec_p);
2885
2886 for (i = 0; i < len; i++)
2887 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
2888 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
2889 while (next && INSN_DELETED_P (next))
2890 next = NEXT_INSN (next);
2891 return next;
2892 }
2893
2894 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
2895 prev = PREV_INSN (prev);
2896
2897 /* If INSN was a label and a dispatch table follows it,
2898 delete the dispatch table. The tablejump must have gone already.
2899 It isn't useful to fall through into a table. */
2900
2901 if (was_code_label
2902 && NEXT_INSN (insn) != 0
2903 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
2904 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
2905 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
2906 next = delete_insn (NEXT_INSN (insn));
2907
2908 /* If INSN was a label, delete insns following it if now unreachable. */
2909
2910 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
2911 {
2912 register RTX_CODE code;
2913 while (next != 0
2914 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
2915 || code == NOTE || code == BARRIER
2916 || (code == CODE_LABEL && INSN_DELETED_P (next))))
2917 {
2918 if (code == NOTE
2919 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
2920 next = NEXT_INSN (next);
2921 /* Keep going past other deleted labels to delete what follows. */
2922 else if (code == CODE_LABEL && INSN_DELETED_P (next))
2923 next = NEXT_INSN (next);
2924 else
2925 /* Note: if this deletes a jump, it can cause more
2926 deletion of unreachable code, after a different label.
2927 As long as the value from this recursive call is correct,
2928 this invocation functions correctly. */
2929 next = delete_insn (next);
2930 }
2931 }
2932
2933 return next;
2934 }
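
/* Note the possible cascade: deleting a jump may drop its target
 label's LABEL_NUSES to zero, which deletes the label, which in turn
 can strand the following code behind a BARRIER and delete it as
 unreachable. The return value is always the first surviving insn,
 so the usual caller idiom "insn = delete_insn (insn);" stays safe. */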
2935
2936 /* Advance from INSN till reaching something not deleted,
2937 then return that. May return INSN itself. */
2938
2939 rtx
2940 next_nondeleted_insn (insn)
2941 rtx insn;
2942 {
2943 while (INSN_DELETED_P (insn))
2944 insn = NEXT_INSN (insn);
2945 return insn;
2946 }
2947 \f
2948 /* Delete a range of insns from FROM to TO, inclusive.
2949 This is for the sake of peephole optimization, so assume
2950 that whatever these insns do will still be done by a new
2951 peephole insn that will replace them. */
2952
2953 void
2954 delete_for_peephole (from, to)
2955 register rtx from, to;
2956 {
2957 register rtx insn = from;
2958
2959 while (1)
2960 {
2961 register rtx next = NEXT_INSN (insn);
2962 register rtx prev = PREV_INSN (insn);
2963
2964 if (GET_CODE (insn) != NOTE)
2965 {
2966 INSN_DELETED_P (insn) = 1;
2967
2968 /* Patch this insn out of the chain. */
2969 /* We don't do this all at once, because we
2970 must preserve all NOTEs. */
2971 if (prev)
2972 NEXT_INSN (prev) = next;
2973
2974 if (next)
2975 PREV_INSN (next) = prev;
2976 }
2977
2978 if (insn == to)
2979 break;
2980 insn = next;
2981 }
2982
2983 /* Note that if TO is an unconditional jump
2984 we *do not* delete the BARRIER that follows,
2985 since the peephole that replaces this sequence
2986 is also an unconditional jump in that case. */
2987 }
2988 \f
2989 /* We have determined that INSN is never reached, and are about to
2990 delete it. Print a warning if the user asked for one.
2991
2992 To try to make this warning more useful, this should only be called
2993 once per basic block not reached, and it only warns when the basic
2994 block contains more than one line from the current function, and
2995 contains at least one operation. CSE and inlining can duplicate insns,
2996 so it's possible to get spurious warnings from this. */
2997
2998 void
2999 never_reached_warning (avoided_insn)
3000 rtx avoided_insn;
3001 {
3002 rtx insn;
3003 rtx a_line_note = NULL;
3004 int two_avoided_lines = 0;
3005 int contains_insn = 0;
3006
3007 if (! warn_notreached)
3008 return;
3009
3010 /* Scan forwards, looking at LINE_NUMBER notes, until
3011 we hit a LABEL or we run out of insns. */
3012
3013 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
3014 {
3015 if (GET_CODE (insn) == CODE_LABEL)
3016 break;
3017 else if (GET_CODE (insn) == NOTE /* A line number note? */
3018 && NOTE_LINE_NUMBER (insn) >= 0)
3019 {
3020 if (a_line_note == NULL)
3021 a_line_note = insn;
3022 else
3023 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
3024 != NOTE_LINE_NUMBER (insn));
3025 }
3026 else if (INSN_P (insn))
3027 contains_insn = 1;
3028 }
3029 if (two_avoided_lines && contains_insn)
3030 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
3031 NOTE_LINE_NUMBER (a_line_note),
3032 "will never be executed");
3033 }
3034 \f
3035 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
3036 NLABEL as a return. Accrue modifications into the change group. */
3037
3038 static void
3039 redirect_exp_1 (loc, olabel, nlabel, insn)
3040 rtx *loc;
3041 rtx olabel, nlabel;
3042 rtx insn;
3043 {
3044 register rtx x = *loc;
3045 register RTX_CODE code = GET_CODE (x);
3046 register int i;
3047 register const char *fmt;
3048
3049 if (code == LABEL_REF)
3050 {
3051 if (XEXP (x, 0) == olabel)
3052 {
3053 rtx n;
3054 if (nlabel)
3055 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3056 else
3057 n = gen_rtx_RETURN (VOIDmode);
3058
3059 validate_change (insn, loc, n, 1);
3060 return;
3061 }
3062 }
3063 else if (code == RETURN && olabel == 0)
3064 {
3065 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3066 if (loc == &PATTERN (insn))
3067 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3068 validate_change (insn, loc, x, 1);
3069 return;
3070 }
3071
3072 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
3073 && GET_CODE (SET_SRC (x)) == LABEL_REF
3074 && XEXP (SET_SRC (x), 0) == olabel)
3075 {
3076 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
3077 return;
3078 }
3079
3080 fmt = GET_RTX_FORMAT (code);
3081 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3082 {
3083 if (fmt[i] == 'e')
3084 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
3085 else if (fmt[i] == 'E')
3086 {
3087 register int j;
3088 for (j = 0; j < XVECLEN (x, i); j++)
3089 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
3090 }
3091 }
3092 }
3093
3094 /* Similar, but apply the change group and report success or failure. */
3095
3096 static int
3097 redirect_exp (olabel, nlabel, insn)
3098 rtx olabel, nlabel;
3099 rtx insn;
3100 {
3101 rtx *loc;
3102
3103 if (GET_CODE (PATTERN (insn)) == PARALLEL)
3104 loc = &XVECEXP (PATTERN (insn), 0, 0);
3105 else
3106 loc = &PATTERN (insn);
3107
3108 redirect_exp_1 (loc, olabel, nlabel, insn);
3109 if (num_validated_changes () == 0)
3110 return 0;
3111
3112 return apply_change_group ();
3113 }
3114
3115 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
3116 the modifications into the change group. Return false if we did
3117 not see how to do that. */
3118
3119 int
3120 redirect_jump_1 (jump, nlabel)
3121 rtx jump, nlabel;
3122 {
3123 int ochanges = num_validated_changes ();
3124 rtx *loc;
3125
3126 if (GET_CODE (PATTERN (jump)) == PARALLEL)
3127 loc = &XVECEXP (PATTERN (jump), 0, 0);
3128 else
3129 loc = &PATTERN (jump);
3130
3131 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
3132 return num_validated_changes () > ochanges;
3133 }
3134
3135 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
3136 jump target label is unused as a result, it and the code following
3137 it may be deleted.
3138
3139 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3140 RETURN insn.
3141
3142 The return value will be 1 if the change was made, 0 if it wasn't
3143 (this can only occur for NLABEL == 0). */
3144
3145 int
3146 redirect_jump (jump, nlabel, delete_unused)
3147 rtx jump, nlabel;
3148 int delete_unused;
3149 {
3150 register rtx olabel = JUMP_LABEL (jump);
3151
3152 if (nlabel == olabel)
3153 return 1;
3154
3155 if (! redirect_exp (olabel, nlabel, jump))
3156 return 0;
3157
3158 /* If this is an unconditional branch, delete it from the jump_chain of
3159 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3160 have UID's in range and JUMP_CHAIN is valid). */
3161 if (jump_chain && (simplejump_p (jump)
3162 || GET_CODE (PATTERN (jump)) == RETURN))
3163 {
3164 int label_index = nlabel ? INSN_UID (nlabel) : 0;
3165
3166 delete_from_jump_chain (jump);
3167 if (label_index < max_jump_chain
3168 && INSN_UID (jump) < max_jump_chain)
3169 {
3170 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
3171 jump_chain[label_index] = jump;
3172 }
3173 }
3174
3175 JUMP_LABEL (jump) = nlabel;
3176 if (nlabel)
3177 ++LABEL_NUSES (nlabel);
3178
3179 /* If we're eliding the jump over exception cleanups at the end of a
3180 function, move the function end note so that -Wreturn-type works. */
3181 if (olabel && nlabel
3182 && NEXT_INSN (olabel)
3183 && GET_CODE (NEXT_INSN (olabel)) == NOTE
3184 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
3185 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
3186
3187 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
3188 delete_insn (olabel);
3189
3190 return 1;
3191 }
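
/* A sketch of typical use (JUMP and NEW_LABEL are assumed names):

 if (redirect_jump (jump, new_label, 1))
 changed = 1;

 With DELETE_UNUSED non-zero, the old label is deleted once its use
 count reaches zero, which may also delete code only that label made
 reachable; passing 0 leaves an unused old label in place. */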
3192
3193 /* Invert the jump condition of the jump insn INSN.
3194 Accrue the modifications into the change group. */
3195
3196 static void
3197 invert_exp_1 (insn)
3198 rtx insn;
3199 {
3200 register RTX_CODE code;
3201 rtx x = pc_set (insn);
3202
3203 if (!x)
3204 abort ();
3205 x = SET_SRC (x);
3206
3207 code = GET_CODE (x);
3208
3209 if (code == IF_THEN_ELSE)
3210 {
3211 register rtx comp = XEXP (x, 0);
3212 register rtx tem;
3213
3214 /* We can do this in two ways: The preferable way, which can only be
3215 done when can_reverse_comparison_p says the reversal is safe, is to
3216 reverse the comparison code. Otherwise, swap the THEN-part and ELSE-part
3217 of the IF_THEN_ELSE. If we can't do either, fail. */
3218
3219 if (can_reverse_comparison_p (comp, insn))
3220 {
3221 validate_change (insn, &XEXP (x, 0),
3222 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
3223 GET_MODE (comp), XEXP (comp, 0),
3224 XEXP (comp, 1)),
3225 1);
3226 return;
3227 }
3228
3229 tem = XEXP (x, 1);
3230 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
3231 validate_change (insn, &XEXP (x, 2), tem, 1);
3232 }
3233 else
3234 abort ();
3235 }
3236
3237 /* Invert the jump condition of conditional jump insn, INSN.
3238
3239 Return 1 if we can do so, 0 if we cannot find a way to do so that
3240 matches a pattern. */
3241
3242 static int
3243 invert_exp (insn)
3244 rtx insn;
3245 {
3246 invert_exp_1 (insn);
3247 if (num_validated_changes () == 0)
3248 return 0;
3249
3250 return apply_change_group ();
3251 }
3252
3253 /* Invert the condition of the jump JUMP, and make it jump to label
3254 NLABEL instead of where it jumps now. Accrue changes into the
3255 change group. Return false if we didn't see how to perform the
3256 inversion and redirection. */
3257
3258 int
3259 invert_jump_1 (jump, nlabel)
3260 rtx jump, nlabel;
3261 {
3262 int ochanges;
3263
3264 ochanges = num_validated_changes ();
3265 invert_exp_1 (jump);
3266 if (num_validated_changes () == ochanges)
3267 return 0;
3268
3269 return redirect_jump_1 (jump, nlabel);
3270 }
3271
3272 /* Invert the condition of the jump JUMP, and make it jump to label
3273 NLABEL instead of where it jumps now. Return true if successful. */
3274
3275 int
3276 invert_jump (jump, nlabel, delete_unused)
3277 rtx jump, nlabel;
3278 int delete_unused;
3279 {
3280 /* We have to either invert the condition and change the label or
3281 do neither. Either operation could fail. We first try to invert
3282 the jump. If that succeeds, we try changing the label. If that fails,
3283 we invert the jump back to what it was. */
3284
3285 if (! invert_exp (jump))
3286 return 0;
3287
3288 if (redirect_jump (jump, nlabel, delete_unused))
3289 {
3290 /* An inverted jump means that a probability taken becomes a
3291 probability not taken. Subtract the branch probability from the
3292 probability base to convert it back to a taken probability. */
3293
3294 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3295 if (note)
3296 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
3297
3298 return 1;
3299 }
3300
3301 if (! invert_exp (jump))
3302 /* This should just be putting it back the way it was. */
3303 abort ();
3304
3305 return 0;
3306 }
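
/* Probability bookkeeping example: assuming REG_BR_PROB_BASE is 10000,
 a branch annotated as taken with probability 3000 is annotated, once
 inverted, with 10000 - 3000 = 7000, since "taken" and "not taken"
 have traded places. */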
3307
3308 /* Delete the instruction JUMP from any jump chain it might be on. */
3309
3310 static void
3311 delete_from_jump_chain (jump)
3312 rtx jump;
3313 {
3314 int index;
3315 rtx olabel = JUMP_LABEL (jump);
3316
3317 /* Handle unconditional jumps. */
3318 if (jump_chain && olabel != 0
3319 && INSN_UID (olabel) < max_jump_chain
3320 && simplejump_p (jump))
3321 index = INSN_UID (olabel);
3322 /* Handle return insns. */
3323 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3324 index = 0;
3325 else
3326 return;
3327
3328 if (jump_chain[index] == jump)
3329 jump_chain[index] = jump_chain[INSN_UID (jump)];
3330 else
3331 {
3332 rtx insn;
3333
3334 for (insn = jump_chain[index];
3335 insn != 0;
3336 insn = jump_chain[INSN_UID (insn)])
3337 if (jump_chain[INSN_UID (insn)] == jump)
3338 {
3339 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3340 break;
3341 }
3342 }
3343 }
3344 \f
3345 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3346
3347 If the old jump target label (before the dispatch table) becomes unused,
3348 it and the dispatch table may be deleted. In that case, find the insn
3349 before the jump that references that label and delete it and its logical
3350 successors too. */
3351
3352 static void
3353 redirect_tablejump (jump, nlabel)
3354 rtx jump, nlabel;
3355 {
3356 register rtx olabel = JUMP_LABEL (jump);
3357
3358 /* Add this jump to the jump_chain of NLABEL. */
3359 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
3360 && INSN_UID (jump) < max_jump_chain)
3361 {
3362 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
3363 jump_chain[INSN_UID (nlabel)] = jump;
3364 }
3365
3366 PATTERN (jump) = gen_jump (nlabel);
3367 JUMP_LABEL (jump) = nlabel;
3368 ++LABEL_NUSES (nlabel);
3369 INSN_CODE (jump) = -1;
3370
3371 if (--LABEL_NUSES (olabel) == 0)
3372 {
3373 delete_labelref_insn (jump, olabel, 0);
3374 delete_insn (olabel);
3375 }
3376 }
3377
3378 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3379 If we found one, delete it and then delete this insn if DELETE_THIS is
3380 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3381
3382 static int
3383 delete_labelref_insn (insn, label, delete_this)
3384 rtx insn, label;
3385 int delete_this;
3386 {
3387 int deleted = 0;
3388 rtx link;
3389
3390 if (GET_CODE (insn) != NOTE
3391 && reg_mentioned_p (label, PATTERN (insn)))
3392 {
3393 if (delete_this)
3394 {
3395 delete_insn (insn);
3396 deleted = 1;
3397 }
3398 else
3399 return 1;
3400 }
3401
3402 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
3403 if (delete_labelref_insn (XEXP (link, 0), label, 1))
3404 {
3405 if (delete_this)
3406 {
3407 delete_insn (insn);
3408 deleted = 1;
3409 }
3410 else
3411 return 1;
3412 }
3413
3414 return deleted;
3415 }
3416 \f
3417 /* Like rtx_equal_p except that it considers two REGs as equal
3418 if they renumber to the same value and considers two commutative
3419 operations to be the same if the order of the operands has been
3420 reversed.
3421
3422 ??? Addition is not commutative on the PA due to the weird implicit
3423 space register selection rules for memory addresses. Therefore, we
3424 don't consider a + b == b + a.
3425
3426 We could/should make this test a little tighter. Possibly only
3427 disabling it on the PA via some backend macro or only disabling this
3428 case when the PLUS is inside a MEM. */
3429
3430 int
3431 rtx_renumbered_equal_p (x, y)
3432 rtx x, y;
3433 {
3434 register int i;
3435 register RTX_CODE code = GET_CODE (x);
3436 register const char *fmt;
3437
3438 if (x == y)
3439 return 1;
3440
3441 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3442 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3443 && GET_CODE (SUBREG_REG (y)) == REG)))
3444 {
3445 int reg_x = -1, reg_y = -1;
3446 int word_x = 0, word_y = 0;
3447
3448 if (GET_MODE (x) != GET_MODE (y))
3449 return 0;
3450
3451 /* If we haven't done any renumbering, don't
3452 make any assumptions. */
3453 if (reg_renumber == 0)
3454 return rtx_equal_p (x, y);
3455
3456 if (code == SUBREG)
3457 {
3458 reg_x = REGNO (SUBREG_REG (x));
3459 word_x = SUBREG_WORD (x);
3460
3461 if (reg_renumber[reg_x] >= 0)
3462 {
3463 reg_x = reg_renumber[reg_x] + word_x;
3464 word_x = 0;
3465 }
3466 }
3467
3468 else
3469 {
3470 reg_x = REGNO (x);
3471 if (reg_renumber[reg_x] >= 0)
3472 reg_x = reg_renumber[reg_x];
3473 }
3474
3475 if (GET_CODE (y) == SUBREG)
3476 {
3477 reg_y = REGNO (SUBREG_REG (y));
3478 word_y = SUBREG_WORD (y);
3479
3480 if (reg_renumber[reg_y] >= 0)
3481 {
3482 reg_y = reg_renumber[reg_y];
3483 word_y = 0;
3484 }
3485 }
3486
3487 else
3488 {
3489 reg_y = REGNO (y);
3490 if (reg_renumber[reg_y] >= 0)
3491 reg_y = reg_renumber[reg_y];
3492 }
3493
3494 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
3495 }
3496
3497 /* Now we have disposed of all the cases
3498 in which different rtx codes can match. */
3499 if (code != GET_CODE (y))
3500 return 0;
3501
3502 switch (code)
3503 {
3504 case PC:
3505 case CC0:
3506 case ADDR_VEC:
3507 case ADDR_DIFF_VEC:
3508 return 0;
3509
3510 case CONST_INT:
3511 return INTVAL (x) == INTVAL (y);
3512
3513 case LABEL_REF:
3514 /* We can't assume nonlocal labels have their following insns yet. */
3515 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3516 return XEXP (x, 0) == XEXP (y, 0);
3517
3518 /* Two label-refs are equivalent if they point at labels
3519 in the same position in the instruction stream. */
3520 return (next_real_insn (XEXP (x, 0))
3521 == next_real_insn (XEXP (y, 0)));
3522
3523 case SYMBOL_REF:
3524 return XSTR (x, 0) == XSTR (y, 0);
3525
3526 case CODE_LABEL:
3527 /* If we didn't match EQ equality above, they aren't the same. */
3528 return 0;
3529
3530 default:
3531 break;
3532 }
3533
3534 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3535
3536 if (GET_MODE (x) != GET_MODE (y))
3537 return 0;
3538
3539 /* For commutative operations, the RTXs match if the operands match in
3540 either order. Also handle the simple binary and unary cases without a loop.
3541
3542 ??? Don't consider PLUS a commutative operator; see comments above. */
3543 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3544 && code != PLUS)
3545 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3546 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
3547 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
3548 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
3549 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
3550 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3551 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
3552 else if (GET_RTX_CLASS (code) == '1')
3553 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
3554
3555 /* Compare the elements. If any pair of corresponding elements
3556 fails to match, return 0 for the whole thing. */
3557
3558 fmt = GET_RTX_FORMAT (code);
3559 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3560 {
3561 register int j;
3562 switch (fmt[i])
3563 {
3564 case 'w':
3565 if (XWINT (x, i) != XWINT (y, i))
3566 return 0;
3567 break;
3568
3569 case 'i':
3570 if (XINT (x, i) != XINT (y, i))
3571 return 0;
3572 break;
3573
3574 case 's':
3575 if (strcmp (XSTR (x, i), XSTR (y, i)))
3576 return 0;
3577 break;
3578
3579 case 'e':
3580 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
3581 return 0;
3582 break;
3583
3584 case 'u':
3585 if (XEXP (x, i) != XEXP (y, i))
3586 return 0;
3587 /* fall through. */
3588 case '0':
3589 break;
3590
3591 case 'E':
3592 if (XVECLEN (x, i) != XVECLEN (y, i))
3593 return 0;
3594 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3595 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
3596 return 0;
3597 break;
3598
3599 default:
3600 abort ();
3601 }
3602 }
3603 return 1;
3604 }
3605 \f
3606 /* If X is a hard register or equivalent to one or a subregister of one,
3607 return the hard register number. If X is a pseudo register that was not
3608 assigned a hard register, return the pseudo register number. Otherwise,
3609 return -1. Any rtx is valid for X. */
3610
3611 int
3612 true_regnum (x)
3613 rtx x;
3614 {
3615 if (GET_CODE (x) == REG)
3616 {
3617 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3618 return reg_renumber[REGNO (x)];
3619 return REGNO (x);
3620 }
3621 if (GET_CODE (x) == SUBREG)
3622 {
3623 int base = true_regnum (SUBREG_REG (x));
3624 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3625 return SUBREG_WORD (x) + base;
3626 }
3627 return -1;
3628 }
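
/* Example, assuming reg_renumber assigned hard register 3 to pseudo 57:
 true_regnum of (reg:SI 57) is 3, of (subreg:SI (reg:DI 57) 1) is 4
 (the base hard register plus SUBREG_WORD), and of any non-register
 rtx is -1. */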
3629 \f
3630 /* Optimize code of the form:
3631
3632 for (x = a[i]; x; ...)
3633 ...
3634 for (x = a[i]; x; ...)
3635 ...
3636 foo:
3637
3638 Loop optimize will change the above code into
3639
3640 if (x = a[i])
3641 for (;;)
3642 { ...; if (! (x = ...)) break; }
3643 if (x = a[i])
3644 for (;;)
3645 { ...; if (! (x = ...)) break; }
3646 foo:
3647
3648 In general, if the first test fails, the program can branch
3649 directly to `foo' and skip the second try which is doomed to fail.
3650 We run this after loop optimization and before flow analysis. */
3651
3652 /* When comparing the insn patterns, we track the fact that different
3653 pseudo-register numbers may have been used in each computation.
3654 The following array stores an equivalence -- same_regs[I] == J means
3655 that pseudo register I was used in the first set of tests in a context
3656 where J was used in the second set. We also count the number of such
3657 pending equivalences. If that count is still nonzero when we finish
3658 comparing, the expressions really aren't the same. */
3659
3660 static int *same_regs;
3661
3662 static int num_same_regs;
3663
3664 /* Track any registers modified between the target of the first jump and
3665 the second jump. They never compare equal. */
3666
3667 static char *modified_regs;
3668
3669 /* Record if memory was modified. */
3670
3671 static int modified_mem;
3672
3673 /* Called via note_stores on each insn between the target of the first
3674 branch and the second branch. It marks any changed registers. */
3675
3676 static void
3677 mark_modified_reg (dest, x, data)
3678 rtx dest;
3679 rtx x ATTRIBUTE_UNUSED;
3680 void *data ATTRIBUTE_UNUSED;
3681 {
3682 int regno;
3683 unsigned int i;
3684
3685 if (GET_CODE (dest) == SUBREG)
3686 dest = SUBREG_REG (dest);
3687
3688 if (GET_CODE (dest) == MEM)
3689 modified_mem = 1;
3690
3691 if (GET_CODE (dest) != REG)
3692 return;
3693
3694 regno = REGNO (dest);
3695 if (regno >= FIRST_PSEUDO_REGISTER)
3696 modified_regs[regno] = 1;
3697 else
3698 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
3699 modified_regs[regno + i] = 1;
3700 }
3701
3702 /* F is the first insn in the chain of insns. */
3703
3704 void
3705 thread_jumps (f, max_reg, flag_before_loop)
3706 rtx f;
3707 int max_reg;
3708 int flag_before_loop;
3709 {
3710 /* Basic algorithm is to find a conditional branch,
3711 the label it may branch to, and the branch after
3712 that label. If the two branches test the same condition,
3713 walk back from both branch paths until the insn patterns
3714 differ, or code labels are hit. If we make it back to
3715 the target of the first branch, then we know that the first branch
3716 will either always succeed or always fail depending on the relative
3717 senses of the two branches. So adjust the first branch accordingly
3718 in this case. */
3719
3720 rtx label, b1, b2, t1, t2;
3721 enum rtx_code code1, code2;
3722 rtx b1op0, b1op1, b2op0, b2op1;
3723 int changed = 1;
3724 int i;
3725 int *all_reset;
3726
3727 /* Allocate register tables and quick-reset table. */
3728 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
3729 same_regs = (int *) xmalloc (max_reg * sizeof (int));
3730 all_reset = (int *) xmalloc (max_reg * sizeof (int));
3731 for (i = 0; i < max_reg; i++)
3732 all_reset[i] = -1;
3733
3734 while (changed)
3735 {
3736 changed = 0;
3737
3738 for (b1 = f; b1; b1 = NEXT_INSN (b1))
3739 {
3740 rtx set;
3741 rtx set2;
3742
3743 /* Get to a candidate branch insn. */
3744 if (GET_CODE (b1) != JUMP_INSN
3745 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
3746 continue;
3747
3748 memset (modified_regs, 0, max_reg * sizeof (char));
3749 modified_mem = 0;
3750
3751 memcpy (same_regs, all_reset, max_reg * sizeof (int));
3753 num_same_regs = 0;
3754
3755 label = JUMP_LABEL (b1);
3756
3757 /* Look for a branch after the target. Record any registers and
3758 memory modified between the target and the branch. Stop when we
3759 get to a label since we can't know what was changed there. */
3760 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
3761 {
3762 if (GET_CODE (b2) == CODE_LABEL)
3763 break;
3764
3765 else if (GET_CODE (b2) == JUMP_INSN)
3766 {
3767 /* If this is an unconditional jump and is the only use of
3768 its target label, we can follow it. */
3769 if (any_uncondjump_p (b2)
3770 && onlyjump_p (b2)
3771 && JUMP_LABEL (b2) != 0
3772 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
3773 {
3774 b2 = JUMP_LABEL (b2);
3775 continue;
3776 }
3777 else
3778 break;
3779 }
3780
3781 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
3782 continue;
3783
3784 if (GET_CODE (b2) == CALL_INSN)
3785 {
3786 modified_mem = 1;
3787 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3788 if (call_used_regs[i] && ! fixed_regs[i]
3789 && i != STACK_POINTER_REGNUM
3790 && i != FRAME_POINTER_REGNUM
3791 && i != HARD_FRAME_POINTER_REGNUM
3792 && i != ARG_POINTER_REGNUM)
3793 modified_regs[i] = 1;
3794 }
3795
3796 note_stores (PATTERN (b2), mark_modified_reg, NULL);
3797 }
3798
3799 /* Check the next candidate branch insn from the label
3800 of the first. */
3801 if (b2 == 0
3802 || GET_CODE (b2) != JUMP_INSN
3803 || b2 == b1
3804 || !any_condjump_p (b2)
3805 || !onlyjump_p (b2))
3806 continue;
3807 set = pc_set (b1);
3808 set2 = pc_set (b2);
3809
3810 /* Get the comparison codes and operands, reversing the
3811 codes if appropriate. If we don't have comparison codes,
3812 we can't do anything. */
3813 b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
3814 b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
3815 code1 = GET_CODE (XEXP (SET_SRC (set), 0));
3816 if (XEXP (SET_SRC (set), 1) == pc_rtx)
3817 code1 = reverse_condition (code1);
3818
3819 b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
3820 b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
3821 code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
3822 if (XEXP (SET_SRC (set2), 1) == pc_rtx)
3823 code2 = reverse_condition (code2);
3824
3825 /* If they test the same things, and knowing whether B1 branches
3826 tells us whether or not B2 branches, check if we
3827 can thread the branch. */
3828 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
3829 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
3830 && (comparison_dominates_p (code1, code2)
3831 || (can_reverse_comparison_p (XEXP (SET_SRC (set), 0), b1)
3832 && comparison_dominates_p (code1,
3833 reverse_condition (code2)))))
3834
3835 {
3836 t1 = prev_nonnote_insn (b1);
3837 t2 = prev_nonnote_insn (b2);
3838
3839 while (t1 != 0 && t2 != 0)
3840 {
3841 if (t2 == label)
3842 {
3843 /* We have reached the target of the first branch.
3844 If there are no pending register equivalents,
3845 we know that this branch will either always
3846 succeed (if the senses of the two branches are
3847 the same) or always fail (if not). */
3848 rtx new_label;
3849
3850 if (num_same_regs != 0)
3851 break;
3852
3853 if (comparison_dominates_p (code1, code2))
3854 new_label = JUMP_LABEL (b2);
3855 else
3856 new_label = get_label_after (b2);
3857
3858 if (JUMP_LABEL (b1) != new_label)
3859 {
3860 rtx prev = PREV_INSN (new_label);
3861
3862 if (flag_before_loop
3863 && GET_CODE (prev) == NOTE
3864 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
3865 {
3866 /* Don't thread to the loop label. If a loop
3867 label is reused, loop optimization will
3868 be disabled for that loop. */
3869 new_label = gen_label_rtx ();
3870 emit_label_after (new_label, PREV_INSN (prev));
3871 }
3872 changed |= redirect_jump (b1, new_label, 1);
3873 }
3874 break;
3875 }
3876
3877 /* If either of these is not a normal insn (it might be
3878 a JUMP_INSN, CALL_INSN, or CODE_LABEL), we fail. (NOTEs
3879 have already been skipped above.) Similarly, fail
3880 if the insns differ. */
3881 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
3882 || recog_memoized (t1) != recog_memoized (t2)
3883 || ! rtx_equal_for_thread_p (PATTERN (t1),
3884 PATTERN (t2), t2))
3885 break;
3886
3887 t1 = prev_nonnote_insn (t1);
3888 t2 = prev_nonnote_insn (t2);
3889 }
3890 }
3891 }
3892 }
3893
3894 /* Clean up. */
3895 free (modified_regs);
3896 free (same_regs);
3897 free (all_reset);
3898 }
3899 \f
3900 /* This is like RTX_EQUAL_P except that it knows about our handling of
3901 possibly equivalent registers and knows to consider volatile and
3902 modified objects as not equal.
3903
3904 YINSN is the insn containing Y. */
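 /* Example of the difference from rtx_equal_p (hypothetical pseudos):
    (reg:SI 70) and (reg:SI 71) may compare equal here if an earlier
    comparison in the same threading attempt recorded
    same_regs[70] == 71 and neither register has been modified since;
    rtx_equal_p would never equate distinct registers.  */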
3905
3906 int
3907 rtx_equal_for_thread_p (x, y, yinsn)
3908 rtx x, y;
3909 rtx yinsn;
3910 {
3911 register int i;
3912 register int j;
3913 register enum rtx_code code;
3914 register const char *fmt;
3915
3916 code = GET_CODE (x);
3917 /* Rtx's of different codes cannot be equal. */
3918 if (code != GET_CODE (y))
3919 return 0;
3920
3921 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
3922 (REG:SI x) and (REG:HI x) are NOT equivalent. */
3923
3924 if (GET_MODE (x) != GET_MODE (y))
3925 return 0;
3926
3927 /* For IEEE floating point, consider everything unequal unless
3928 -ffast-math is in use. This is a bit pessimistic, but this pass
3929 would only rarely do anything for FP anyway. */
3930 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3931 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
3932 return 0;
3933
3934 /* For commutative operations, the RTXs match if the operands match in
3935 either order. Also handle the simple binary and unary cases without a loop. */
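 /* E.g. (plus:SI (reg 70) (const_int 4)) matches
    (plus:SI (const_int 4) (reg 70)) here (hypothetical operands).  */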
3936 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3937 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
3938 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
3939 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
3940 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
3941 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
3942 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
3943 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
3944 else if (GET_RTX_CLASS (code) == '1')
3945 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
3946
3947 /* Handle special cases first. */
3948 switch (code)
3949 {
3950 case REG:
3951 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
3952 return 1;
3953
3954 /* If neither is a user variable nor a hard register, check for a
3955 possible equivalence. */
3956 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
3957 || REGNO (x) < FIRST_PSEUDO_REGISTER
3958 || REGNO (y) < FIRST_PSEUDO_REGISTER)
3959 return 0;
3960
3961 if (same_regs[REGNO (x)] == -1)
3962 {
3963 same_regs[REGNO (x)] = REGNO (y);
3964 num_same_regs++;
3965
3966 /* If this is the first time we are seeing a register on the `Y'
3967 side, see if it is the last use. If not, we can't thread the
3968 jump, so mark it as not equivalent. */
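 /* (REGNO_LAST_UID gives the uid of the last insn that mentions the
    register, so this checks that the `Y' register dies here; if it
    is used again later, treating it as equivalent to the `X' register
    would not be safe.)  */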
3969 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
3970 return 0;
3971
3972 return 1;
3973 }
3974 else
3975 return (same_regs[REGNO (x)] == (int) REGNO (y));
3978
3979 case MEM:
3980 /* If memory was modified, or either operand is volatile, they are
3981 not equivalent; otherwise compare the addresses. */
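 /* E.g. two occurrences of (mem:SI (reg 70)) are equivalent only if
    no intervening store or call was seen (modified_mem is clear) and
    neither reference is volatile (hypothetical address).  */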
3982 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
3983 return 0;
3984
3985 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
3986
3987 case ASM_INPUT:
3988 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
3989 return 0;
3990
3991 break;
3992
3993 case SET:
3994 /* Cancel a pending `same_regs' equivalence if we are setting the
3995 equivalenced registers, then compare the sources. */
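 /* E.g. with same_regs[70] == 71 pending, matching (set (reg 70) ...)
    against (set (reg 71) ...) cancels the equivalence, since both
    registers receive new values here (hypothetical register numbers).  */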
3996 if (GET_CODE (SET_DEST (x)) == REG
3997 && GET_CODE (SET_DEST (y)) == REG)
3998 {
3999 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
4000 {
4001 same_regs[REGNO (SET_DEST (x))] = -1;
4002 num_same_regs--;
4003 }
4004 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
4005 return 0;
4006 }
4007 else
4008 {
4009 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
4010 return 0;
4011 }
4012
4013 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
4014
4015 case LABEL_REF:
4016 return XEXP (x, 0) == XEXP (y, 0);
4017
4018 case SYMBOL_REF:
4019 return XSTR (x, 0) == XSTR (y, 0);
4020
4021 default:
4022 break;
4023 }
4024
4025 if (x == y)
4026 return 1;
4027
4028 fmt = GET_RTX_FORMAT (code);
4029 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4030 {
4031 switch (fmt[i])
4032 {
4033 case 'w':
4034 if (XWINT (x, i) != XWINT (y, i))
4035 return 0;
4036 break;
4037
4038 case 'n':
4039 case 'i':
4040 if (XINT (x, i) != XINT (y, i))
4041 return 0;
4042 break;
4043
4044 case 'V':
4045 case 'E':
4046 /* Two vectors must have the same length. */
4047 if (XVECLEN (x, i) != XVECLEN (y, i))
4048 return 0;
4049
4050 /* And the corresponding elements must match. */
4051 for (j = 0; j < XVECLEN (x, i); j++)
4052 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
4053 XVECEXP (y, i, j), yinsn) == 0)
4054 return 0;
4055 break;
4056
4057 case 'e':
4058 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
4059 return 0;
4060 break;
4061
4062 case 'S':
4063 case 's':
4064 if (strcmp (XSTR (x, i), XSTR (y, i)))
4065 return 0;
4066 break;
4067
4068 case 'u':
4069 /* These are just backpointers, so they don't matter. */
4070 break;
4071
4072 case '0':
4073 case 't':
4074 break;
4075
4076 /* It is believed that rtx's at this level will never
4077 contain anything but integers and other rtx's,
4078 except for within LABEL_REFs and SYMBOL_REFs. */
4079 default:
4080 abort ();
4081 }
4082 }
4083 return 1;
4084 }