1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
58 #include "hard-reg-set.h"
60 #include "insn-config.h"
61 #include "insn-flags.h"
62 #include "insn-attr.h"
70 /* ??? Eventually must record somehow the labels used by jumps
71 from nested functions. */
72 /* Pre-record the next or previous real insn for each label?
73 No, this pass is very fast anyway. */
74 /* Condense consecutive labels?
75 This would make life analysis faster, maybe. */
76 /* Optimize jump y; x: ... y: jumpif... x?
77 Don't know if it is worth bothering with. */
78 /* Optimize two cases of conditional jump to conditional jump?
79 This can never delete any instruction or make anything dead,
80 or even change what is live at any point.
81 So perhaps let combiner do it. */
83 /* Vector indexed by uid.
84 For each CODE_LABEL, index by its uid to get first unconditional jump
85 that jumps to the label.
86 For each JUMP_INSN, index by its uid to get the next unconditional jump
87 that jumps to the same label.
88 Element 0 is the start of a chain of all return insns.
89 (It is safe to use element 0 because insn uid 0 is not used. */
91 static rtx
*jump_chain
;
/* Maximum index in jump_chain.  */

static int max_jump_chain;
97 /* Set nonzero by jump_optimize if control can fall through
98 to the end of the function. */
/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C, and
   R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;
109 static int init_label_info
PROTO((rtx
));
110 static void delete_barrier_successors
PROTO((rtx
));
111 static void mark_all_labels
PROTO((rtx
, int));
112 static rtx delete_unreferenced_labels
PROTO((rtx
));
113 static void delete_noop_moves
PROTO((rtx
));
114 static int calculate_can_reach_end
PROTO((rtx
, int, int));
115 static int duplicate_loop_exit_test
PROTO((rtx
));
116 static void find_cross_jump
PROTO((rtx
, rtx
, int, rtx
*, rtx
*));
117 static void do_cross_jump
PROTO((rtx
, rtx
, rtx
));
118 static int jump_back_p
PROTO((rtx
, rtx
));
119 static int tension_vector_labels
PROTO((rtx
, int));
120 static void mark_jump_label
PROTO((rtx
, rtx
, int));
121 static void delete_computation
PROTO((rtx
));
122 static void delete_from_jump_chain
PROTO((rtx
));
123 static int delete_labelref_insn
PROTO((rtx
, rtx
, int));
124 static void mark_modified_reg
PROTO((rtx
, rtx
));
125 static void redirect_tablejump
PROTO((rtx
, rtx
));
126 static void jump_optimize_1
PROTO ((rtx
, int, int, int, int));
128 static rtx find_insert_position
PROTO((rtx
, rtx
));
131 /* Main external entry point into the jump optimizer. See comments before
132 jump_optimize_1 for descriptions of the arguments. */
134 jump_optimize (f
, cross_jump
, noop_moves
, after_regscan
)
140 jump_optimize_1 (f
, cross_jump
, noop_moves
, after_regscan
, 0);
143 /* Alternate entry into the jump optimizer. This entry point only rebuilds
144 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
147 rebuild_jump_labels (f
)
150 jump_optimize_1 (f
, 0, 0, 0, 1);
154 /* Delete no-op jumps and optimize jumps to jumps
155 and jumps around jumps.
156 Delete unused labels and unreachable code.
158 If CROSS_JUMP is 1, detect matching code
159 before a jump and its destination and unify them.
160 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
162 If NOOP_MOVES is nonzero, delete no-op move insns.
164 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
165 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
167 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
168 and JUMP_LABEL field for jumping insns.
170 If `optimize' is zero, don't change any code,
171 just determine whether control drops off the end of the function.
172 This case occurs when we have -W and not -O.
173 It works because `delete_insn' checks the value of `optimize'
174 and refrains from actually deleting when that is 0. */
177 jump_optimize_1 (f
, cross_jump
, noop_moves
, after_regscan
, mark_labels_only
)
182 int mark_labels_only
;
184 register rtx insn
, next
;
191 cross_jump_death_matters
= (cross_jump
== 2);
192 max_uid
= init_label_info (f
) + 1;
194 /* If we are performing cross jump optimizations, then initialize
195 tables mapping UIDs to EH regions to avoid incorrect movement
196 of insns from one EH region to another. */
197 if (flag_exceptions
&& cross_jump
)
198 init_insn_eh_region (f
, max_uid
);
200 delete_barrier_successors (f
);
202 /* Leave some extra room for labels and duplicate exit test insns
204 max_jump_chain
= max_uid
* 14 / 10;
205 jump_chain
= (rtx
*) alloca (max_jump_chain
* sizeof (rtx
));
206 bzero ((char *) jump_chain
, max_jump_chain
* sizeof (rtx
));
208 mark_all_labels (f
, cross_jump
);
210 /* Keep track of labels used from static data;
211 they cannot ever be deleted. */
213 for (insn
= forced_labels
; insn
; insn
= XEXP (insn
, 1))
214 LABEL_NUSES (XEXP (insn
, 0))++;
216 check_exception_handler_labels ();
218 /* Keep track of labels used for marking handlers for exception
219 regions; they cannot usually be deleted. */
221 for (insn
= exception_handler_labels
; insn
; insn
= XEXP (insn
, 1))
222 LABEL_NUSES (XEXP (insn
, 0))++;
224 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
225 notes and recompute LABEL_NUSES. */
226 if (mark_labels_only
)
229 exception_optimize ();
231 last_insn
= delete_unreferenced_labels (f
);
235 /* CAN_REACH_END is persistent for each function. Once set it should
236 not be cleared. This is especially true for the case where we
237 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
238 the front-end before compiling each function. */
239 if (calculate_can_reach_end (last_insn
, 1, 0))
242 /* Zero the "deleted" flag of all the "deleted" insns. */
243 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
244 INSN_DELETED_P (insn
) = 0;
246 /* Show that the jump chain is not valid. */
254 /* If we fall through to the epilogue, see if we can insert a RETURN insn
255 in front of it. If the machine allows it at this point (we might be
256 after reload for a leaf routine), it will improve optimization for it
258 insn
= get_last_insn ();
259 while (insn
&& GET_CODE (insn
) == NOTE
)
260 insn
= PREV_INSN (insn
);
262 if (insn
&& GET_CODE (insn
) != BARRIER
)
264 emit_jump_insn (gen_return ());
271 delete_noop_moves (f
);
273 /* If we haven't yet gotten to reload and we have just run regscan,
274 delete any insn that sets a register that isn't used elsewhere.
275 This helps some of the optimizations below by having less insns
276 being jumped around. */
278 if (! reload_completed
&& after_regscan
)
279 for (insn
= f
; insn
; insn
= next
)
281 rtx set
= single_set (insn
);
283 next
= NEXT_INSN (insn
);
285 if (set
&& GET_CODE (SET_DEST (set
)) == REG
286 && REGNO (SET_DEST (set
)) >= FIRST_PSEUDO_REGISTER
287 && REGNO_FIRST_UID (REGNO (SET_DEST (set
))) == INSN_UID (insn
)
288 /* We use regno_last_note_uid so as not to delete the setting
289 of a reg that's used in notes. A subsequent optimization
290 might arrange to use that reg for real. */
291 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set
))) == INSN_UID (insn
)
292 && ! side_effects_p (SET_SRC (set
))
293 && ! find_reg_note (insn
, REG_RETVAL
, 0))
297 /* Now iterate optimizing jumps until nothing changes over one pass. */
299 old_max_reg
= max_reg_num ();
304 for (insn
= f
; insn
; insn
= next
)
307 rtx temp
, temp1
, temp2
, temp3
, temp4
, temp5
, temp6
;
309 int this_is_simplejump
, this_is_condjump
, reversep
= 0;
310 int this_is_condjump_in_parallel
;
313 /* If NOT the first iteration, if this is the last jump pass
314 (just before final), do the special peephole optimizations.
315 Avoiding the first iteration gives ordinary jump opts
316 a chance to work before peephole opts. */
318 if (reload_completed
&& !first
&& !flag_no_peephole
)
319 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
)
323 /* That could have deleted some insns after INSN, so check now
324 what the following insn is. */
326 next
= NEXT_INSN (insn
);
328 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
329 jump. Try to optimize by duplicating the loop exit test if so.
330 This is only safe immediately after regscan, because it uses
331 the values of regno_first_uid and regno_last_uid. */
332 if (after_regscan
&& GET_CODE (insn
) == NOTE
333 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
334 && (temp1
= next_nonnote_insn (insn
)) != 0
335 && simplejump_p (temp1
))
337 temp
= PREV_INSN (insn
);
338 if (duplicate_loop_exit_test (insn
))
341 next
= NEXT_INSN (temp
);
346 if (GET_CODE (insn
) != JUMP_INSN
)
349 this_is_simplejump
= simplejump_p (insn
);
350 this_is_condjump
= condjump_p (insn
);
351 this_is_condjump_in_parallel
= condjump_in_parallel_p (insn
);
353 /* Tension the labels in dispatch tables. */
355 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
)
356 changed
|= tension_vector_labels (PATTERN (insn
), 0);
357 if (GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
358 changed
|= tension_vector_labels (PATTERN (insn
), 1);
360 /* If a dispatch table always goes to the same place,
361 get rid of it and replace the insn that uses it. */
363 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
364 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
367 rtx pat
= PATTERN (insn
);
368 int diff_vec_p
= GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
;
369 int len
= XVECLEN (pat
, diff_vec_p
);
370 rtx dispatch
= prev_real_insn (insn
);
372 for (i
= 0; i
< len
; i
++)
373 if (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)
374 != XEXP (XVECEXP (pat
, diff_vec_p
, 0), 0))
378 && GET_CODE (dispatch
) == JUMP_INSN
379 && JUMP_LABEL (dispatch
) != 0
380 /* Don't mess with a casesi insn. */
381 && !(GET_CODE (PATTERN (dispatch
)) == SET
382 && (GET_CODE (SET_SRC (PATTERN (dispatch
)))
384 && next_real_insn (JUMP_LABEL (dispatch
)) == insn
)
386 redirect_tablejump (dispatch
,
387 XEXP (XVECEXP (pat
, diff_vec_p
, 0), 0));
392 reallabelprev
= prev_active_insn (JUMP_LABEL (insn
));
394 /* If a jump references the end of the function, try to turn
395 it into a RETURN insn, possibly a conditional one. */
396 if (JUMP_LABEL (insn
)
397 && (next_active_insn (JUMP_LABEL (insn
)) == 0
398 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn
))))
400 changed
|= redirect_jump (insn
, NULL_RTX
);
402 /* Detect jump to following insn. */
403 if (reallabelprev
== insn
&& condjump_p (insn
))
405 next
= next_real_insn (JUMP_LABEL (insn
));
411 /* If we have an unconditional jump preceded by a USE, try to put
412 the USE before the target and jump there. This simplifies many
413 of the optimizations below since we don't have to worry about
414 dealing with these USE insns. We only do this if the label
415 being branch to already has the identical USE or if code
416 never falls through to that label. */
418 if (this_is_simplejump
419 && (temp
= prev_nonnote_insn (insn
)) != 0
420 && GET_CODE (temp
) == INSN
&& GET_CODE (PATTERN (temp
)) == USE
421 && (temp1
= prev_nonnote_insn (JUMP_LABEL (insn
))) != 0
422 && (GET_CODE (temp1
) == BARRIER
423 || (GET_CODE (temp1
) == INSN
424 && rtx_equal_p (PATTERN (temp
), PATTERN (temp1
))))
425 /* Don't do this optimization if we have a loop containing only
426 the USE instruction, and the loop start label has a usage
427 count of 1. This is because we will redo this optimization
428 everytime through the outer loop, and jump opt will never
430 && ! ((temp2
= prev_nonnote_insn (temp
)) != 0
431 && temp2
== JUMP_LABEL (insn
)
432 && LABEL_NUSES (temp2
) == 1))
434 if (GET_CODE (temp1
) == BARRIER
)
436 emit_insn_after (PATTERN (temp
), temp1
);
437 temp1
= NEXT_INSN (temp1
);
441 redirect_jump (insn
, get_label_before (temp1
));
442 reallabelprev
= prev_real_insn (temp1
);
446 /* Simplify if (...) x = a; else x = b; by converting it
447 to x = b; if (...) x = a;
448 if B is sufficiently simple, the test doesn't involve X,
449 and nothing in the test modifies B or X.
451 If we have small register classes, we also can't do this if X
454 If the "x = b;" insn has any REG_NOTES, we don't do this because
455 of the possibility that we are running after CSE and there is a
456 REG_EQUAL note that is only valid if the branch has already been
457 taken. If we move the insn with the REG_EQUAL note, we may
458 fold the comparison to always be false in a later CSE pass.
459 (We could also delete the REG_NOTES when moving the insn, but it
460 seems simpler to not move it.) An exception is that we can move
461 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
462 value is the same as "b".
464 INSN is the branch over the `else' part.
468 TEMP to the jump insn preceding "x = a;"
470 TEMP2 to the insn that sets "x = b;"
471 TEMP3 to the insn that sets "x = a;"
472 TEMP4 to the set of "x = b"; */
474 if (this_is_simplejump
475 && (temp3
= prev_active_insn (insn
)) != 0
476 && GET_CODE (temp3
) == INSN
477 && (temp4
= single_set (temp3
)) != 0
478 && GET_CODE (temp1
= SET_DEST (temp4
)) == REG
479 && (! SMALL_REGISTER_CLASSES
480 || REGNO (temp1
) >= FIRST_PSEUDO_REGISTER
)
481 && (temp2
= next_active_insn (insn
)) != 0
482 && GET_CODE (temp2
) == INSN
483 && (temp4
= single_set (temp2
)) != 0
484 && rtx_equal_p (SET_DEST (temp4
), temp1
)
485 && ! side_effects_p (SET_SRC (temp4
))
486 && ! may_trap_p (SET_SRC (temp4
))
487 && (REG_NOTES (temp2
) == 0
488 || ((REG_NOTE_KIND (REG_NOTES (temp2
)) == REG_EQUAL
489 || REG_NOTE_KIND (REG_NOTES (temp2
)) == REG_EQUIV
)
490 && XEXP (REG_NOTES (temp2
), 1) == 0
491 && rtx_equal_p (XEXP (REG_NOTES (temp2
), 0),
493 && (temp
= prev_active_insn (temp3
)) != 0
494 && condjump_p (temp
) && ! simplejump_p (temp
)
495 /* TEMP must skip over the "x = a;" insn */
496 && prev_real_insn (JUMP_LABEL (temp
)) == insn
497 && no_labels_between_p (insn
, JUMP_LABEL (temp
))
498 /* There must be no other entries to the "x = b;" insn. */
499 && no_labels_between_p (JUMP_LABEL (temp
), temp2
)
500 /* INSN must either branch to the insn after TEMP2 or the insn
501 after TEMP2 must branch to the same place as INSN. */
502 && (reallabelprev
== temp2
503 || ((temp5
= next_active_insn (temp2
)) != 0
504 && simplejump_p (temp5
)
505 && JUMP_LABEL (temp5
) == JUMP_LABEL (insn
))))
507 /* The test expression, X, may be a complicated test with
508 multiple branches. See if we can find all the uses of
509 the label that TEMP branches to without hitting a CALL_INSN
510 or a jump to somewhere else. */
511 rtx target
= JUMP_LABEL (temp
);
512 int nuses
= LABEL_NUSES (target
);
518 /* Set P to the first jump insn that goes around "x = a;". */
519 for (p
= temp
; nuses
&& p
; p
= prev_nonnote_insn (p
))
521 if (GET_CODE (p
) == JUMP_INSN
)
523 if (condjump_p (p
) && ! simplejump_p (p
)
524 && JUMP_LABEL (p
) == target
)
533 else if (GET_CODE (p
) == CALL_INSN
)
538 /* We cannot insert anything between a set of cc and its use
539 so if P uses cc0, we must back up to the previous insn. */
540 q
= prev_nonnote_insn (p
);
541 if (q
&& GET_RTX_CLASS (GET_CODE (q
)) == 'i'
542 && sets_cc0_p (PATTERN (q
)))
549 /* If we found all the uses and there was no data conflict, we
550 can move the assignment unless we can branch into the middle
553 && no_labels_between_p (p
, insn
)
554 && ! reg_referenced_between_p (temp1
, p
, NEXT_INSN (temp3
))
555 && ! reg_set_between_p (temp1
, p
, temp3
)
556 && (GET_CODE (SET_SRC (temp4
)) == CONST_INT
557 || ! modified_between_p (SET_SRC (temp4
), p
, temp2
))
558 /* Verify that registers used by the jump are not clobbered
559 by the instruction being moved. */
560 && ! regs_set_between_p (PATTERN (temp
),
564 emit_insn_after_with_line_notes (PATTERN (temp2
), p
, temp2
);
567 /* Set NEXT to an insn that we know won't go away. */
568 next
= next_active_insn (insn
);
570 /* Delete the jump around the set. Note that we must do
571 this before we redirect the test jumps so that it won't
572 delete the code immediately following the assignment
573 we moved (which might be a jump). */
577 /* We either have two consecutive labels or a jump to
578 a jump, so adjust all the JUMP_INSNs to branch to where
580 for (p
= NEXT_INSN (p
); p
!= next
; p
= NEXT_INSN (p
))
581 if (GET_CODE (p
) == JUMP_INSN
)
582 redirect_jump (p
, target
);
589 /* Simplify if (...) { x = a; goto l; } x = b; by converting it
590 to x = a; if (...) goto l; x = b;
591 if A is sufficiently simple, the test doesn't involve X,
592 and nothing in the test modifies A or X.
594 If we have small register classes, we also can't do this if X
597 If the "x = a;" insn has any REG_NOTES, we don't do this because
598 of the possibility that we are running after CSE and there is a
599 REG_EQUAL note that is only valid if the branch has already been
600 taken. If we move the insn with the REG_EQUAL note, we may
601 fold the comparison to always be false in a later CSE pass.
602 (We could also delete the REG_NOTES when moving the insn, but it
603 seems simpler to not move it.) An exception is that we can move
604 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
605 value is the same as "a".
611 TEMP to the jump insn preceding "x = a;"
613 TEMP2 to the insn that sets "x = b;"
614 TEMP3 to the insn that sets "x = a;"
615 TEMP4 to the set of "x = a"; */
617 if (this_is_simplejump
618 && (temp2
= next_active_insn (insn
)) != 0
619 && GET_CODE (temp2
) == INSN
620 && (temp4
= single_set (temp2
)) != 0
621 && GET_CODE (temp1
= SET_DEST (temp4
)) == REG
622 && (! SMALL_REGISTER_CLASSES
623 || REGNO (temp1
) >= FIRST_PSEUDO_REGISTER
)
624 && (temp3
= prev_active_insn (insn
)) != 0
625 && GET_CODE (temp3
) == INSN
626 && (temp4
= single_set (temp3
)) != 0
627 && rtx_equal_p (SET_DEST (temp4
), temp1
)
628 && ! side_effects_p (SET_SRC (temp4
))
629 && ! may_trap_p (SET_SRC (temp4
))
630 && (REG_NOTES (temp3
) == 0
631 || ((REG_NOTE_KIND (REG_NOTES (temp3
)) == REG_EQUAL
632 || REG_NOTE_KIND (REG_NOTES (temp3
)) == REG_EQUIV
)
633 && XEXP (REG_NOTES (temp3
), 1) == 0
634 && rtx_equal_p (XEXP (REG_NOTES (temp3
), 0),
636 && (temp
= prev_active_insn (temp3
)) != 0
637 && condjump_p (temp
) && ! simplejump_p (temp
)
638 /* TEMP must skip over the "x = a;" insn */
639 && prev_real_insn (JUMP_LABEL (temp
)) == insn
640 && no_labels_between_p (temp
, insn
))
642 rtx prev_label
= JUMP_LABEL (temp
);
643 rtx insert_after
= prev_nonnote_insn (temp
);
646 /* We cannot insert anything between a set of cc and its use. */
647 if (insert_after
&& GET_RTX_CLASS (GET_CODE (insert_after
)) == 'i'
648 && sets_cc0_p (PATTERN (insert_after
)))
649 insert_after
= prev_nonnote_insn (insert_after
);
651 ++LABEL_NUSES (prev_label
);
654 && no_labels_between_p (insert_after
, temp
)
655 && ! reg_referenced_between_p (temp1
, insert_after
, temp3
)
656 && ! reg_referenced_between_p (temp1
, temp3
,
658 && ! reg_set_between_p (temp1
, insert_after
, temp
)
659 && ! modified_between_p (SET_SRC (temp4
), insert_after
, temp
)
660 /* Verify that registers used by the jump are not clobbered
661 by the instruction being moved. */
662 && ! regs_set_between_p (PATTERN (temp
),
665 && invert_jump (temp
, JUMP_LABEL (insn
)))
667 emit_insn_after_with_line_notes (PATTERN (temp3
),
668 insert_after
, temp3
);
671 /* Set NEXT to an insn that we know won't go away. */
675 if (prev_label
&& --LABEL_NUSES (prev_label
) == 0)
676 delete_insn (prev_label
);
682 /* If we have if (...) x = exp; and branches are expensive,
683 EXP is a single insn, does not have any side effects, cannot
684 trap, and is not too costly, convert this to
685 t = exp; if (...) x = t;
687 Don't do this when we have CC0 because it is unlikely to help
688 and we'd need to worry about where to place the new insn and
689 the potential for conflicts. We also can't do this when we have
690 notes on the insn for the same reason as above.
694 TEMP to the "x = exp;" insn.
695 TEMP1 to the single set in the "x = exp;" insn.
698 if (! reload_completed
699 && this_is_condjump
&& ! this_is_simplejump
701 && (temp
= next_nonnote_insn (insn
)) != 0
702 && GET_CODE (temp
) == INSN
703 && REG_NOTES (temp
) == 0
704 && (reallabelprev
== temp
705 || ((temp2
= next_active_insn (temp
)) != 0
706 && simplejump_p (temp2
)
707 && JUMP_LABEL (temp2
) == JUMP_LABEL (insn
)))
708 && (temp1
= single_set (temp
)) != 0
709 && (temp2
= SET_DEST (temp1
), GET_CODE (temp2
) == REG
)
710 && (! SMALL_REGISTER_CLASSES
711 || REGNO (temp2
) >= FIRST_PSEUDO_REGISTER
)
712 && GET_CODE (SET_SRC (temp1
)) != REG
713 && GET_CODE (SET_SRC (temp1
)) != SUBREG
714 && GET_CODE (SET_SRC (temp1
)) != CONST_INT
715 && ! side_effects_p (SET_SRC (temp1
))
716 && ! may_trap_p (SET_SRC (temp1
))
717 && rtx_cost (SET_SRC (temp1
), SET
) < 10)
719 rtx
new = gen_reg_rtx (GET_MODE (temp2
));
721 if ((temp3
= find_insert_position (insn
, temp
))
722 && validate_change (temp
, &SET_DEST (temp1
), new, 0))
724 next
= emit_insn_after (gen_move_insn (temp2
, new), insn
);
725 emit_insn_after_with_line_notes (PATTERN (temp
),
726 PREV_INSN (temp3
), temp
);
728 reallabelprev
= prev_active_insn (JUMP_LABEL (insn
));
732 reg_scan_update (temp3
, NEXT_INSN (next
), old_max_reg
);
733 old_max_reg
= max_reg_num ();
738 /* Similarly, if it takes two insns to compute EXP but they
739 have the same destination. Here TEMP3 will be the second
740 insn and TEMP4 the SET from that insn. */
742 if (! reload_completed
743 && this_is_condjump
&& ! this_is_simplejump
745 && (temp
= next_nonnote_insn (insn
)) != 0
746 && GET_CODE (temp
) == INSN
747 && REG_NOTES (temp
) == 0
748 && (temp3
= next_nonnote_insn (temp
)) != 0
749 && GET_CODE (temp3
) == INSN
750 && REG_NOTES (temp3
) == 0
751 && (reallabelprev
== temp3
752 || ((temp2
= next_active_insn (temp3
)) != 0
753 && simplejump_p (temp2
)
754 && JUMP_LABEL (temp2
) == JUMP_LABEL (insn
)))
755 && (temp1
= single_set (temp
)) != 0
756 && (temp2
= SET_DEST (temp1
), GET_CODE (temp2
) == REG
)
757 && GET_MODE_CLASS (GET_MODE (temp2
)) == MODE_INT
758 && (! SMALL_REGISTER_CLASSES
759 || REGNO (temp2
) >= FIRST_PSEUDO_REGISTER
)
760 && ! side_effects_p (SET_SRC (temp1
))
761 && ! may_trap_p (SET_SRC (temp1
))
762 && rtx_cost (SET_SRC (temp1
), SET
) < 10
763 && (temp4
= single_set (temp3
)) != 0
764 && rtx_equal_p (SET_DEST (temp4
), temp2
)
765 && ! side_effects_p (SET_SRC (temp4
))
766 && ! may_trap_p (SET_SRC (temp4
))
767 && rtx_cost (SET_SRC (temp4
), SET
) < 10)
769 rtx
new = gen_reg_rtx (GET_MODE (temp2
));
771 if ((temp5
= find_insert_position (insn
, temp
))
772 && (temp6
= find_insert_position (insn
, temp3
))
773 && validate_change (temp
, &SET_DEST (temp1
), new, 0))
775 /* Use the earliest of temp5 and temp6. */
778 next
= emit_insn_after (gen_move_insn (temp2
, new), insn
);
779 emit_insn_after_with_line_notes (PATTERN (temp
),
780 PREV_INSN (temp6
), temp
);
781 emit_insn_after_with_line_notes
782 (replace_rtx (PATTERN (temp3
), temp2
, new),
783 PREV_INSN (temp6
), temp3
);
786 reallabelprev
= prev_active_insn (JUMP_LABEL (insn
));
790 reg_scan_update (temp6
, NEXT_INSN (next
), old_max_reg
);
791 old_max_reg
= max_reg_num ();
796 /* Finally, handle the case where two insns are used to
797 compute EXP but a temporary register is used. Here we must
798 ensure that the temporary register is not used anywhere else. */
800 if (! reload_completed
802 && this_is_condjump
&& ! this_is_simplejump
804 && (temp
= next_nonnote_insn (insn
)) != 0
805 && GET_CODE (temp
) == INSN
806 && REG_NOTES (temp
) == 0
807 && (temp3
= next_nonnote_insn (temp
)) != 0
808 && GET_CODE (temp3
) == INSN
809 && REG_NOTES (temp3
) == 0
810 && (reallabelprev
== temp3
811 || ((temp2
= next_active_insn (temp3
)) != 0
812 && simplejump_p (temp2
)
813 && JUMP_LABEL (temp2
) == JUMP_LABEL (insn
)))
814 && (temp1
= single_set (temp
)) != 0
815 && (temp5
= SET_DEST (temp1
),
816 (GET_CODE (temp5
) == REG
817 || (GET_CODE (temp5
) == SUBREG
818 && (temp5
= SUBREG_REG (temp5
),
819 GET_CODE (temp5
) == REG
))))
820 && REGNO (temp5
) >= FIRST_PSEUDO_REGISTER
821 && REGNO_FIRST_UID (REGNO (temp5
)) == INSN_UID (temp
)
822 && REGNO_LAST_UID (REGNO (temp5
)) == INSN_UID (temp3
)
823 && ! side_effects_p (SET_SRC (temp1
))
824 && ! may_trap_p (SET_SRC (temp1
))
825 && rtx_cost (SET_SRC (temp1
), SET
) < 10
826 && (temp4
= single_set (temp3
)) != 0
827 && (temp2
= SET_DEST (temp4
), GET_CODE (temp2
) == REG
)
828 && GET_MODE_CLASS (GET_MODE (temp2
)) == MODE_INT
829 && (! SMALL_REGISTER_CLASSES
830 || REGNO (temp2
) >= FIRST_PSEUDO_REGISTER
)
831 && rtx_equal_p (SET_DEST (temp4
), temp2
)
832 && ! side_effects_p (SET_SRC (temp4
))
833 && ! may_trap_p (SET_SRC (temp4
))
834 && rtx_cost (SET_SRC (temp4
), SET
) < 10)
836 rtx
new = gen_reg_rtx (GET_MODE (temp2
));
838 if ((temp5
= find_insert_position (insn
, temp
))
839 && (temp6
= find_insert_position (insn
, temp3
))
840 && validate_change (temp3
, &SET_DEST (temp4
), new, 0))
842 /* Use the earliest of temp5 and temp6. */
845 next
= emit_insn_after (gen_move_insn (temp2
, new), insn
);
846 emit_insn_after_with_line_notes (PATTERN (temp
),
847 PREV_INSN (temp6
), temp
);
848 emit_insn_after_with_line_notes (PATTERN (temp3
),
849 PREV_INSN (temp6
), temp3
);
852 reallabelprev
= prev_active_insn (JUMP_LABEL (insn
));
856 reg_scan_update (temp6
, NEXT_INSN (next
), old_max_reg
);
857 old_max_reg
= max_reg_num ();
861 #endif /* HAVE_cc0 */
863 /* Try to use a conditional move (if the target has them), or a
864 store-flag insn. The general case is:
866 1) x = a; if (...) x = b; and
869 If the jump would be faster, the machine should not have defined
870 the movcc or scc insns!. These cases are often made by the
871 previous optimization.
873 The second case is treated as x = x; if (...) x = b;.
875 INSN here is the jump around the store. We set:
877 TEMP to the "x op= b;" insn.
880 TEMP3 to A (X in the second case).
881 TEMP4 to the condition being tested.
882 TEMP5 to the earliest insn used to find the condition.
883 TEMP6 to the SET of TEMP. */
885 if (/* We can't do this after reload has completed. */
887 && this_is_condjump
&& ! this_is_simplejump
888 /* Set TEMP to the "x = b;" insn. */
889 && (temp
= next_nonnote_insn (insn
)) != 0
890 && GET_CODE (temp
) == INSN
891 && (temp6
= single_set (temp
)) != NULL_RTX
892 && GET_CODE (temp1
= SET_DEST (temp6
)) == REG
893 && (! SMALL_REGISTER_CLASSES
894 || REGNO (temp1
) >= FIRST_PSEUDO_REGISTER
)
895 && ! side_effects_p (temp2
= SET_SRC (temp6
))
896 && ! may_trap_p (temp2
)
897 /* Allow either form, but prefer the former if both apply.
898 There is no point in using the old value of TEMP1 if
899 it is a register, since cse will alias them. It can
900 lose if the old value were a hard register since CSE
901 won't replace hard registers. Avoid using TEMP3 if
902 small register classes and it is a hard register. */
903 && (((temp3
= reg_set_last (temp1
, insn
)) != 0
904 && ! (SMALL_REGISTER_CLASSES
&& GET_CODE (temp3
) == REG
905 && REGNO (temp3
) < FIRST_PSEUDO_REGISTER
))
906 /* Make the latter case look like x = x; if (...) x = b; */
907 || (temp3
= temp1
, 1))
908 /* INSN must either branch to the insn after TEMP or the insn
909 after TEMP must branch to the same place as INSN. */
910 && (reallabelprev
== temp
911 || ((temp4
= next_active_insn (temp
)) != 0
912 && simplejump_p (temp4
)
913 && JUMP_LABEL (temp4
) == JUMP_LABEL (insn
)))
914 && (temp4
= get_condition (insn
, &temp5
)) != 0
915 /* We must be comparing objects whose modes imply the size.
916 We could handle BLKmode if (1) emit_store_flag could
917 and (2) we could find the size reliably. */
918 && GET_MODE (XEXP (temp4
, 0)) != BLKmode
919 /* Even if branches are cheap, the store_flag optimization
920 can win when the operation to be performed can be
921 expressed directly. */
923 /* If the previous insn sets CC0 and something else, we can't
924 do this since we are going to delete that insn. */
926 && ! ((temp6
= prev_nonnote_insn (insn
)) != 0
927 && GET_CODE (temp6
) == INSN
928 && (sets_cc0_p (PATTERN (temp6
)) == -1
929 || (sets_cc0_p (PATTERN (temp6
)) == 1
930 && FIND_REG_INC_NOTE (temp6
, NULL_RTX
))))
934 #ifdef HAVE_conditional_move
935 /* First try a conditional move. */
937 enum rtx_code code
= GET_CODE (temp4
);
939 rtx cond0
, cond1
, aval
, bval
;
940 rtx target
, new_insn
;
942 /* Copy the compared variables into cond0 and cond1, so that
943 any side effects performed in or after the old comparison,
944 will not affect our compare which will come later. */
945 /* ??? Is it possible to just use the comparison in the jump
946 insn? After all, we're going to delete it. We'd have
947 to modify emit_conditional_move to take a comparison rtx
948 instead or write a new function. */
949 cond0
= gen_reg_rtx (GET_MODE (XEXP (temp4
, 0)));
950 /* We want the target to be able to simplify comparisons with
951 zero (and maybe other constants as well), so don't create
952 pseudos for them. There's no need to either. */
953 if (GET_CODE (XEXP (temp4
, 1)) == CONST_INT
954 || GET_CODE (XEXP (temp4
, 1)) == CONST_DOUBLE
)
955 cond1
= XEXP (temp4
, 1);
957 cond1
= gen_reg_rtx (GET_MODE (XEXP (temp4
, 1)));
959 /* Careful about copying these values -- an IOR or what may
960 need to do other things, like clobber flags. */
961 /* ??? Assume for the moment that AVAL is ok. */
966 /* If we're not dealing with a register or the insn is more
967 complex than a simple SET, duplicate the computation and
968 replace the destination with a new temporary. */
969 if (register_operand (temp2
, GET_MODE (var
))
970 && GET_CODE (PATTERN (temp
)) == SET
)
974 bval
= gen_reg_rtx (GET_MODE (var
));
975 new_insn
= copy_rtx (temp
);
976 temp6
= single_set (new_insn
);
977 SET_DEST (temp6
) = bval
;
978 emit_insn (PATTERN (new_insn
));
981 target
= emit_conditional_move (var
, code
,
982 cond0
, cond1
, VOIDmode
,
983 aval
, bval
, GET_MODE (var
),
984 (code
== LTU
|| code
== GEU
985 || code
== LEU
|| code
== GTU
));
989 rtx seq1
, seq2
, last
;
992 /* Save the conditional move sequence but don't emit it
993 yet. On some machines, like the alpha, it is possible
994 that temp5 == insn, so next generate the sequence that
995 saves the compared values and then emit both
996 sequences ensuring seq1 occurs before seq2. */
1000 /* "Now that we can't fail..." Famous last words.
1001 Generate the copy insns that preserve the compared
1004 emit_move_insn (cond0
, XEXP (temp4
, 0));
1005 if (cond1
!= XEXP (temp4
, 1))
1006 emit_move_insn (cond1
, XEXP (temp4
, 1));
1007 seq1
= get_insns ();
1010 /* Validate the sequence -- this may be some weird
1011 bit-extract-and-test instruction for which there
1012 exists no complementary bit-extract insn. */
1014 for (last
= seq1
; last
; last
= NEXT_INSN (last
))
1015 if (recog_memoized (last
) < 0)
1023 emit_insns_before (seq1
, temp5
);
1025 /* Insert conditional move after insn, to be sure
1026 that the jump and a possible compare won't be
1028 last
= emit_insns_after (seq2
, insn
);
1030 /* ??? We can also delete the insn that sets X to A.
1031 Flow will do it too though. */
1033 next
= NEXT_INSN (insn
);
1038 reg_scan_update (seq1
, NEXT_INSN (last
),
1040 old_max_reg
= max_reg_num ();
1052 /* That didn't work, try a store-flag insn.
1054 We further divide the cases into:
1056 1) x = a; if (...) x = b; and either A or B is zero,
1057 2) if (...) x = 0; and jumps are expensive,
1058 3) x = a; if (...) x = b; and A and B are constants where all
1059 the set bits in A are also set in B and jumps are expensive,
1060 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
1062 5) if (...) x = b; if jumps are even more expensive. */
1064 if (GET_MODE_CLASS (GET_MODE (temp1
)) == MODE_INT
1065 && ((GET_CODE (temp3
) == CONST_INT
)
1066 /* Make the latter case look like
1067 x = x; if (...) x = 0; */
1070 && temp2
== const0_rtx
)
1071 || BRANCH_COST
>= 3)))
1072 /* If B is zero, OK; if A is zero, can only do (1) if we
1073 can reverse the condition. See if (3) applies possibly
1074 by reversing the condition. Prefer reversing to (4) when
1075 branches are very expensive. */
1076 && (((BRANCH_COST
>= 2
1077 || STORE_FLAG_VALUE
== -1
1078 || (STORE_FLAG_VALUE
== 1
1079 /* Check that the mask is a power of two,
1080 so that it can probably be generated
1082 && GET_CODE (temp3
) == CONST_INT
1083 && exact_log2 (INTVAL (temp3
)) >= 0))
1084 && (reversep
= 0, temp2
== const0_rtx
))
1085 || ((BRANCH_COST
>= 2
1086 || STORE_FLAG_VALUE
== -1
1087 || (STORE_FLAG_VALUE
== 1
1088 && GET_CODE (temp2
) == CONST_INT
1089 && exact_log2 (INTVAL (temp2
)) >= 0))
1090 && temp3
== const0_rtx
1091 && (reversep
= can_reverse_comparison_p (temp4
, insn
)))
1092 || (BRANCH_COST
>= 2
1093 && GET_CODE (temp2
) == CONST_INT
1094 && GET_CODE (temp3
) == CONST_INT
1095 && ((INTVAL (temp2
) & INTVAL (temp3
)) == INTVAL (temp2
)
1096 || ((INTVAL (temp2
) & INTVAL (temp3
)) == INTVAL (temp3
)
1097 && (reversep
= can_reverse_comparison_p (temp4
,
1099 || BRANCH_COST
>= 3)
1102 enum rtx_code code
= GET_CODE (temp4
);
1103 rtx uval
, cval
, var
= temp1
;
1107 /* If necessary, reverse the condition. */
1109 code
= reverse_condition (code
), uval
= temp2
, cval
= temp3
;
1111 uval
= temp3
, cval
= temp2
;
1113 /* If CVAL is non-zero, normalize to -1. Otherwise, if UVAL
1114 is the constant 1, it is best to just compute the result
1115 directly. If UVAL is constant and STORE_FLAG_VALUE
1116 includes all of its bits, it is best to compute the flag
1117 value unnormalized and `and' it with UVAL. Otherwise,
1118 normalize to -1 and `and' with UVAL. */
1119 normalizep
= (cval
!= const0_rtx
? -1
1120 : (uval
== const1_rtx
? 1
1121 : (GET_CODE (uval
) == CONST_INT
1122 && (INTVAL (uval
) & ~STORE_FLAG_VALUE
) == 0)
1125 /* We will be putting the store-flag insn immediately in
1126 front of the comparison that was originally being done,
1127 so we know all the variables in TEMP4 will be valid.
1128 However, this might be in front of the assignment of
1129 A to VAR. If it is, it would clobber the store-flag
1130 we will be emitting.
1132 Therefore, emit into a temporary which will be copied to
1133 VAR immediately after TEMP. */
1136 target
= emit_store_flag (gen_reg_rtx (GET_MODE (var
)), code
,
1137 XEXP (temp4
, 0), XEXP (temp4
, 1),
1139 (code
== LTU
|| code
== LEU
1140 || code
== GEU
|| code
== GTU
),
1150 /* Put the store-flag insns in front of the first insn
1151 used to compute the condition to ensure that we
1152 use the same values of them as the current
1153 comparison. However, the remainder of the insns we
1154 generate will be placed directly in front of the
1155 jump insn, in case any of the pseudos we use
1156 are modified earlier. */
1158 emit_insns_before (seq
, temp5
);
1162 /* Both CVAL and UVAL are non-zero. */
1163 if (cval
!= const0_rtx
&& uval
!= const0_rtx
)
1167 tem1
= expand_and (uval
, target
, NULL_RTX
);
1168 if (GET_CODE (cval
) == CONST_INT
1169 && GET_CODE (uval
) == CONST_INT
1170 && (INTVAL (cval
) & INTVAL (uval
)) == INTVAL (cval
))
1174 tem2
= expand_unop (GET_MODE (var
), one_cmpl_optab
,
1175 target
, NULL_RTX
, 0);
1176 tem2
= expand_and (cval
, tem2
,
1177 (GET_CODE (tem2
) == REG
1181 /* If we usually make new pseudos, do so here. This
1182 turns out to help machines that have conditional
1184 /* ??? Conditional moves have already been handled.
1185 This may be obsolete. */
1187 if (flag_expensive_optimizations
)
1190 target
= expand_binop (GET_MODE (var
), ior_optab
,
1194 else if (normalizep
!= 1)
1196 /* We know that either CVAL or UVAL is zero. If
1197 UVAL is zero, negate TARGET and `and' with CVAL.
1198 Otherwise, `and' with UVAL. */
1199 if (uval
== const0_rtx
)
1201 target
= expand_unop (GET_MODE (var
), one_cmpl_optab
,
1202 target
, NULL_RTX
, 0);
1206 target
= expand_and (uval
, target
,
1207 (GET_CODE (target
) == REG
1208 && ! preserve_subexpressions_p ()
1209 ? target
: NULL_RTX
));
1212 emit_move_insn (var
, target
);
1216 /* If INSN uses CC0, we must not separate it from the
1217 insn that sets cc0. */
1218 if (reg_mentioned_p (cc0_rtx
, PATTERN (before
)))
1219 before
= prev_nonnote_insn (before
);
1221 emit_insns_before (seq
, before
);
1224 next
= NEXT_INSN (insn
);
1229 reg_scan_update (seq
, NEXT_INSN (next
), old_max_reg
);
1230 old_max_reg
= max_reg_num ();
1241 /* If branches are expensive, convert
1242 if (foo) bar++; to bar += (foo != 0);
1243 and similarly for "bar--;"
1245 INSN is the conditional branch around the arithmetic. We set:
1247 TEMP is the arithmetic insn.
1248 TEMP1 is the SET doing the arithmetic.
1249 TEMP2 is the operand being incremented or decremented.
1250 TEMP3 to the condition being tested.
1251 TEMP4 to the earliest insn used to find the condition. */
1253 if ((BRANCH_COST
>= 2
1261 && ! reload_completed
1262 && this_is_condjump
&& ! this_is_simplejump
1263 && (temp
= next_nonnote_insn (insn
)) != 0
1264 && (temp1
= single_set (temp
)) != 0
1265 && (temp2
= SET_DEST (temp1
),
1266 GET_MODE_CLASS (GET_MODE (temp2
)) == MODE_INT
)
1267 && GET_CODE (SET_SRC (temp1
)) == PLUS
1268 && (XEXP (SET_SRC (temp1
), 1) == const1_rtx
1269 || XEXP (SET_SRC (temp1
), 1) == constm1_rtx
)
1270 && rtx_equal_p (temp2
, XEXP (SET_SRC (temp1
), 0))
1271 && ! side_effects_p (temp2
)
1272 && ! may_trap_p (temp2
)
1273 /* INSN must either branch to the insn after TEMP or the insn
1274 after TEMP must branch to the same place as INSN. */
1275 && (reallabelprev
== temp
1276 || ((temp3
= next_active_insn (temp
)) != 0
1277 && simplejump_p (temp3
)
1278 && JUMP_LABEL (temp3
) == JUMP_LABEL (insn
)))
1279 && (temp3
= get_condition (insn
, &temp4
)) != 0
1280 /* We must be comparing objects whose modes imply the size.
1281 We could handle BLKmode if (1) emit_store_flag could
1282 and (2) we could find the size reliably. */
1283 && GET_MODE (XEXP (temp3
, 0)) != BLKmode
1284 && can_reverse_comparison_p (temp3
, insn
))
1286 rtx temp6
, target
= 0, seq
, init_insn
= 0, init
= temp2
;
1287 enum rtx_code code
= reverse_condition (GET_CODE (temp3
));
1291 /* It must be the case that TEMP2 is not modified in the range
1292 [TEMP4, INSN). The one exception we make is if the insn
1293 before INSN sets TEMP2 to something which is also unchanged
1294 in that range. In that case, we can move the initialization
1295 into our sequence. */
1297 if ((temp5
= prev_active_insn (insn
)) != 0
1298 && no_labels_between_p (temp5
, insn
)
1299 && GET_CODE (temp5
) == INSN
1300 && (temp6
= single_set (temp5
)) != 0
1301 && rtx_equal_p (temp2
, SET_DEST (temp6
))
1302 && (CONSTANT_P (SET_SRC (temp6
))
1303 || GET_CODE (SET_SRC (temp6
)) == REG
1304 || GET_CODE (SET_SRC (temp6
)) == SUBREG
))
1306 emit_insn (PATTERN (temp5
));
1308 init
= SET_SRC (temp6
);
1311 if (CONSTANT_P (init
)
1312 || ! reg_set_between_p (init
, PREV_INSN (temp4
), insn
))
1313 target
= emit_store_flag (gen_reg_rtx (GET_MODE (temp2
)), code
,
1314 XEXP (temp3
, 0), XEXP (temp3
, 1),
1316 (code
== LTU
|| code
== LEU
1317 || code
== GTU
|| code
== GEU
), 1);
1319 /* If we can do the store-flag, do the addition or
1323 target
= expand_binop (GET_MODE (temp2
),
1324 (XEXP (SET_SRC (temp1
), 1) == const1_rtx
1325 ? add_optab
: sub_optab
),
1326 temp2
, target
, temp2
, 0, OPTAB_WIDEN
);
1330 /* Put the result back in temp2 in case it isn't already.
1331 Then replace the jump, possibly a CC0-setting insn in
1332 front of the jump, and TEMP, with the sequence we have
1335 if (target
!= temp2
)
1336 emit_move_insn (temp2
, target
);
1341 emit_insns_before (seq
, temp4
);
1345 delete_insn (init_insn
);
1347 next
= NEXT_INSN (insn
);
1349 delete_insn (prev_nonnote_insn (insn
));
1355 reg_scan_update (seq
, NEXT_INSN (next
), old_max_reg
);
1356 old_max_reg
= max_reg_num ();
1366 /* Simplify if (...) x = 1; else {...} if (x) ...
1367 We recognize this case scanning backwards as well.
1369 TEMP is the assignment to x;
1370 TEMP1 is the label at the head of the second if. */
1371 /* ?? This should call get_condition to find the values being
1372 compared, instead of looking for a COMPARE insn when HAVE_cc0
1373 is not defined. This would allow it to work on the m88k. */
1374 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1375 is not defined and the condition is tested by a separate compare
1376 insn. This is because the code below assumes that the result
1377 of the compare dies in the following branch.
1379 Not only that, but there might be other insns between the
1380 compare and branch whose results are live. Those insns need
1383 A way to fix this is to move the insns at JUMP_LABEL (insn)
1384 to before INSN. If we are running before flow, they will
1385 be deleted if they aren't needed. But this doesn't work
1388 This is really a special-case of jump threading, anyway. The
1389 right thing to do is to replace this and jump threading with
1390 much simpler code in cse.
1392 This code has been turned off in the non-cc0 case in the
1396 else if (this_is_simplejump
1397 /* Safe to skip USE and CLOBBER insns here
1398 since they will not be deleted. */
1399 && (temp
= prev_active_insn (insn
))
1400 && no_labels_between_p (temp
, insn
)
1401 && GET_CODE (temp
) == INSN
1402 && GET_CODE (PATTERN (temp
)) == SET
1403 && GET_CODE (SET_DEST (PATTERN (temp
))) == REG
1404 && CONSTANT_P (SET_SRC (PATTERN (temp
)))
1405 && (temp1
= next_active_insn (JUMP_LABEL (insn
)))
1406 /* If we find that the next value tested is `x'
1407 (TEMP1 is the insn where this happens), win. */
1408 && GET_CODE (temp1
) == INSN
1409 && GET_CODE (PATTERN (temp1
)) == SET
1411 /* Does temp1 `tst' the value of x? */
1412 && SET_SRC (PATTERN (temp1
)) == SET_DEST (PATTERN (temp
))
1413 && SET_DEST (PATTERN (temp1
)) == cc0_rtx
1414 && (temp1
= next_nonnote_insn (temp1
))
1416 /* Does temp1 compare the value of x against zero? */
1417 && GET_CODE (SET_SRC (PATTERN (temp1
))) == COMPARE
1418 && XEXP (SET_SRC (PATTERN (temp1
)), 1) == const0_rtx
1419 && (XEXP (SET_SRC (PATTERN (temp1
)), 0)
1420 == SET_DEST (PATTERN (temp
)))
1421 && GET_CODE (SET_DEST (PATTERN (temp1
))) == REG
1422 && (temp1
= find_next_ref (SET_DEST (PATTERN (temp1
)), temp1
))
1424 && condjump_p (temp1
))
1426 /* Get the if_then_else from the condjump. */
1427 rtx choice
= SET_SRC (PATTERN (temp1
));
1428 if (GET_CODE (choice
) == IF_THEN_ELSE
)
1430 enum rtx_code code
= GET_CODE (XEXP (choice
, 0));
1431 rtx val
= SET_SRC (PATTERN (temp
));
1433 = simplify_relational_operation (code
, GET_MODE (SET_DEST (PATTERN (temp
))),
1437 if (cond
== const_true_rtx
)
1438 ultimate
= XEXP (choice
, 1);
1439 else if (cond
== const0_rtx
)
1440 ultimate
= XEXP (choice
, 2);
1444 if (ultimate
== pc_rtx
)
1445 ultimate
= get_label_after (temp1
);
1446 else if (ultimate
&& GET_CODE (ultimate
) != RETURN
)
1447 ultimate
= XEXP (ultimate
, 0);
1449 if (ultimate
&& JUMP_LABEL(insn
) != ultimate
)
1450 changed
|= redirect_jump (insn
, ultimate
);
1456 /* @@ This needs a bit of work before it will be right.
1458 Any type of comparison can be accepted for the first and
1459 second compare. When rewriting the first jump, we must
1460 compute what conditions can reach label3, and use the
1461 appropriate code. We can not simply reverse/swap the code
1462 of the first jump. In some cases, the second jump must be
1466 < == converts to > ==
1467 < != converts to == >
1470 If the code is written to only accept an '==' test for the second
1471 compare, then all that needs to be done is to swap the condition
1472 of the first branch.
1474 It is questionable whether we want this optimization anyways,
1475 since if the user wrote code like this because he/she knew that
1476 the jump to label1 is taken most of the time, then rewriting
1477 this gives slower code. */
1478 /* @@ This should call get_condition to find the values being
1479 compared, instead of looking for a COMPARE insn when HAVE_cc0
1480 is not defined. This would allow it to work on the m88k. */
1481 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1482 is not defined and the condition is tested by a separate compare
1483 insn. This is because the code below assumes that the result
1484 of the compare dies in the following branch. */
1486 /* Simplify test a ~= b
1500 where ~= is an inequality, e.g. >, and ~~= is the swapped
1503 We recognize this case scanning backwards.
1505 TEMP is the conditional jump to `label2';
1506 TEMP1 is the test for `a == b';
1507 TEMP2 is the conditional jump to `label1';
1508 TEMP3 is the test for `a ~= b'. */
1509 else if (this_is_simplejump
1510 && (temp
= prev_active_insn (insn
))
1511 && no_labels_between_p (temp
, insn
)
1512 && condjump_p (temp
)
1513 && (temp1
= prev_active_insn (temp
))
1514 && no_labels_between_p (temp1
, temp
)
1515 && GET_CODE (temp1
) == INSN
1516 && GET_CODE (PATTERN (temp1
)) == SET
1518 && sets_cc0_p (PATTERN (temp1
)) == 1
1520 && GET_CODE (SET_SRC (PATTERN (temp1
))) == COMPARE
1521 && GET_CODE (SET_DEST (PATTERN (temp1
))) == REG
1522 && (temp
== find_next_ref (SET_DEST (PATTERN (temp1
)), temp1
))
1524 && (temp2
= prev_active_insn (temp1
))
1525 && no_labels_between_p (temp2
, temp1
)
1526 && condjump_p (temp2
)
1527 && JUMP_LABEL (temp2
) == next_nonnote_insn (NEXT_INSN (insn
))
1528 && (temp3
= prev_active_insn (temp2
))
1529 && no_labels_between_p (temp3
, temp2
)
1530 && GET_CODE (PATTERN (temp3
)) == SET
1531 && rtx_equal_p (SET_DEST (PATTERN (temp3
)),
1532 SET_DEST (PATTERN (temp1
)))
1533 && rtx_equal_p (SET_SRC (PATTERN (temp1
)),
1534 SET_SRC (PATTERN (temp3
)))
1535 && ! inequality_comparisons_p (PATTERN (temp
))
1536 && inequality_comparisons_p (PATTERN (temp2
)))
1538 rtx fallthrough_label
= JUMP_LABEL (temp2
);
1540 ++LABEL_NUSES (fallthrough_label
);
1541 if (swap_jump (temp2
, JUMP_LABEL (insn
)))
1547 if (--LABEL_NUSES (fallthrough_label
) == 0)
1548 delete_insn (fallthrough_label
);
1551 /* Simplify if (...) {... x = 1;} if (x) ...
1553 We recognize this case backwards.
1555 TEMP is the test of `x';
1556 TEMP1 is the assignment to `x' at the end of the
1557 previous statement. */
1558 /* @@ This should call get_condition to find the values being
1559 compared, instead of looking for a COMPARE insn when HAVE_cc0
1560 is not defined. This would allow it to work on the m88k. */
1561 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1562 is not defined and the condition is tested by a separate compare
1563 insn. This is because the code below assumes that the result
1564 of the compare dies in the following branch. */
1566 /* ??? This has to be turned off. The problem is that the
1567 unconditional jump might indirectly end up branching to the
1568 label between TEMP1 and TEMP. We can't detect this, in general,
1569 since it may become a jump to there after further optimizations.
1570 If that jump is done, it will be deleted, so we will retry
1571 this optimization in the next pass, thus an infinite loop.
1573 The present code prevents this by putting the jump after the
1574 label, but this is not logically correct. */
1576 else if (this_is_condjump
1577 /* Safe to skip USE and CLOBBER insns here
1578 since they will not be deleted. */
1579 && (temp
= prev_active_insn (insn
))
1580 && no_labels_between_p (temp
, insn
)
1581 && GET_CODE (temp
) == INSN
1582 && GET_CODE (PATTERN (temp
)) == SET
1584 && sets_cc0_p (PATTERN (temp
)) == 1
1585 && GET_CODE (SET_SRC (PATTERN (temp
))) == REG
1587 /* Temp must be a compare insn, we can not accept a register
1588 to register move here, since it may not be simply a
1590 && GET_CODE (SET_SRC (PATTERN (temp
))) == COMPARE
1591 && XEXP (SET_SRC (PATTERN (temp
)), 1) == const0_rtx
1592 && GET_CODE (XEXP (SET_SRC (PATTERN (temp
)), 0)) == REG
1593 && GET_CODE (SET_DEST (PATTERN (temp
))) == REG
1594 && insn
== find_next_ref (SET_DEST (PATTERN (temp
)), temp
)
1596 /* May skip USE or CLOBBER insns here
1597 for checking for opportunity, since we
1598 take care of them later. */
1599 && (temp1
= prev_active_insn (temp
))
1600 && GET_CODE (temp1
) == INSN
1601 && GET_CODE (PATTERN (temp1
)) == SET
1603 && SET_SRC (PATTERN (temp
)) == SET_DEST (PATTERN (temp1
))
1605 && (XEXP (SET_SRC (PATTERN (temp
)), 0)
1606 == SET_DEST (PATTERN (temp1
)))
1608 && CONSTANT_P (SET_SRC (PATTERN (temp1
)))
1609 /* If this isn't true, cse will do the job. */
1610 && ! no_labels_between_p (temp1
, temp
))
1612 /* Get the if_then_else from the condjump. */
1613 rtx choice
= SET_SRC (PATTERN (insn
));
1614 if (GET_CODE (choice
) == IF_THEN_ELSE
1615 && (GET_CODE (XEXP (choice
, 0)) == EQ
1616 || GET_CODE (XEXP (choice
, 0)) == NE
))
1618 int want_nonzero
= (GET_CODE (XEXP (choice
, 0)) == NE
);
1623 /* Get the place that condjump will jump to
1624 if it is reached from here. */
1625 if ((SET_SRC (PATTERN (temp1
)) != const0_rtx
)
1627 ultimate
= XEXP (choice
, 1);
1629 ultimate
= XEXP (choice
, 2);
1630 /* Get it as a CODE_LABEL. */
1631 if (ultimate
== pc_rtx
)
1632 ultimate
= get_label_after (insn
);
1634 /* Get the label out of the LABEL_REF. */
1635 ultimate
= XEXP (ultimate
, 0);
1637 /* Insert the jump immediately before TEMP, specifically
1638 after the label that is between TEMP1 and TEMP. */
1639 last_insn
= PREV_INSN (temp
);
1641 /* If we would be branching to the next insn, the jump
1642 would immediately be deleted and then re-inserted in
1643 a subsequent pass over the code. So don't do anything
1645 if (next_active_insn (last_insn
)
1646 != next_active_insn (ultimate
))
1648 emit_barrier_after (last_insn
);
1649 p
= emit_jump_insn_after (gen_jump (ultimate
),
1651 JUMP_LABEL (p
) = ultimate
;
1652 ++LABEL_NUSES (ultimate
);
1653 if (INSN_UID (ultimate
) < max_jump_chain
1654 && INSN_CODE (p
) < max_jump_chain
)
1656 jump_chain
[INSN_UID (p
)]
1657 = jump_chain
[INSN_UID (ultimate
)];
1658 jump_chain
[INSN_UID (ultimate
)] = p
;
1666 /* Detect a conditional jump going to the same place
1667 as an immediately following unconditional jump. */
1668 else if (this_is_condjump
1669 && (temp
= next_active_insn (insn
)) != 0
1670 && simplejump_p (temp
)
1671 && (next_active_insn (JUMP_LABEL (insn
))
1672 == next_active_insn (JUMP_LABEL (temp
))))
1676 /* ??? Optional. Disables some optimizations, but makes
1677 gcov output more accurate with -O. */
1678 if (flag_test_coverage
&& !reload_completed
)
1679 for (tem
= insn
; tem
!= temp
; tem
= NEXT_INSN (tem
))
1680 if (GET_CODE (tem
) == NOTE
&& NOTE_LINE_NUMBER (tem
) > 0)
1691 /* Detect a conditional jump jumping over an unconditional trap. */
1693 && this_is_condjump
&& ! this_is_simplejump
1694 && reallabelprev
!= 0
1695 && GET_CODE (reallabelprev
) == INSN
1696 && GET_CODE (PATTERN (reallabelprev
)) == TRAP_IF
1697 && TRAP_CONDITION (PATTERN (reallabelprev
)) == const_true_rtx
1698 && prev_active_insn (reallabelprev
) == insn
1699 && no_labels_between_p (insn
, reallabelprev
)
1700 && (temp2
= get_condition (insn
, &temp4
))
1701 && can_reverse_comparison_p (temp2
, insn
))
1703 rtx
new = gen_cond_trap (reverse_condition (GET_CODE (temp2
)),
1704 XEXP (temp2
, 0), XEXP (temp2
, 1),
1705 TRAP_CODE (PATTERN (reallabelprev
)));
1709 emit_insn_before (new, temp4
);
1710 delete_insn (reallabelprev
);
1716 /* Detect a jump jumping to an unconditional trap. */
1717 else if (HAVE_trap
&& this_is_condjump
1718 && (temp
= next_active_insn (JUMP_LABEL (insn
)))
1719 && GET_CODE (temp
) == INSN
1720 && GET_CODE (PATTERN (temp
)) == TRAP_IF
1721 && (this_is_simplejump
1722 || (temp2
= get_condition (insn
, &temp4
))))
1724 rtx tc
= TRAP_CONDITION (PATTERN (temp
));
1726 if (tc
== const_true_rtx
1727 || (! this_is_simplejump
&& rtx_equal_p (temp2
, tc
)))
1730 /* Replace an unconditional jump to a trap with a trap. */
1731 if (this_is_simplejump
)
1733 emit_barrier_after (emit_insn_before (gen_trap (), insn
));
1738 new = gen_cond_trap (GET_CODE (temp2
), XEXP (temp2
, 0),
1740 TRAP_CODE (PATTERN (temp
)));
1743 emit_insn_before (new, temp4
);
1749 /* If the trap condition and jump condition are mutually
1750 exclusive, redirect the jump to the following insn. */
1751 else if (GET_RTX_CLASS (GET_CODE (tc
)) == '<'
1752 && ! this_is_simplejump
1753 && swap_condition (GET_CODE (temp2
)) == GET_CODE (tc
)
1754 && rtx_equal_p (XEXP (tc
, 0), XEXP (temp2
, 0))
1755 && rtx_equal_p (XEXP (tc
, 1), XEXP (temp2
, 1))
1756 && redirect_jump (insn
, get_label_after (temp
)))
1764 /* Detect a conditional jump jumping over an unconditional jump. */
1766 else if ((this_is_condjump
|| this_is_condjump_in_parallel
)
1767 && ! this_is_simplejump
1768 && reallabelprev
!= 0
1769 && GET_CODE (reallabelprev
) == JUMP_INSN
1770 && prev_active_insn (reallabelprev
) == insn
1771 && no_labels_between_p (insn
, reallabelprev
)
1772 && simplejump_p (reallabelprev
))
1774 /* When we invert the unconditional jump, we will be
1775 decrementing the usage count of its old label.
1776 Make sure that we don't delete it now because that
1777 might cause the following code to be deleted. */
1778 rtx prev_uses
= prev_nonnote_insn (reallabelprev
);
1779 rtx prev_label
= JUMP_LABEL (insn
);
1782 ++LABEL_NUSES (prev_label
);
1784 if (invert_jump (insn
, JUMP_LABEL (reallabelprev
)))
1786 /* It is very likely that if there are USE insns before
1787 this jump, they hold REG_DEAD notes. These REG_DEAD
1788 notes are no longer valid due to this optimization,
1789 and will cause the life-analysis that following passes
1790 (notably delayed-branch scheduling) to think that
1791 these registers are dead when they are not.
1793 To prevent this trouble, we just remove the USE insns
1794 from the insn chain. */
1796 while (prev_uses
&& GET_CODE (prev_uses
) == INSN
1797 && GET_CODE (PATTERN (prev_uses
)) == USE
)
1799 rtx useless
= prev_uses
;
1800 prev_uses
= prev_nonnote_insn (prev_uses
);
1801 delete_insn (useless
);
1804 delete_insn (reallabelprev
);
1809 /* We can now safely delete the label if it is unreferenced
1810 since the delete_insn above has deleted the BARRIER. */
1811 if (prev_label
&& --LABEL_NUSES (prev_label
) == 0)
1812 delete_insn (prev_label
);
1817 /* Detect a jump to a jump. */
1819 nlabel
= follow_jumps (JUMP_LABEL (insn
));
1820 if (nlabel
!= JUMP_LABEL (insn
)
1821 && redirect_jump (insn
, nlabel
))
1827 /* Look for if (foo) bar; else break; */
1828 /* The insns look like this:
1829 insn = condjump label1;
1830 ...range1 (some insns)...
1833 ...range2 (some insns)...
1834 jump somewhere unconditionally
1837 rtx label1
= next_label (insn
);
1838 rtx range1end
= label1
? prev_active_insn (label1
) : 0;
1839 /* Don't do this optimization on the first round, so that
1840 jump-around-a-jump gets simplified before we ask here
1841 whether a jump is unconditional.
1843 Also don't do it when we are called after reload since
1844 it will confuse reorg. */
1846 && (reload_completed
? ! flag_delayed_branch
: 1)
1847 /* Make sure INSN is something we can invert. */
1848 && condjump_p (insn
)
1850 && JUMP_LABEL (insn
) == label1
1851 && LABEL_NUSES (label1
) == 1
1852 && GET_CODE (range1end
) == JUMP_INSN
1853 && simplejump_p (range1end
))
1855 rtx label2
= next_label (label1
);
1856 rtx range2end
= label2
? prev_active_insn (label2
) : 0;
1857 if (range1end
!= range2end
1858 && JUMP_LABEL (range1end
) == label2
1859 && GET_CODE (range2end
) == JUMP_INSN
1860 && GET_CODE (NEXT_INSN (range2end
)) == BARRIER
1861 /* Invert the jump condition, so we
1862 still execute the same insns in each case. */
1863 && invert_jump (insn
, label1
))
1865 rtx range1beg
= next_active_insn (insn
);
1866 rtx range2beg
= next_active_insn (label1
);
1867 rtx range1after
, range2after
;
1868 rtx range1before
, range2before
;
1871 /* Include in each range any notes before it, to be
1872 sure that we get the line number note if any, even
1873 if there are other notes here. */
1874 while (PREV_INSN (range1beg
)
1875 && GET_CODE (PREV_INSN (range1beg
)) == NOTE
)
1876 range1beg
= PREV_INSN (range1beg
);
1878 while (PREV_INSN (range2beg
)
1879 && GET_CODE (PREV_INSN (range2beg
)) == NOTE
)
1880 range2beg
= PREV_INSN (range2beg
);
1882 /* Don't move NOTEs for blocks or loops; shift them
1883 outside the ranges, where they'll stay put. */
1884 range1beg
= squeeze_notes (range1beg
, range1end
);
1885 range2beg
= squeeze_notes (range2beg
, range2end
);
1887 /* Get current surrounds of the 2 ranges. */
1888 range1before
= PREV_INSN (range1beg
);
1889 range2before
= PREV_INSN (range2beg
);
1890 range1after
= NEXT_INSN (range1end
);
1891 range2after
= NEXT_INSN (range2end
);
1893 /* Splice range2 where range1 was. */
1894 NEXT_INSN (range1before
) = range2beg
;
1895 PREV_INSN (range2beg
) = range1before
;
1896 NEXT_INSN (range2end
) = range1after
;
1897 PREV_INSN (range1after
) = range2end
;
1898 /* Splice range1 where range2 was. */
1899 NEXT_INSN (range2before
) = range1beg
;
1900 PREV_INSN (range1beg
) = range2before
;
1901 NEXT_INSN (range1end
) = range2after
;
1902 PREV_INSN (range2after
) = range1end
;
1904 /* Check for a loop end note between the end of
1905 range2, and the next code label. If there is one,
1906 then what we have really seen is
1907 if (foo) break; end_of_loop;
1908 and moved the break sequence outside the loop.
1909 We must move the LOOP_END note to where the
1910 loop really ends now, or we will confuse loop
1911 optimization. Stop if we find a LOOP_BEG note
1912 first, since we don't want to move the LOOP_END
1913 note in that case. */
1914 for (;range2after
!= label2
; range2after
= rangenext
)
1916 rangenext
= NEXT_INSN (range2after
);
1917 if (GET_CODE (range2after
) == NOTE
)
1919 if (NOTE_LINE_NUMBER (range2after
)
1920 == NOTE_INSN_LOOP_END
)
1922 NEXT_INSN (PREV_INSN (range2after
))
1924 PREV_INSN (rangenext
)
1925 = PREV_INSN (range2after
);
1926 PREV_INSN (range2after
)
1927 = PREV_INSN (range1beg
);
1928 NEXT_INSN (range2after
) = range1beg
;
1929 NEXT_INSN (PREV_INSN (range1beg
))
1931 PREV_INSN (range1beg
) = range2after
;
1933 else if (NOTE_LINE_NUMBER (range2after
)
1934 == NOTE_INSN_LOOP_BEG
)
1944 /* Now that the jump has been tensioned,
1945 try cross jumping: check for identical code
1946 before the jump and before its target label. */
1948 /* First, cross jumping of conditional jumps: */
1950 if (cross_jump
&& condjump_p (insn
))
1952 rtx newjpos
, newlpos
;
1953 rtx x
= prev_real_insn (JUMP_LABEL (insn
));
1955 /* A conditional jump may be crossjumped
1956 only if the place it jumps to follows
1957 an opposing jump that comes back here. */
1959 if (x
!= 0 && ! jump_back_p (x
, insn
))
1960 /* We have no opposing jump;
1961 cannot cross jump this insn. */
1965 /* TARGET is nonzero if it is ok to cross jump
1966 to code before TARGET. If so, see if matches. */
1968 find_cross_jump (insn
, x
, 2,
1969 &newjpos
, &newlpos
);
1973 do_cross_jump (insn
, newjpos
, newlpos
);
1974 /* Make the old conditional jump
1975 into an unconditional one. */
1976 SET_SRC (PATTERN (insn
))
1977 = gen_rtx_LABEL_REF (VOIDmode
, JUMP_LABEL (insn
));
1978 INSN_CODE (insn
) = -1;
1979 emit_barrier_after (insn
);
1980 /* Add to jump_chain unless this is a new label
1981 whose UID is too large. */
1982 if (INSN_UID (JUMP_LABEL (insn
)) < max_jump_chain
)
1984 jump_chain
[INSN_UID (insn
)]
1985 = jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
1986 jump_chain
[INSN_UID (JUMP_LABEL (insn
))] = insn
;
1993 /* Cross jumping of unconditional jumps:
1994 a few differences. */
1996 if (cross_jump
&& simplejump_p (insn
))
1998 rtx newjpos
, newlpos
;
2003 /* TARGET is nonzero if it is ok to cross jump
2004 to code before TARGET. If so, see if matches. */
2005 find_cross_jump (insn
, JUMP_LABEL (insn
), 1,
2006 &newjpos
, &newlpos
);
2008 /* If cannot cross jump to code before the label,
2009 see if we can cross jump to another jump to
2011 /* Try each other jump to this label. */
2012 if (INSN_UID (JUMP_LABEL (insn
)) < max_uid
)
2013 for (target
= jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
2014 target
!= 0 && newjpos
== 0;
2015 target
= jump_chain
[INSN_UID (target
)])
2017 && JUMP_LABEL (target
) == JUMP_LABEL (insn
)
2018 /* Ignore TARGET if it's deleted. */
2019 && ! INSN_DELETED_P (target
))
2020 find_cross_jump (insn
, target
, 2,
2021 &newjpos
, &newlpos
);
2025 do_cross_jump (insn
, newjpos
, newlpos
);
2031 /* This code was dead in the previous jump.c! */
2032 if (cross_jump
&& GET_CODE (PATTERN (insn
)) == RETURN
)
2034 /* Return insns all "jump to the same place"
2035 so we can cross-jump between any two of them. */
2037 rtx newjpos
, newlpos
, target
;
2041 /* If cannot cross jump to code before the label,
2042 see if we can cross jump to another jump to
2044 /* Try each other jump to this label. */
2045 for (target
= jump_chain
[0];
2046 target
!= 0 && newjpos
== 0;
2047 target
= jump_chain
[INSN_UID (target
)])
2049 && ! INSN_DELETED_P (target
)
2050 && GET_CODE (PATTERN (target
)) == RETURN
)
2051 find_cross_jump (insn
, target
, 2,
2052 &newjpos
, &newlpos
);
2056 do_cross_jump (insn
, newjpos
, newlpos
);
2067 /* Delete extraneous line number notes.
2068 Note that two consecutive notes for different lines are not really
2069 extraneous. There should be some indication where that line belonged,
2070 even if it became empty. */
2075 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
2076 if (GET_CODE (insn
) == NOTE
&& NOTE_LINE_NUMBER (insn
) >= 0)
2078 /* Delete this note if it is identical to previous note. */
2080 && NOTE_SOURCE_FILE (insn
) == NOTE_SOURCE_FILE (last_note
)
2081 && NOTE_LINE_NUMBER (insn
) == NOTE_LINE_NUMBER (last_note
))
2094 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2095 in front of it. If the machine allows it at this point (we might be
2096 after reload for a leaf routine), it will improve optimization for it
2097 to be there. We do this both here and at the start of this pass since
2098 the RETURN might have been deleted by some of our optimizations. */
2099 insn
= get_last_insn ();
2100 while (insn
&& GET_CODE (insn
) == NOTE
)
2101 insn
= PREV_INSN (insn
);
2103 if (insn
&& GET_CODE (insn
) != BARRIER
)
2105 emit_jump_insn (gen_return ());
2111 /* CAN_REACH_END is persistent for each function. Once set it should
2112 not be cleared. This is especially true for the case where we
2113 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
2114 the front-end before compiling each function. */
2115 if (calculate_can_reach_end (last_insn
, 0, 1))
2118 /* Show JUMP_CHAIN no longer valid. */
2122 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2123 notes whose labels don't occur in the insn any more. Returns the
2124 largest INSN_UID found. */
2129 int largest_uid
= 0;
2132 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
2134 if (GET_CODE (insn
) == CODE_LABEL
)
2135 LABEL_NUSES (insn
) = (LABEL_PRESERVE_P (insn
) != 0);
2136 else if (GET_CODE (insn
) == JUMP_INSN
)
2137 JUMP_LABEL (insn
) = 0;
2138 else if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
2142 for (note
= REG_NOTES (insn
); note
; note
= next
)
2144 next
= XEXP (note
, 1);
2145 if (REG_NOTE_KIND (note
) == REG_LABEL
2146 && ! reg_mentioned_p (XEXP (note
, 0), PATTERN (insn
)))
2147 remove_note (insn
, note
);
2150 if (INSN_UID (insn
) > largest_uid
)
2151 largest_uid
= INSN_UID (insn
);
2157 /* Delete insns following barriers, up to next label.
2159 Also delete no-op jumps created by gcse. */
2161 delete_barrier_successors (f
)
2166 for (insn
= f
; insn
;)
2168 if (GET_CODE (insn
) == BARRIER
)
2170 insn
= NEXT_INSN (insn
);
2172 never_reached_warning (insn
);
2174 while (insn
!= 0 && GET_CODE (insn
) != CODE_LABEL
)
2176 if (GET_CODE (insn
) == NOTE
2177 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
)
2178 insn
= NEXT_INSN (insn
);
2180 insn
= delete_insn (insn
);
2182 /* INSN is now the code_label. */
2184 /* Also remove (set (pc) (pc)) insns which can be created by
2185 gcse. We eliminate such insns now to avoid having them
2186 cause problems later. */
2187 else if (GET_CODE (insn
) == JUMP_INSN
2188 && GET_CODE (PATTERN (insn
)) == SET
2189 && SET_SRC (PATTERN (insn
)) == pc_rtx
2190 && SET_DEST (PATTERN (insn
)) == pc_rtx
)
2191 insn
= delete_insn (insn
);
2194 insn
= NEXT_INSN (insn
);
2198 /* Mark the label each jump jumps to.
2199 Combine consecutive labels, and count uses of labels.
2201 For each label, make a chain (using `jump_chain')
2202 of all the *unconditional* jumps that jump to it;
2203 also make a chain of all returns.
2205 CROSS_JUMP indicates whether we are doing cross jumping
2206 and if we are whether we will be paying attention to
2207 death notes or not. */
2210 mark_all_labels (f
, cross_jump
)
2216 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
2217 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
2219 mark_jump_label (PATTERN (insn
), insn
, cross_jump
);
2220 if (! INSN_DELETED_P (insn
) && GET_CODE (insn
) == JUMP_INSN
)
2222 if (JUMP_LABEL (insn
) != 0 && simplejump_p (insn
))
2224 jump_chain
[INSN_UID (insn
)]
2225 = jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
2226 jump_chain
[INSN_UID (JUMP_LABEL (insn
))] = insn
;
2228 if (GET_CODE (PATTERN (insn
)) == RETURN
)
2230 jump_chain
[INSN_UID (insn
)] = jump_chain
[0];
2231 jump_chain
[0] = insn
;
2237 /* Delete all labels already not referenced.
2238 Also find and return the last insn. */
2241 delete_unreferenced_labels (f
)
2244 rtx final
= NULL_RTX
;
2247 for (insn
= f
; insn
; )
2249 if (GET_CODE (insn
) == CODE_LABEL
&& LABEL_NUSES (insn
) == 0)
2250 insn
= delete_insn (insn
);
2254 insn
= NEXT_INSN (insn
);
2261 /* Delete various simple forms of moves which have no necessary
2265 delete_noop_moves (f
)
2270 for (insn
= f
; insn
; )
2272 next
= NEXT_INSN (insn
);
2274 if (GET_CODE (insn
) == INSN
)
2276 register rtx body
= PATTERN (insn
);
2278 /* Combine stack_adjusts with following push_insns. */
2279 #ifdef PUSH_ROUNDING
2280 if (GET_CODE (body
) == SET
2281 && SET_DEST (body
) == stack_pointer_rtx
2282 && GET_CODE (SET_SRC (body
)) == PLUS
2283 && XEXP (SET_SRC (body
), 0) == stack_pointer_rtx
2284 && GET_CODE (XEXP (SET_SRC (body
), 1)) == CONST_INT
2285 && INTVAL (XEXP (SET_SRC (body
), 1)) > 0)
2288 rtx stack_adjust_insn
= insn
;
2289 int stack_adjust_amount
= INTVAL (XEXP (SET_SRC (body
), 1));
2290 int total_pushed
= 0;
2293 /* Find all successive push insns. */
2295 /* Don't convert more than three pushes;
2296 that starts adding too many displaced addresses
2297 and the whole thing starts becoming a losing
2302 p
= next_nonnote_insn (p
);
2303 if (p
== 0 || GET_CODE (p
) != INSN
)
2305 pbody
= PATTERN (p
);
2306 if (GET_CODE (pbody
) != SET
)
2308 dest
= SET_DEST (pbody
);
2309 /* Allow a no-op move between the adjust and the push. */
2310 if (GET_CODE (dest
) == REG
2311 && GET_CODE (SET_SRC (pbody
)) == REG
2312 && REGNO (dest
) == REGNO (SET_SRC (pbody
)))
2314 if (! (GET_CODE (dest
) == MEM
2315 && GET_CODE (XEXP (dest
, 0)) == POST_INC
2316 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
))
2319 if (total_pushed
+ GET_MODE_SIZE (GET_MODE (SET_DEST (pbody
)))
2320 > stack_adjust_amount
)
2322 total_pushed
+= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody
)));
2325 /* Discard the amount pushed from the stack adjust;
2326 maybe eliminate it entirely. */
2327 if (total_pushed
>= stack_adjust_amount
)
2329 delete_computation (stack_adjust_insn
);
2330 total_pushed
= stack_adjust_amount
;
2333 XEXP (SET_SRC (PATTERN (stack_adjust_insn
)), 1)
2334 = GEN_INT (stack_adjust_amount
- total_pushed
);
2336 /* Change the appropriate push insns to ordinary stores. */
2338 while (total_pushed
> 0)
2341 p
= next_nonnote_insn (p
);
2342 if (GET_CODE (p
) != INSN
)
2344 pbody
= PATTERN (p
);
2345 if (GET_CODE (pbody
) != SET
)
2347 dest
= SET_DEST (pbody
);
2348 /* Allow a no-op move between the adjust and the push. */
2349 if (GET_CODE (dest
) == REG
2350 && GET_CODE (SET_SRC (pbody
)) == REG
2351 && REGNO (dest
) == REGNO (SET_SRC (pbody
)))
2353 if (! (GET_CODE (dest
) == MEM
2354 && GET_CODE (XEXP (dest
, 0)) == POST_INC
2355 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
))
2357 total_pushed
-= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody
)));
2358 /* If this push doesn't fully fit in the space
2359 of the stack adjust that we deleted,
2360 make another stack adjust here for what we
2361 didn't use up. There should be peepholes
2362 to recognize the resulting sequence of insns. */
2363 if (total_pushed
< 0)
2365 emit_insn_before (gen_add2_insn (stack_pointer_rtx
,
2366 GEN_INT (- total_pushed
)),
2371 = plus_constant (stack_pointer_rtx
, total_pushed
);
2376 /* Detect and delete no-op move instructions
2377 resulting from not allocating a parameter in a register. */
2379 if (GET_CODE (body
) == SET
2380 && (SET_DEST (body
) == SET_SRC (body
)
2381 || (GET_CODE (SET_DEST (body
)) == MEM
2382 && GET_CODE (SET_SRC (body
)) == MEM
2383 && rtx_equal_p (SET_SRC (body
), SET_DEST (body
))))
2384 && ! (GET_CODE (SET_DEST (body
)) == MEM
2385 && MEM_VOLATILE_P (SET_DEST (body
)))
2386 && ! (GET_CODE (SET_SRC (body
)) == MEM
2387 && MEM_VOLATILE_P (SET_SRC (body
))))
2388 delete_computation (insn
);
2390 /* Detect and ignore no-op move instructions
2391 resulting from smart or fortuitous register allocation. */
2393 else if (GET_CODE (body
) == SET
)
2395 int sreg
= true_regnum (SET_SRC (body
));
2396 int dreg
= true_regnum (SET_DEST (body
));
2398 if (sreg
== dreg
&& sreg
>= 0)
2400 else if (sreg
>= 0 && dreg
>= 0)
2403 rtx tem
= find_equiv_reg (NULL_RTX
, insn
, 0,
2404 sreg
, NULL_PTR
, dreg
,
2405 GET_MODE (SET_SRC (body
)));
2408 && GET_MODE (tem
) == GET_MODE (SET_DEST (body
)))
2410 /* DREG may have been the target of a REG_DEAD note in
2411 the insn which makes INSN redundant. If so, reorg
2412 would still think it is dead. So search for such a
2413 note and delete it if we find it. */
2414 if (! find_regno_note (insn
, REG_UNUSED
, dreg
))
2415 for (trial
= prev_nonnote_insn (insn
);
2416 trial
&& GET_CODE (trial
) != CODE_LABEL
;
2417 trial
= prev_nonnote_insn (trial
))
2418 if (find_regno_note (trial
, REG_DEAD
, dreg
))
2420 remove_death (dreg
, trial
);
2424 /* Deleting insn could lose a death-note for SREG. */
2425 if ((trial
= find_regno_note (insn
, REG_DEAD
, sreg
)))
2427 /* Change this into a USE so that we won't emit
2428 code for it, but still can keep the note. */
2430 = gen_rtx_USE (VOIDmode
, XEXP (trial
, 0));
2431 INSN_CODE (insn
) = -1;
2432 /* Remove all reg notes but the REG_DEAD one. */
2433 REG_NOTES (insn
) = trial
;
2434 XEXP (trial
, 1) = NULL_RTX
;
2440 else if (dreg
>= 0 && CONSTANT_P (SET_SRC (body
))
2441 && find_equiv_reg (SET_SRC (body
), insn
, 0, dreg
,
2443 GET_MODE (SET_DEST (body
))))
2445 /* This handles the case where we have two consecutive
2446 assignments of the same constant to pseudos that didn't
2447 get a hard reg. Each SET from the constant will be
2448 converted into a SET of the spill register and an
2449 output reload will be made following it. This produces
2450 two loads of the same constant into the same spill
2455 /* Look back for a death note for the first reg.
2456 If there is one, it is no longer accurate. */
2457 while (in_insn
&& GET_CODE (in_insn
) != CODE_LABEL
)
2459 if ((GET_CODE (in_insn
) == INSN
2460 || GET_CODE (in_insn
) == JUMP_INSN
)
2461 && find_regno_note (in_insn
, REG_DEAD
, dreg
))
2463 remove_death (dreg
, in_insn
);
2466 in_insn
= PREV_INSN (in_insn
);
2469 /* Delete the second load of the value. */
2473 else if (GET_CODE (body
) == PARALLEL
)
2475 /* If each part is a set between two identical registers or
2476 a USE or CLOBBER, delete the insn. */
2480 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
2482 tem
= XVECEXP (body
, 0, i
);
2483 if (GET_CODE (tem
) == USE
|| GET_CODE (tem
) == CLOBBER
)
2486 if (GET_CODE (tem
) != SET
2487 || (sreg
= true_regnum (SET_SRC (tem
))) < 0
2488 || (dreg
= true_regnum (SET_DEST (tem
))) < 0
2496 /* Also delete insns to store bit fields if they are no-ops. */
2497 /* Not worth the hair to detect this in the big-endian case. */
2498 else if (! BYTES_BIG_ENDIAN
2499 && GET_CODE (body
) == SET
2500 && GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
2501 && XEXP (SET_DEST (body
), 2) == const0_rtx
2502 && XEXP (SET_DEST (body
), 0) == SET_SRC (body
)
2503 && ! (GET_CODE (SET_SRC (body
)) == MEM
2504 && MEM_VOLATILE_P (SET_SRC (body
))))
2511 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2512 If so indicate that this function can drop off the end by returning
2515 CHECK_DELETED indicates whether we must check if the note being
2516 searched for has the deleted flag set.
2518 DELETE_FINAL_NOTE indicates whether we should delete the note
2522 calculate_can_reach_end (last
, check_deleted
, delete_final_note
)
2525 int delete_final_note
;
2530 while (insn
!= NULL_RTX
)
2534 /* One label can follow the end-note: the return label. */
2535 if (GET_CODE (insn
) == CODE_LABEL
&& n_labels
-- > 0)
2537 /* Ordinary insns can follow it if returning a structure. */
2538 else if (GET_CODE (insn
) == INSN
)
2540 /* If machine uses explicit RETURN insns, no epilogue,
2541 then one of them follows the note. */
2542 else if (GET_CODE (insn
) == JUMP_INSN
2543 && GET_CODE (PATTERN (insn
)) == RETURN
)
2545 /* A barrier can follow the return insn. */
2546 else if (GET_CODE (insn
) == BARRIER
)
2548 /* Other kinds of notes can follow also. */
2549 else if (GET_CODE (insn
) == NOTE
2550 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
)
2556 insn
= PREV_INSN (insn
);
2559 /* See if we backed up to the appropriate type of note. */
2560 if (insn
!= NULL_RTX
2561 && GET_CODE (insn
) == NOTE
2562 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_END
2563 && (check_deleted
== 0
2564 || ! INSN_DELETED_P (insn
)))
2566 if (delete_final_note
)
2574 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2575 jump. Assume that this unconditional jump is to the exit test code. If
2576 the code is sufficiently simple, make a copy of it before INSN,
2577 followed by a jump to the exit of the loop. Then delete the unconditional
2580 Return 1 if we made the change, else 0.
2582 This is only safe immediately after a regscan pass because it uses the
2583 values of regno_first_uid and regno_last_uid. */
2586 duplicate_loop_exit_test (loop_start
)
2589 rtx insn
, set
, reg
, p
, link
;
2590 rtx copy
= 0, first_copy
= 0;
2592 rtx exitcode
= NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start
)));
2594 int max_reg
= max_reg_num ();
2597 /* Scan the exit code. We do not perform this optimization if any insn:
2601 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2602 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2603 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2606 We also do not do this if we find an insn with ASM_OPERANDS. While
2607 this restriction should not be necessary, copying an insn with
2608 ASM_OPERANDS can confuse asm_noperands in some cases.
2610 Also, don't do this if the exit code is more than 20 insns. */
2612 for (insn
= exitcode
;
2614 && ! (GET_CODE (insn
) == NOTE
2615 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
);
2616 insn
= NEXT_INSN (insn
))
2618 switch (GET_CODE (insn
))
2624 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2625 a jump immediately after the loop start that branches outside
2626 the loop but within an outer loop, near the exit test.
2627 If we copied this exit test and created a phony
2628 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2629 before the exit test look like these could be safely moved
2630 out of the loop even if they actually may be never executed.
2631 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2633 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
2634 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
)
2638 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
2639 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
))
2640 /* If we were to duplicate this code, we would not move
2641 the BLOCK notes, and so debugging the moved code would
2642 be difficult. Thus, we only move the code with -O2 or
2649 /* The code below would grossly mishandle REG_WAS_0 notes,
2650 so get rid of them here. */
2651 while ((p
= find_reg_note (insn
, REG_WAS_0
, NULL_RTX
)) != 0)
2652 remove_note (insn
, p
);
2653 if (++num_insns
> 20
2654 || find_reg_note (insn
, REG_RETVAL
, NULL_RTX
)
2655 || find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
)
2656 || asm_noperands (PATTERN (insn
)) > 0)
2664 /* Unless INSN is zero, we can do the optimization. */
2670 /* See if any insn sets a register only used in the loop exit code and
2671 not a user variable. If so, replace it with a new register. */
2672 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
2673 if (GET_CODE (insn
) == INSN
2674 && (set
= single_set (insn
)) != 0
2675 && ((reg
= SET_DEST (set
), GET_CODE (reg
) == REG
)
2676 || (GET_CODE (reg
) == SUBREG
2677 && (reg
= SUBREG_REG (reg
), GET_CODE (reg
) == REG
)))
2678 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
2679 && REGNO_FIRST_UID (REGNO (reg
)) == INSN_UID (insn
))
2681 for (p
= NEXT_INSN (insn
); p
!= lastexit
; p
= NEXT_INSN (p
))
2682 if (REGNO_LAST_UID (REGNO (reg
)) == INSN_UID (p
))
2687 /* We can do the replacement. Allocate reg_map if this is the
2688 first replacement we found. */
2691 reg_map
= (rtx
*) alloca (max_reg
* sizeof (rtx
));
2692 bzero ((char *) reg_map
, max_reg
* sizeof (rtx
));
2695 REG_LOOP_TEST_P (reg
) = 1;
2697 reg_map
[REGNO (reg
)] = gen_reg_rtx (GET_MODE (reg
));
2701 /* Now copy each insn. */
2702 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
2704 switch (GET_CODE (insn
))
2707 copy
= emit_barrier_before (loop_start
);
2710 /* Only copy line-number notes. */
2711 if (NOTE_LINE_NUMBER (insn
) >= 0)
2713 copy
= emit_note_before (NOTE_LINE_NUMBER (insn
), loop_start
);
2714 NOTE_SOURCE_FILE (copy
) = NOTE_SOURCE_FILE (insn
);
2719 copy
= emit_insn_before (copy_rtx (PATTERN (insn
)), loop_start
);
2721 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
2723 mark_jump_label (PATTERN (copy
), copy
, 0);
2725 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2727 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
2728 if (REG_NOTE_KIND (link
) != REG_LABEL
)
2730 = copy_rtx (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link
),
2733 if (reg_map
&& REG_NOTES (copy
))
2734 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
2738 copy
= emit_jump_insn_before (copy_rtx (PATTERN (insn
)), loop_start
);
2740 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
2741 mark_jump_label (PATTERN (copy
), copy
, 0);
2742 if (REG_NOTES (insn
))
2744 REG_NOTES (copy
) = copy_rtx (REG_NOTES (insn
));
2746 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
2749 /* If this is a simple jump, add it to the jump chain. */
2751 if (INSN_UID (copy
) < max_jump_chain
&& JUMP_LABEL (copy
)
2752 && simplejump_p (copy
))
2754 jump_chain
[INSN_UID (copy
)]
2755 = jump_chain
[INSN_UID (JUMP_LABEL (copy
))];
2756 jump_chain
[INSN_UID (JUMP_LABEL (copy
))] = copy
;
2764 /* Record the first insn we copied. We need it so that we can
2765 scan the copied insns for new pseudo registers. */
2770 /* Now clean up by emitting a jump to the end label and deleting the jump
2771 at the start of the loop. */
2772 if (! copy
|| GET_CODE (copy
) != BARRIER
)
2774 copy
= emit_jump_insn_before (gen_jump (get_label_after (insn
)),
2777 /* Record the first insn we copied. We need it so that we can
2778 scan the copied insns for new pseudo registers. This may not
2779 be strictly necessary since we should have copied at least one
2780 insn above. But I am going to be safe. */
2784 mark_jump_label (PATTERN (copy
), copy
, 0);
2785 if (INSN_UID (copy
) < max_jump_chain
2786 && INSN_UID (JUMP_LABEL (copy
)) < max_jump_chain
)
2788 jump_chain
[INSN_UID (copy
)]
2789 = jump_chain
[INSN_UID (JUMP_LABEL (copy
))];
2790 jump_chain
[INSN_UID (JUMP_LABEL (copy
))] = copy
;
2792 emit_barrier_before (loop_start
);
2795 /* Now scan from the first insn we copied to the last insn we copied
2796 (copy) for new pseudo registers. Do this after the code to jump to
2797 the end label since that might create a new pseudo too. */
2798 reg_scan_update (first_copy
, copy
, max_reg
);
2800 /* Mark the exit code as the virtual top of the converted loop. */
2801 emit_note_before (NOTE_INSN_LOOP_VTOP
, exitcode
);
2803 delete_insn (next_nonnote_insn (loop_start
));
2808 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2809 loop-end notes between START and END out before START. Assume that
2810 END is not such a note. START may be such a note. Returns the value
2811 of the new starting insn, which may be different if the original start
2815 squeeze_notes (start
, end
)
2821 for (insn
= start
; insn
!= end
; insn
= next
)
2823 next
= NEXT_INSN (insn
);
2824 if (GET_CODE (insn
) == NOTE
2825 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
2826 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
2827 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
2828 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
2829 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
2830 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_VTOP
))
2836 rtx prev
= PREV_INSN (insn
);
2837 PREV_INSN (insn
) = PREV_INSN (start
);
2838 NEXT_INSN (insn
) = start
;
2839 NEXT_INSN (PREV_INSN (insn
)) = insn
;
2840 PREV_INSN (NEXT_INSN (insn
)) = insn
;
2841 NEXT_INSN (prev
) = next
;
2842 PREV_INSN (next
) = prev
;
2850 /* Compare the instructions before insn E1 with those before E2
2851 to find an opportunity for cross jumping.
2852 (This means detecting identical sequences of insns followed by
2853 jumps to the same place, or followed by a label and a jump
2854 to that label, and replacing one with a jump to the other.)
2856 Assume E1 is a jump that jumps to label E2
2857 (that is not always true but it might as well be).
2858 Find the longest possible equivalent sequences
2859 and store the first insns of those sequences into *F1 and *F2.
2860 Store zero there if no equivalent preceding instructions are found.
2862 We give up if we find a label in stream 1.
2863 Actually we could transfer that label into stream 2. */
2866 find_cross_jump (e1
, e2
, minimum
, f1
, f2
)
2871 register rtx i1
= e1
, i2
= e2
;
2872 register rtx p1
, p2
;
2875 rtx last1
= 0, last2
= 0;
2876 rtx afterlast1
= 0, afterlast2
= 0;
2883 i1
= prev_nonnote_insn (i1
);
2885 i2
= PREV_INSN (i2
);
2886 while (i2
&& (GET_CODE (i2
) == NOTE
|| GET_CODE (i2
) == CODE_LABEL
))
2887 i2
= PREV_INSN (i2
);
2892 /* Don't allow the range of insns preceding E1 or E2
2893 to include the other (E2 or E1). */
2894 if (i2
== e1
|| i1
== e2
)
2897 /* If we will get to this code by jumping, those jumps will be
2898 tensioned to go directly to the new label (before I2),
2899 so this cross-jumping won't cost extra. So reduce the minimum. */
2900 if (GET_CODE (i1
) == CODE_LABEL
)
2906 if (i2
== 0 || GET_CODE (i1
) != GET_CODE (i2
))
2909 /* Avoid moving insns across EH regions if either of the insns
2912 && (asynchronous_exceptions
|| GET_CODE (i1
) == CALL_INSN
)
2913 && !in_same_eh_region (i1
, i2
))
2919 /* If this is a CALL_INSN, compare register usage information.
2920 If we don't check this on stack register machines, the two
2921 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2922 numbers of stack registers in the same basic block.
2923 If we don't check this on machines with delay slots, a delay slot may
2924 be filled that clobbers a parameter expected by the subroutine.
2926 ??? We take the simple route for now and assume that if they're
2927 equal, they were constructed identically. */
2929 if (GET_CODE (i1
) == CALL_INSN
2930 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1
),
2931 CALL_INSN_FUNCTION_USAGE (i2
)))
2935 /* If cross_jump_death_matters is not 0, the insn's mode
2936 indicates whether or not the insn contains any stack-like
2939 if (!lose
&& cross_jump_death_matters
&& stack_regs_mentioned (i1
))
2941 /* If register stack conversion has already been done, then
2942 death notes must also be compared before it is certain that
2943 the two instruction streams match. */
2946 HARD_REG_SET i1_regset
, i2_regset
;
2948 CLEAR_HARD_REG_SET (i1_regset
);
2949 CLEAR_HARD_REG_SET (i2_regset
);
2951 for (note
= REG_NOTES (i1
); note
; note
= XEXP (note
, 1))
2952 if (REG_NOTE_KIND (note
) == REG_DEAD
2953 && STACK_REG_P (XEXP (note
, 0)))
2954 SET_HARD_REG_BIT (i1_regset
, REGNO (XEXP (note
, 0)));
2956 for (note
= REG_NOTES (i2
); note
; note
= XEXP (note
, 1))
2957 if (REG_NOTE_KIND (note
) == REG_DEAD
2958 && STACK_REG_P (XEXP (note
, 0)))
2959 SET_HARD_REG_BIT (i2_regset
, REGNO (XEXP (note
, 0)));
2961 GO_IF_HARD_REG_EQUAL (i1_regset
, i2_regset
, done
);
2970 /* Don't allow old-style asm or volatile extended asms to be accepted
2971 for cross jumping purposes. It is conceptually correct to allow
2972 them, since cross-jumping preserves the dynamic instruction order
2973 even though it is changing the static instruction order. However,
2974 if an asm is being used to emit an assembler pseudo-op, such as
2975 the MIPS `.set reorder' pseudo-op, then the static instruction order
2976 matters and it must be preserved. */
2977 if (GET_CODE (p1
) == ASM_INPUT
|| GET_CODE (p2
) == ASM_INPUT
2978 || (GET_CODE (p1
) == ASM_OPERANDS
&& MEM_VOLATILE_P (p1
))
2979 || (GET_CODE (p2
) == ASM_OPERANDS
&& MEM_VOLATILE_P (p2
)))
2982 if (lose
|| GET_CODE (p1
) != GET_CODE (p2
)
2983 || ! rtx_renumbered_equal_p (p1
, p2
))
2985 /* The following code helps take care of G++ cleanups. */
2989 if (!lose
&& GET_CODE (p1
) == GET_CODE (p2
)
2990 && ((equiv1
= find_reg_note (i1
, REG_EQUAL
, NULL_RTX
)) != 0
2991 || (equiv1
= find_reg_note (i1
, REG_EQUIV
, NULL_RTX
)) != 0)
2992 && ((equiv2
= find_reg_note (i2
, REG_EQUAL
, NULL_RTX
)) != 0
2993 || (equiv2
= find_reg_note (i2
, REG_EQUIV
, NULL_RTX
)) != 0)
2994 /* If the equivalences are not to a constant, they may
2995 reference pseudos that no longer exist, so we can't
2997 && CONSTANT_P (XEXP (equiv1
, 0))
2998 && rtx_equal_p (XEXP (equiv1
, 0), XEXP (equiv2
, 0)))
3000 rtx s1
= single_set (i1
);
3001 rtx s2
= single_set (i2
);
3002 if (s1
!= 0 && s2
!= 0
3003 && rtx_renumbered_equal_p (SET_DEST (s1
), SET_DEST (s2
)))
3005 validate_change (i1
, &SET_SRC (s1
), XEXP (equiv1
, 0), 1);
3006 validate_change (i2
, &SET_SRC (s2
), XEXP (equiv2
, 0), 1);
3007 if (! rtx_renumbered_equal_p (p1
, p2
))
3009 else if (apply_change_group ())
3014 /* Insns fail to match; cross jumping is limited to the following
3018 /* Don't allow the insn after a compare to be shared by
3019 cross-jumping unless the compare is also shared.
3020 Here, if either of these non-matching insns is a compare,
3021 exclude the following insn from possible cross-jumping. */
3022 if (sets_cc0_p (p1
) || sets_cc0_p (p2
))
3023 last1
= afterlast1
, last2
= afterlast2
, ++minimum
;
3026 /* If cross-jumping here will feed a jump-around-jump
3027 optimization, this jump won't cost extra, so reduce
3029 if (GET_CODE (i1
) == JUMP_INSN
3031 && prev_real_insn (JUMP_LABEL (i1
)) == e1
)
3037 if (GET_CODE (p1
) != USE
&& GET_CODE (p1
) != CLOBBER
)
3039 /* Ok, this insn is potentially includable in a cross-jump here. */
3040 afterlast1
= last1
, afterlast2
= last2
;
3041 last1
= i1
, last2
= i2
, --minimum
;
3045 if (minimum
<= 0 && last1
!= 0 && last1
!= e1
)
3046 *f1
= last1
, *f2
= last2
;
3050 do_cross_jump (insn
, newjpos
, newlpos
)
3051 rtx insn
, newjpos
, newlpos
;
3053 /* Find an existing label at this point
3054 or make a new one if there is none. */
3055 register rtx label
= get_label_before (newlpos
);
3057 /* Make the same jump insn jump to the new point. */
3058 if (GET_CODE (PATTERN (insn
)) == RETURN
)
3060 /* Remove from jump chain of returns. */
3061 delete_from_jump_chain (insn
);
3062 /* Change the insn. */
3063 PATTERN (insn
) = gen_jump (label
);
3064 INSN_CODE (insn
) = -1;
3065 JUMP_LABEL (insn
) = label
;
3066 LABEL_NUSES (label
)++;
3067 /* Add to new the jump chain. */
3068 if (INSN_UID (label
) < max_jump_chain
3069 && INSN_UID (insn
) < max_jump_chain
)
3071 jump_chain
[INSN_UID (insn
)] = jump_chain
[INSN_UID (label
)];
3072 jump_chain
[INSN_UID (label
)] = insn
;
3076 redirect_jump (insn
, label
);
3078 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3079 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3080 the NEWJPOS stream. */
3082 while (newjpos
!= insn
)
3086 for (lnote
= REG_NOTES (newlpos
); lnote
; lnote
= XEXP (lnote
, 1))
3087 if ((REG_NOTE_KIND (lnote
) == REG_EQUAL
3088 || REG_NOTE_KIND (lnote
) == REG_EQUIV
)
3089 && ! find_reg_note (newjpos
, REG_EQUAL
, XEXP (lnote
, 0))
3090 && ! find_reg_note (newjpos
, REG_EQUIV
, XEXP (lnote
, 0)))
3091 remove_note (newlpos
, lnote
);
3093 delete_insn (newjpos
);
3094 newjpos
= next_real_insn (newjpos
);
3095 newlpos
= next_real_insn (newlpos
);
3099 /* Return the label before INSN, or put a new label there. */
3102 get_label_before (insn
)
3107 /* Find an existing label at this point
3108 or make a new one if there is none. */
3109 label
= prev_nonnote_insn (insn
);
3111 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
3113 rtx prev
= PREV_INSN (insn
);
3115 label
= gen_label_rtx ();
3116 emit_label_after (label
, prev
);
3117 LABEL_NUSES (label
) = 0;
3122 /* Return the label after INSN, or put a new label there. */
3125 get_label_after (insn
)
3130 /* Find an existing label at this point
3131 or make a new one if there is none. */
3132 label
= next_nonnote_insn (insn
);
3134 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
3136 label
= gen_label_rtx ();
3137 emit_label_after (label
, insn
);
3138 LABEL_NUSES (label
) = 0;
3143 /* Return 1 if INSN is a jump that jumps to right after TARGET
3144 only on the condition that TARGET itself would drop through.
3145 Assumes that TARGET is a conditional jump. */
3148 jump_back_p (insn
, target
)
3152 enum rtx_code codei
, codet
;
3154 if (simplejump_p (insn
) || ! condjump_p (insn
)
3155 || simplejump_p (target
)
3156 || target
!= prev_real_insn (JUMP_LABEL (insn
)))
3159 cinsn
= XEXP (SET_SRC (PATTERN (insn
)), 0);
3160 ctarget
= XEXP (SET_SRC (PATTERN (target
)), 0);
3162 codei
= GET_CODE (cinsn
);
3163 codet
= GET_CODE (ctarget
);
3165 if (XEXP (SET_SRC (PATTERN (insn
)), 1) == pc_rtx
)
3167 if (! can_reverse_comparison_p (cinsn
, insn
))
3169 codei
= reverse_condition (codei
);
3172 if (XEXP (SET_SRC (PATTERN (target
)), 2) == pc_rtx
)
3174 if (! can_reverse_comparison_p (ctarget
, target
))
3176 codet
= reverse_condition (codet
);
3179 return (codei
== codet
3180 && rtx_renumbered_equal_p (XEXP (cinsn
, 0), XEXP (ctarget
, 0))
3181 && rtx_renumbered_equal_p (XEXP (cinsn
, 1), XEXP (ctarget
, 1)));
3184 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3185 return non-zero if it is safe to reverse this comparison. It is if our
3186 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3187 this is known to be an integer comparison. */
3190 can_reverse_comparison_p (comparison
, insn
)
3196 /* If this is not actually a comparison, we can't reverse it. */
3197 if (GET_RTX_CLASS (GET_CODE (comparison
)) != '<')
3200 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3201 /* If this is an NE comparison, it is safe to reverse it to an EQ
3202 comparison and vice versa, even for floating point. If no operands
3203 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3204 always false and NE is always true, so the reversal is also valid. */
3206 || GET_CODE (comparison
) == NE
3207 || GET_CODE (comparison
) == EQ
)
3210 arg0
= XEXP (comparison
, 0);
3212 /* Make sure ARG0 is one of the actual objects being compared. If we
3213 can't do this, we can't be sure the comparison can be reversed.
3215 Handle cc0 and a MODE_CC register. */
3216 if ((GET_CODE (arg0
) == REG
&& GET_MODE_CLASS (GET_MODE (arg0
)) == MODE_CC
)
3222 rtx prev
= prev_nonnote_insn (insn
);
3225 /* If the comparison itself was a loop invariant, it could have been
3226 hoisted out of the loop. If we proceed to unroll such a loop, then
3227 we may not be able to find the comparison when copying the loop.
3229 Returning zero in that case is the safe thing to do. */
3233 set
= single_set (prev
);
3234 if (set
== 0 || SET_DEST (set
) != arg0
)
3237 arg0
= SET_SRC (set
);
3239 if (GET_CODE (arg0
) == COMPARE
)
3240 arg0
= XEXP (arg0
, 0);
3243 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3244 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3245 return (GET_CODE (arg0
) == CONST_INT
3246 || (GET_MODE (arg0
) != VOIDmode
3247 && GET_MODE_CLASS (GET_MODE (arg0
)) != MODE_CC
3248 && GET_MODE_CLASS (GET_MODE (arg0
)) != MODE_FLOAT
));
3251 /* Given an rtx-code for a comparison, return the code
3252 for the negated comparison.
3253 WATCH OUT! reverse_condition is not safe to use on a jump
3254 that might be acting on the results of an IEEE floating point comparison,
3255 because of the special treatment of non-signaling nans in comparisons.
3256 Use can_reverse_comparison_p to be sure. */
3259 reverse_condition (code
)
3300 /* Similar, but return the code when two operands of a comparison are swapped.
3301 This IS safe for IEEE floating-point. */
3304 swap_condition (code
)
3343 /* Given a comparison CODE, return the corresponding unsigned comparison.
3344 If CODE is an equality comparison or already an unsigned comparison,
3345 CODE is returned. */
3348 unsigned_condition (code
)
3378 /* Similarly, return the signed version of a comparison. */
3381 signed_condition (code
)
3411 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3412 truth of CODE1 implies the truth of CODE2. */
3415 comparison_dominates_p (code1
, code2
)
3416 enum rtx_code code1
, code2
;
3424 if (code2
== LE
|| code2
== LEU
|| code2
== GE
|| code2
== GEU
)
3429 if (code2
== LE
|| code2
== NE
)
3434 if (code2
== GE
|| code2
== NE
)
3439 if (code2
== LEU
|| code2
== NE
)
3444 if (code2
== GEU
|| code2
== NE
)
3455 /* Return 1 if INSN is an unconditional jump and nothing else. */
3461 return (GET_CODE (insn
) == JUMP_INSN
3462 && GET_CODE (PATTERN (insn
)) == SET
3463 && GET_CODE (SET_DEST (PATTERN (insn
))) == PC
3464 && GET_CODE (SET_SRC (PATTERN (insn
))) == LABEL_REF
);
3467 /* Return nonzero if INSN is a (possibly) conditional jump
3468 and nothing more. */
3474 register rtx x
= PATTERN (insn
);
3475 if (GET_CODE (x
) != SET
)
3477 if (GET_CODE (SET_DEST (x
)) != PC
)
3479 if (GET_CODE (SET_SRC (x
)) == LABEL_REF
)
3481 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
3483 if (XEXP (SET_SRC (x
), 2) == pc_rtx
3484 && (GET_CODE (XEXP (SET_SRC (x
), 1)) == LABEL_REF
3485 || GET_CODE (XEXP (SET_SRC (x
), 1)) == RETURN
))
3487 if (XEXP (SET_SRC (x
), 1) == pc_rtx
3488 && (GET_CODE (XEXP (SET_SRC (x
), 2)) == LABEL_REF
3489 || GET_CODE (XEXP (SET_SRC (x
), 2)) == RETURN
))
3494 /* Return nonzero if INSN is a (possibly) conditional jump
3495 and nothing more. */
3498 condjump_in_parallel_p (insn
)
3501 register rtx x
= PATTERN (insn
);
3503 if (GET_CODE (x
) != PARALLEL
)
3506 x
= XVECEXP (x
, 0, 0);
3508 if (GET_CODE (x
) != SET
)
3510 if (GET_CODE (SET_DEST (x
)) != PC
)
3512 if (GET_CODE (SET_SRC (x
)) == LABEL_REF
)
3514 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
3516 if (XEXP (SET_SRC (x
), 2) == pc_rtx
3517 && (GET_CODE (XEXP (SET_SRC (x
), 1)) == LABEL_REF
3518 || GET_CODE (XEXP (SET_SRC (x
), 1)) == RETURN
))
3520 if (XEXP (SET_SRC (x
), 1) == pc_rtx
3521 && (GET_CODE (XEXP (SET_SRC (x
), 2)) == LABEL_REF
3522 || GET_CODE (XEXP (SET_SRC (x
), 2)) == RETURN
))
3527 /* Return the label of a conditional jump. */
3530 condjump_label (insn
)
3533 register rtx x
= PATTERN (insn
);
3535 if (GET_CODE (x
) == PARALLEL
)
3536 x
= XVECEXP (x
, 0, 0);
3537 if (GET_CODE (x
) != SET
)
3539 if (GET_CODE (SET_DEST (x
)) != PC
)
3542 if (GET_CODE (x
) == LABEL_REF
)
3544 if (GET_CODE (x
) != IF_THEN_ELSE
)
3546 if (XEXP (x
, 2) == pc_rtx
&& GET_CODE (XEXP (x
, 1)) == LABEL_REF
)
3548 if (XEXP (x
, 1) == pc_rtx
&& GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
3553 /* Return true if INSN is a (possibly conditional) return insn. */
3556 returnjump_p_1 (loc
, data
)
3558 void *data ATTRIBUTE_UNUSED
;
3561 return GET_CODE (x
) == RETURN
;
3568 return for_each_rtx (&PATTERN (insn
), returnjump_p_1
, NULL
);
3571 /* Return true if INSN is a jump that only transfers control and
3580 if (GET_CODE (insn
) != JUMP_INSN
)
3583 set
= single_set (insn
);
3586 if (GET_CODE (SET_DEST (set
)) != PC
)
3588 if (side_effects_p (SET_SRC (set
)))
3596 /* Return 1 if X is an RTX that does nothing but set the condition codes
3597 and CLOBBER or USE registers.
3598 Return -1 if X does explicitly set the condition codes,
3599 but also does other things. */
3603 rtx x ATTRIBUTE_UNUSED
;
3605 if (GET_CODE (x
) == SET
&& SET_DEST (x
) == cc0_rtx
)
3607 if (GET_CODE (x
) == PARALLEL
)
3611 int other_things
= 0;
3612 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
3614 if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
3615 && SET_DEST (XVECEXP (x
, 0, i
)) == cc0_rtx
)
3617 else if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
)
3620 return ! sets_cc0
? 0 : other_things
? -1 : 1;
3626 /* Follow any unconditional jump at LABEL;
3627 return the ultimate label reached by any such chain of jumps.
3628 If LABEL is not followed by a jump, return LABEL.
3629 If the chain loops or we can't find end, return LABEL,
3630 since that tells caller to avoid changing the insn.
3632 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3633 a USE or CLOBBER. */
3636 follow_jumps (label
)
3641 register rtx value
= label
;
3646 && (insn
= next_active_insn (value
)) != 0
3647 && GET_CODE (insn
) == JUMP_INSN
3648 && ((JUMP_LABEL (insn
) != 0 && simplejump_p (insn
))
3649 || GET_CODE (PATTERN (insn
)) == RETURN
)
3650 && (next
= NEXT_INSN (insn
))
3651 && GET_CODE (next
) == BARRIER
);
3654 /* Don't chain through the insn that jumps into a loop
3655 from outside the loop,
3656 since that would create multiple loop entry jumps
3657 and prevent loop optimization. */
3659 if (!reload_completed
)
3660 for (tem
= value
; tem
!= insn
; tem
= NEXT_INSN (tem
))
3661 if (GET_CODE (tem
) == NOTE
3662 && (NOTE_LINE_NUMBER (tem
) == NOTE_INSN_LOOP_BEG
3663 /* ??? Optional. Disables some optimizations, but makes
3664 gcov output more accurate with -O. */
3665 || (flag_test_coverage
&& NOTE_LINE_NUMBER (tem
) > 0)))
3668 /* If we have found a cycle, make the insn jump to itself. */
3669 if (JUMP_LABEL (insn
) == label
)
3672 tem
= next_active_insn (JUMP_LABEL (insn
));
3673 if (tem
&& (GET_CODE (PATTERN (tem
)) == ADDR_VEC
3674 || GET_CODE (PATTERN (tem
)) == ADDR_DIFF_VEC
))
3677 value
= JUMP_LABEL (insn
);
3684 /* Assuming that field IDX of X is a vector of label_refs,
3685 replace each of them by the ultimate label reached by it.
3686 Return nonzero if a change is made.
3687 If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG. */
3690 tension_vector_labels (x
, idx
)
3696 for (i
= XVECLEN (x
, idx
) - 1; i
>= 0; i
--)
3698 register rtx olabel
= XEXP (XVECEXP (x
, idx
, i
), 0);
3699 register rtx nlabel
= follow_jumps (olabel
);
3700 if (nlabel
&& nlabel
!= olabel
)
3702 XEXP (XVECEXP (x
, idx
, i
), 0) = nlabel
;
3703 ++LABEL_NUSES (nlabel
);
3704 if (--LABEL_NUSES (olabel
) == 0)
3705 delete_insn (olabel
);
3712 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3713 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3714 in INSN, then store one of them in JUMP_LABEL (INSN).
3715 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3716 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3717 Also, when there are consecutive labels, canonicalize on the last of them.
3719 Note that two labels separated by a loop-beginning note
3720 must be kept distinct if we have not yet done loop-optimization,
3721 because the gap between them is where loop-optimize
3722 will want to move invariant code to. CROSS_JUMP tells us
3723 that loop-optimization is done with.
3725 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3726 two labels distinct if they are separated by only USE or CLOBBER insns. */
3729 mark_jump_label (x
, insn
, cross_jump
)
3734 register RTX_CODE code
= GET_CODE (x
);
3736 register const char *fmt
;
3752 /* If this is a constant-pool reference, see if it is a label. */
3753 if (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
3754 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)))
3755 mark_jump_label (get_pool_constant (XEXP (x
, 0)), insn
, cross_jump
);
3760 rtx label
= XEXP (x
, 0);
3765 if (GET_CODE (label
) != CODE_LABEL
)
3768 /* Ignore references to labels of containing functions. */
3769 if (LABEL_REF_NONLOCAL_P (x
))
3772 /* If there are other labels following this one,
3773 replace it with the last of the consecutive labels. */
3774 for (next
= NEXT_INSN (label
); next
; next
= NEXT_INSN (next
))
3776 if (GET_CODE (next
) == CODE_LABEL
)
3778 else if (cross_jump
&& GET_CODE (next
) == INSN
3779 && (GET_CODE (PATTERN (next
)) == USE
3780 || GET_CODE (PATTERN (next
)) == CLOBBER
))
3782 else if (GET_CODE (next
) != NOTE
)
3784 else if (! cross_jump
3785 && (NOTE_LINE_NUMBER (next
) == NOTE_INSN_LOOP_BEG
3786 || NOTE_LINE_NUMBER (next
) == NOTE_INSN_FUNCTION_END
3787 /* ??? Optional. Disables some optimizations, but
3788 makes gcov output more accurate with -O. */
3789 || (flag_test_coverage
&& NOTE_LINE_NUMBER (next
) > 0)))
3793 XEXP (x
, 0) = label
;
3794 if (! insn
|| ! INSN_DELETED_P (insn
))
3795 ++LABEL_NUSES (label
);
3799 if (GET_CODE (insn
) == JUMP_INSN
)
3800 JUMP_LABEL (insn
) = label
;
3802 /* If we've changed OLABEL and we had a REG_LABEL note
3803 for it, update it as well. */
3804 else if (label
!= olabel
3805 && (note
= find_reg_note (insn
, REG_LABEL
, olabel
)) != 0)
3806 XEXP (note
, 0) = label
;
3808 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3810 else if (! find_reg_note (insn
, REG_LABEL
, label
))
3812 /* This code used to ignore labels which refered to dispatch
3813 tables to avoid flow.c generating worse code.
3815 However, in the presense of global optimizations like
3816 gcse which call find_basic_blocks without calling
3817 life_analysis, not recording such labels will lead
3818 to compiler aborts because of inconsistencies in the
3819 flow graph. So we go ahead and record the label.
3821 It may also be the case that the optimization argument
3822 is no longer valid because of the more accurate cfg
3823 we build in find_basic_blocks -- it no longer pessimizes
3824 code when it finds a REG_LABEL note. */
3825 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_LABEL
, label
,
3832 /* Do walk the labels in a vector, but not the first operand of an
3833 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3836 if (! INSN_DELETED_P (insn
))
3838 int eltnum
= code
== ADDR_DIFF_VEC
? 1 : 0;
3840 for (i
= 0; i
< XVECLEN (x
, eltnum
); i
++)
3841 mark_jump_label (XVECEXP (x
, eltnum
, i
), NULL_RTX
, cross_jump
);
3849 fmt
= GET_RTX_FORMAT (code
);
3850 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3853 mark_jump_label (XEXP (x
, i
), insn
, cross_jump
);
3854 else if (fmt
[i
] == 'E')
3857 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3858 mark_jump_label (XVECEXP (x
, i
, j
), insn
, cross_jump
);
3863 /* If all INSN does is set the pc, delete it,
3864 and delete the insn that set the condition codes for it
3865 if that's what the previous thing was. */
3871 register rtx set
= single_set (insn
);
3873 if (set
&& GET_CODE (SET_DEST (set
)) == PC
)
3874 delete_computation (insn
);
3877 /* Recursively delete prior insns that compute the value (used only by INSN
3878 which the caller is deleting) stored in the register mentioned by NOTE
3879 which is a REG_DEAD note associated with INSN. */
3882 delete_prior_computation (note
, insn
)
3887 rtx reg
= XEXP (note
, 0);
3889 for (our_prev
= prev_nonnote_insn (insn
);
3890 our_prev
&& GET_CODE (our_prev
) == INSN
;
3891 our_prev
= prev_nonnote_insn (our_prev
))
3893 rtx pat
= PATTERN (our_prev
);
3895 /* If we reach a SEQUENCE, it is too complex to try to
3896 do anything with it, so give up. */
3897 if (GET_CODE (pat
) == SEQUENCE
)
3900 if (GET_CODE (pat
) == USE
3901 && GET_CODE (XEXP (pat
, 0)) == INSN
)
3902 /* reorg creates USEs that look like this. We leave them
3903 alone because reorg needs them for its own purposes. */
3906 if (reg_set_p (reg
, pat
))
3908 if (side_effects_p (pat
))
3911 if (GET_CODE (pat
) == PARALLEL
)
3913 /* If we find a SET of something else, we can't
3918 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
3920 rtx part
= XVECEXP (pat
, 0, i
);
3922 if (GET_CODE (part
) == SET
3923 && SET_DEST (part
) != reg
)
3927 if (i
== XVECLEN (pat
, 0))
3928 delete_computation (our_prev
);
3930 else if (GET_CODE (pat
) == SET
3931 && GET_CODE (SET_DEST (pat
)) == REG
)
3933 int dest_regno
= REGNO (SET_DEST (pat
));
3935 = dest_regno
+ (dest_regno
< FIRST_PSEUDO_REGISTER
3936 ? HARD_REGNO_NREGS (dest_regno
,
3937 GET_MODE (SET_DEST (pat
))) : 1);
3938 int regno
= REGNO (reg
);
3939 int endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
3940 ? HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) : 1);
3942 if (dest_regno
>= regno
3943 && dest_endregno
<= endregno
)
3944 delete_computation (our_prev
);
3946 /* We may have a multi-word hard register and some, but not
3947 all, of the words of the register are needed in subsequent
3948 insns. Write REG_UNUSED notes for those parts that were not
3950 else if (dest_regno
<= regno
3951 && dest_endregno
>= endregno
3952 && ! find_regno_note (our_prev
, REG_UNUSED
, REGNO(reg
)))
3956 REG_NOTES (our_prev
)
3957 = gen_rtx_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (our_prev
));
3959 for (i
= dest_regno
; i
< dest_endregno
; i
++)
3960 if (! find_regno_note (our_prev
, REG_UNUSED
, i
))
3963 if (i
== dest_endregno
)
3964 delete_computation (our_prev
);
3971 /* If PAT references the register that dies here, it is an
3972 additional use. Hence any prior SET isn't dead. However, this
3973 insn becomes the new place for the REG_DEAD note. */
3974 if (reg_overlap_mentioned_p (reg
, pat
))
3976 XEXP (note
, 1) = REG_NOTES (our_prev
);
3977 REG_NOTES (our_prev
) = note
;
3983 /* Delete INSN and recursively delete insns that compute values used only
3984 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3985 If we are running before flow.c, we need do nothing since flow.c will
3986 delete dead code. We also can't know if the registers being used are
3987 dead or not at this point.
3989 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3990 nothing other than set a register that dies in this insn, we can delete
3993 On machines with CC0, if CC0 is used in this insn, we may be able to
3994 delete the insn that set it. */
3997 delete_computation (insn
)
4004 if (reg_referenced_p (cc0_rtx
, PATTERN (insn
)))
4006 rtx prev
= prev_nonnote_insn (insn
);
4007 /* We assume that at this stage
4008 CC's are always set explicitly
4009 and always immediately before the jump that
4010 will use them. So if the previous insn
4011 exists to set the CC's, delete it
4012 (unless it performs auto-increments, etc.). */
4013 if (prev
&& GET_CODE (prev
) == INSN
4014 && sets_cc0_p (PATTERN (prev
)))
4016 if (sets_cc0_p (PATTERN (prev
)) > 0
4017 && ! side_effects_p (PATTERN (prev
)))
4018 delete_computation (prev
);
4020 /* Otherwise, show that cc0 won't be used. */
4021 REG_NOTES (prev
) = gen_rtx_EXPR_LIST (REG_UNUSED
,
4022 cc0_rtx
, REG_NOTES (prev
));
4027 #ifdef INSN_SCHEDULING
4028 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
4029 reload has completed. The schedulers need to be fixed. Until
4030 they are, we must not rely on the death notes here. */
4031 if (reload_completed
&& flag_schedule_insns_after_reload
)
4038 set
= single_set (insn
);
4040 for (note
= REG_NOTES (insn
); note
; note
= next
)
4042 next
= XEXP (note
, 1);
4044 if (REG_NOTE_KIND (note
) != REG_DEAD
4045 /* Verify that the REG_NOTE is legitimate. */
4046 || GET_CODE (XEXP (note
, 0)) != REG
)
4049 if (set
&& reg_overlap_mentioned_p (SET_DEST (set
), XEXP (note
, 0)))
4052 delete_prior_computation (note
, insn
);
4055 /* The REG_DEAD note may have been omitted for a register
4056 which is both set and used by the insn. */
4058 && GET_CODE (SET_DEST (set
)) == REG
4059 && reg_mentioned_p (SET_DEST (set
), SET_SRC (set
)))
4061 note
= gen_rtx_EXPR_LIST (REG_DEAD
, SET_DEST (set
), NULL_RTX
);
4062 delete_prior_computation (note
, insn
);
4068 /* Delete insn INSN from the chain of insns and update label ref counts.
4069 May delete some following insns as a consequence; may even delete
4070 a label elsewhere and insns that follow it.
4072 Returns the first insn after INSN that was not deleted. */
4078 register rtx next
= NEXT_INSN (insn
);
4079 register rtx prev
= PREV_INSN (insn
);
4080 register int was_code_label
= (GET_CODE (insn
) == CODE_LABEL
);
4081 register int dont_really_delete
= 0;
4083 while (next
&& INSN_DELETED_P (next
))
4084 next
= NEXT_INSN (next
);
4086 /* This insn is already deleted => return first following nondeleted. */
4087 if (INSN_DELETED_P (insn
))
4091 remove_node_from_expr_list (insn
, &nonlocal_goto_handler_labels
);
4093 /* Don't delete user-declared labels. Convert them to special NOTEs
4095 if (was_code_label
&& LABEL_NAME (insn
) != 0
4096 && optimize
&& ! dont_really_delete
)
4098 PUT_CODE (insn
, NOTE
);
4099 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED_LABEL
;
4100 NOTE_SOURCE_FILE (insn
) = 0;
4101 dont_really_delete
= 1;
4104 /* Mark this insn as deleted. */
4105 INSN_DELETED_P (insn
) = 1;
4107 /* If this is an unconditional jump, delete it from the jump chain. */
4108 if (simplejump_p (insn
))
4109 delete_from_jump_chain (insn
);
4111 /* If instruction is followed by a barrier,
4112 delete the barrier too. */
4114 if (next
!= 0 && GET_CODE (next
) == BARRIER
)
4116 INSN_DELETED_P (next
) = 1;
4117 next
= NEXT_INSN (next
);
4120 /* Patch out INSN (and the barrier if any) */
4122 if (optimize
&& ! dont_really_delete
)
4126 NEXT_INSN (prev
) = next
;
4127 if (GET_CODE (prev
) == INSN
&& GET_CODE (PATTERN (prev
)) == SEQUENCE
)
4128 NEXT_INSN (XVECEXP (PATTERN (prev
), 0,
4129 XVECLEN (PATTERN (prev
), 0) - 1)) = next
;
4134 PREV_INSN (next
) = prev
;
4135 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == SEQUENCE
)
4136 PREV_INSN (XVECEXP (PATTERN (next
), 0, 0)) = prev
;
4139 if (prev
&& NEXT_INSN (prev
) == 0)
4140 set_last_insn (prev
);
4143 /* If deleting a jump, decrement the count of the label,
4144 and delete the label if it is now unused. */
4146 if (GET_CODE (insn
) == JUMP_INSN
&& JUMP_LABEL (insn
))
4148 rtx lab
= JUMP_LABEL (insn
), lab_next
;
4150 if (--LABEL_NUSES (lab
) == 0)
4152 /* This can delete NEXT or PREV,
4153 either directly if NEXT is JUMP_LABEL (INSN),
4154 or indirectly through more levels of jumps. */
4157 /* I feel a little doubtful about this loop,
4158 but I see no clean and sure alternative way
4159 to find the first insn after INSN that is not now deleted.
4160 I hope this works. */
4161 while (next
&& INSN_DELETED_P (next
))
4162 next
= NEXT_INSN (next
);
4165 else if ((lab_next
= next_nonnote_insn (lab
)) != NULL
4166 && GET_CODE (lab_next
) == JUMP_INSN
4167 && (GET_CODE (PATTERN (lab_next
)) == ADDR_VEC
4168 || GET_CODE (PATTERN (lab_next
)) == ADDR_DIFF_VEC
))
4170 /* If we're deleting the tablejump, delete the dispatch table.
4171 We may not be able to kill the label immediately preceeding
4172 just yet, as it might be referenced in code leading up to
4174 delete_insn (lab_next
);
4178 /* Likewise if we're deleting a dispatch table. */
4180 if (GET_CODE (insn
) == JUMP_INSN
4181 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
4182 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
4184 rtx pat
= PATTERN (insn
);
4185 int i
, diff_vec_p
= GET_CODE (pat
) == ADDR_DIFF_VEC
;
4186 int len
= XVECLEN (pat
, diff_vec_p
);
4188 for (i
= 0; i
< len
; i
++)
4189 if (--LABEL_NUSES (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)) == 0)
4190 delete_insn (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0));
4191 while (next
&& INSN_DELETED_P (next
))
4192 next
= NEXT_INSN (next
);
4196 while (prev
&& (INSN_DELETED_P (prev
) || GET_CODE (prev
) == NOTE
))
4197 prev
= PREV_INSN (prev
);
4199 /* If INSN was a label and a dispatch table follows it,
4200 delete the dispatch table. The tablejump must have gone already.
4201 It isn't useful to fall through into a table. */
4204 && NEXT_INSN (insn
) != 0
4205 && GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
4206 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
4207 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
4208 next
= delete_insn (NEXT_INSN (insn
));
4210 /* If INSN was a label, delete insns following it if now unreachable. */
4212 if (was_code_label
&& prev
&& GET_CODE (prev
) == BARRIER
)
4214 register RTX_CODE code
;
4216 && (GET_RTX_CLASS (code
= GET_CODE (next
)) == 'i'
4217 || code
== NOTE
|| code
== BARRIER
4218 || (code
== CODE_LABEL
&& INSN_DELETED_P (next
))))
4221 && NOTE_LINE_NUMBER (next
) != NOTE_INSN_FUNCTION_END
)
4222 next
= NEXT_INSN (next
);
4223 /* Keep going past other deleted labels to delete what follows. */
4224 else if (code
== CODE_LABEL
&& INSN_DELETED_P (next
))
4225 next
= NEXT_INSN (next
);
4227 /* Note: if this deletes a jump, it can cause more
4228 deletion of unreachable code, after a different label.
4229 As long as the value from this recursive call is correct,
4230 this invocation functions correctly. */
4231 next
= delete_insn (next
);
4238 /* Advance from INSN till reaching something not deleted
4239 then return that. May return INSN itself. */
4242 next_nondeleted_insn (insn
)
4245 while (INSN_DELETED_P (insn
))
4246 insn
= NEXT_INSN (insn
);
4250 /* Delete a range of insns from FROM to TO, inclusive.
4251 This is for the sake of peephole optimization, so assume
4252 that whatever these insns do will still be done by a new
4253 peephole insn that will replace them. */
4256 delete_for_peephole (from
, to
)
4257 register rtx from
, to
;
4259 register rtx insn
= from
;
4263 register rtx next
= NEXT_INSN (insn
);
4264 register rtx prev
= PREV_INSN (insn
);
4266 if (GET_CODE (insn
) != NOTE
)
4268 INSN_DELETED_P (insn
) = 1;
4270 /* Patch this insn out of the chain. */
4271 /* We don't do this all at once, because we
4272 must preserve all NOTEs. */
4274 NEXT_INSN (prev
) = next
;
4277 PREV_INSN (next
) = prev
;
4285 /* Note that if TO is an unconditional jump
4286 we *do not* delete the BARRIER that follows,
4287 since the peephole that replaces this sequence
4288 is also an unconditional jump in that case. */
4291 /* We have determined that INSN is never reached, and are about to
4292 delete it. Print a warning if the user asked for one.
4294 To try to make this warning more useful, this should only be called
4295 once per basic block not reached, and it only warns when the basic
4296 block contains more than one line from the current function, and
4297 contains at least one operation. CSE and inlining can duplicate insns,
4298 so it's possible to get spurious warnings from this. */
4301 never_reached_warning (avoided_insn
)
4305 rtx a_line_note
= NULL
;
4306 int two_avoided_lines
= 0;
4307 int contains_insn
= 0;
4309 if (! warn_notreached
)
4312 /* Scan forwards, looking at LINE_NUMBER notes, until
4313 we hit a LABEL or we run out of insns. */
4315 for (insn
= avoided_insn
; insn
!= NULL
; insn
= NEXT_INSN (insn
))
4317 if (GET_CODE (insn
) == CODE_LABEL
)
4319 else if (GET_CODE (insn
) == NOTE
/* A line number note? */
4320 && NOTE_LINE_NUMBER (insn
) >= 0)
4322 if (a_line_note
== NULL
)
4325 two_avoided_lines
|= (NOTE_LINE_NUMBER (a_line_note
)
4326 != NOTE_LINE_NUMBER (insn
));
4328 else if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
4331 if (two_avoided_lines
&& contains_insn
)
4332 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note
),
4333 NOTE_LINE_NUMBER (a_line_note
),
4334 "will never be executed");
4337 /* Invert the condition of the jump JUMP, and make it jump
4338 to label NLABEL instead of where it jumps now. */
4341 invert_jump (jump
, nlabel
)
4344 /* We have to either invert the condition and change the label or
4345 do neither. Either operation could fail. We first try to invert
4346 the jump. If that succeeds, we try changing the label. If that fails,
4347 we invert the jump back to what it was. */
4349 if (! invert_exp (PATTERN (jump
), jump
))
4352 if (redirect_jump (jump
, nlabel
))
4354 if (flag_branch_probabilities
)
4356 rtx note
= find_reg_note (jump
, REG_BR_PROB
, 0);
4358 /* An inverted jump means that a probability taken becomes a
4359 probability not taken. Subtract the branch probability from the
4360 probability base to convert it back to a taken probability.
4361 (We don't flip the probability on a branch that's never taken. */
4362 if (note
&& XINT (XEXP (note
, 0), 0) >= 0)
4363 XINT (XEXP (note
, 0), 0) = REG_BR_PROB_BASE
- XINT (XEXP (note
, 0), 0);
4369 if (! invert_exp (PATTERN (jump
), jump
))
4370 /* This should just be putting it back the way it was. */
4376 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4378 Return 1 if we can do so, 0 if we cannot find a way to do so that
4379 matches a pattern. */
4382 invert_exp (x
, insn
)
4386 register RTX_CODE code
;
4388 register const char *fmt
;
4390 code
= GET_CODE (x
);
4392 if (code
== IF_THEN_ELSE
)
4394 register rtx comp
= XEXP (x
, 0);
4397 /* We can do this in two ways: The preferable way, which can only
4398 be done if this is not an integer comparison, is to reverse
4399 the comparison code. Otherwise, swap the THEN-part and ELSE-part
4400 of the IF_THEN_ELSE. If we can't do either, fail. */
4402 if (can_reverse_comparison_p (comp
, insn
)
4403 && validate_change (insn
, &XEXP (x
, 0),
4404 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp
)),
4405 GET_MODE (comp
), XEXP (comp
, 0),
4406 XEXP (comp
, 1)), 0))
4410 validate_change (insn
, &XEXP (x
, 1), XEXP (x
, 2), 1);
4411 validate_change (insn
, &XEXP (x
, 2), tem
, 1);
4412 return apply_change_group ();
4415 fmt
= GET_RTX_FORMAT (code
);
4416 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4419 if (! invert_exp (XEXP (x
, i
), insn
))
4424 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4425 if (!invert_exp (XVECEXP (x
, i
, j
), insn
))
4433 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4434 If the old jump target label is unused as a result,
4435 it and the code following it may be deleted.
4437 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4440 The return value will be 1 if the change was made, 0 if it wasn't (this
4441 can only occur for NLABEL == 0). */
4444 redirect_jump (jump
, nlabel
)
4447 register rtx olabel
= JUMP_LABEL (jump
);
4449 if (nlabel
== olabel
)
4452 if (! redirect_exp (&PATTERN (jump
), olabel
, nlabel
, jump
))
4455 /* If this is an unconditional branch, delete it from the jump_chain of
4456 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4457 have UID's in range and JUMP_CHAIN is valid). */
4458 if (jump_chain
&& (simplejump_p (jump
)
4459 || GET_CODE (PATTERN (jump
)) == RETURN
))
4461 int label_index
= nlabel
? INSN_UID (nlabel
) : 0;
4463 delete_from_jump_chain (jump
);
4464 if (label_index
< max_jump_chain
4465 && INSN_UID (jump
) < max_jump_chain
)
4467 jump_chain
[INSN_UID (jump
)] = jump_chain
[label_index
];
4468 jump_chain
[label_index
] = jump
;
4472 JUMP_LABEL (jump
) = nlabel
;
4474 ++LABEL_NUSES (nlabel
);
4476 if (olabel
&& --LABEL_NUSES (olabel
) == 0)
4477 delete_insn (olabel
);
4482 /* Delete the instruction JUMP from any jump chain it might be on. */
4485 delete_from_jump_chain (jump
)
4489 rtx olabel
= JUMP_LABEL (jump
);
4491 /* Handle unconditional jumps. */
4492 if (jump_chain
&& olabel
!= 0
4493 && INSN_UID (olabel
) < max_jump_chain
4494 && simplejump_p (jump
))
4495 index
= INSN_UID (olabel
);
4496 /* Handle return insns. */
4497 else if (jump_chain
&& GET_CODE (PATTERN (jump
)) == RETURN
)
4501 if (jump_chain
[index
] == jump
)
4502 jump_chain
[index
] = jump_chain
[INSN_UID (jump
)];
4507 for (insn
= jump_chain
[index
];
4509 insn
= jump_chain
[INSN_UID (insn
)])
4510 if (jump_chain
[INSN_UID (insn
)] == jump
)
4512 jump_chain
[INSN_UID (insn
)] = jump_chain
[INSN_UID (jump
)];
4518 /* If NLABEL is nonzero, throughout the rtx at LOC,
4519 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
4520 zero, alter (RETURN) to (LABEL_REF NLABEL).
4522 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
4523 validity with validate_change. Convert (set (pc) (label_ref olabel))
4526 Return 0 if we found a change we would like to make but it is invalid.
4527 Otherwise, return 1. */
4530 redirect_exp (loc
, olabel
, nlabel
, insn
)
4535 register rtx x
= *loc
;
4536 register RTX_CODE code
= GET_CODE (x
);
4538 register const char *fmt
;
4540 if (code
== LABEL_REF
)
4542 if (XEXP (x
, 0) == olabel
)
4545 XEXP (x
, 0) = nlabel
;
4547 return validate_change (insn
, loc
, gen_rtx_RETURN (VOIDmode
), 0);
4551 else if (code
== RETURN
&& olabel
== 0)
4553 x
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
4554 if (loc
== &PATTERN (insn
))
4555 x
= gen_rtx_SET (VOIDmode
, pc_rtx
, x
);
4556 return validate_change (insn
, loc
, x
, 0);
4559 if (code
== SET
&& nlabel
== 0 && SET_DEST (x
) == pc_rtx
4560 && GET_CODE (SET_SRC (x
)) == LABEL_REF
4561 && XEXP (SET_SRC (x
), 0) == olabel
)
4562 return validate_change (insn
, loc
, gen_rtx_RETURN (VOIDmode
), 0);
4564 fmt
= GET_RTX_FORMAT (code
);
4565 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4568 if (! redirect_exp (&XEXP (x
, i
), olabel
, nlabel
, insn
))
4573 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4574 if (! redirect_exp (&XVECEXP (x
, i
, j
), olabel
, nlabel
, insn
))
4582 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4584 If the old jump target label (before the dispatch table) becomes unused,
4585 it and the dispatch table may be deleted. In that case, find the insn
4586 before the jump references that label and delete it and logical successors
4590 redirect_tablejump (jump
, nlabel
)
4593 register rtx olabel
= JUMP_LABEL (jump
);
4595 /* Add this jump to the jump_chain of NLABEL. */
4596 if (jump_chain
&& INSN_UID (nlabel
) < max_jump_chain
4597 && INSN_UID (jump
) < max_jump_chain
)
4599 jump_chain
[INSN_UID (jump
)] = jump_chain
[INSN_UID (nlabel
)];
4600 jump_chain
[INSN_UID (nlabel
)] = jump
;
4603 PATTERN (jump
) = gen_jump (nlabel
);
4604 JUMP_LABEL (jump
) = nlabel
;
4605 ++LABEL_NUSES (nlabel
);
4606 INSN_CODE (jump
) = -1;
4608 if (--LABEL_NUSES (olabel
) == 0)
4610 delete_labelref_insn (jump
, olabel
, 0);
4611 delete_insn (olabel
);
4615 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4616 If we found one, delete it and then delete this insn if DELETE_THIS is
4617 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4620 delete_labelref_insn (insn
, label
, delete_this
)
4627 if (GET_CODE (insn
) != NOTE
4628 && reg_mentioned_p (label
, PATTERN (insn
)))
4639 for (link
= LOG_LINKS (insn
); link
; link
= XEXP (link
, 1))
4640 if (delete_labelref_insn (XEXP (link
, 0), label
, 1))
4654 /* Like rtx_equal_p except that it considers two REGs as equal
4655 if they renumber to the same value and considers two commutative
4656 operations to be the same if the order of the operands has been
4659 ??? Addition is not commutative on the PA due to the weird implicit
4660 space register selection rules for memory addresses. Therefore, we
4661 don't consider a + b == b + a.
4663 We could/should make this test a little tighter. Possibly only
4664 disabling it on the PA via some backend macro or only disabling this
4665 case when the PLUS is inside a MEM. */
/* Return nonzero if X and Y are identical-looking rtx's, taking the
   post-reload hard-register renumbering (reg_renumber) into account:
   a pseudo that was assigned a hard register compares equal to that
   hard register.
   NOTE(review): this chunk is a garbled extraction.  Each original
   source line is split across several physical lines, the original
   line numbers (4668, 4672, ...) are fused into the text, and many
   lines (the function header's types, braces, switch/case labels,
   several return statements) are missing.  The code below is NOT
   compilable as-is; comments flag where context was lost.  */
4668 rtx_renumbered_equal_p (x
, y
)
4672 register RTX_CODE code
= GET_CODE (x
);
4673 register const char *fmt
;
/* If both X and Y are registers (possibly wrapped in a SUBREG of a
   REG), compare them by resolved register number and subword.  */
4678 if ((code
== REG
|| (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == REG
))
4679 && (GET_CODE (y
) == REG
|| (GET_CODE (y
) == SUBREG
4680 && GET_CODE (SUBREG_REG (y
)) == REG
)))
4682 int reg_x
= -1, reg_y
= -1;
4683 int word_x
= 0, word_y
= 0;
/* Different machine modes can never match (the return for this test
   is missing from the extraction).  */
4685 if (GET_MODE (x
) != GET_MODE (y
))
4688 /* If we haven't done any renumbering, don't
4689 make any assumptions. */
4690 if (reg_renumber
== 0)
4691 return rtx_equal_p (x
, y
);
/* SUBREG case for X (the `if (code == SUBREG)' guard appears to be
   among the missing lines): fold the renumbered base register and
   the subword offset into a single register number.  */
4695 reg_x
= REGNO (SUBREG_REG (x
));
4696 word_x
= SUBREG_WORD (x
);
4698 if (reg_renumber
[reg_x
] >= 0)
4700 reg_x
= reg_renumber
[reg_x
] + word_x
;
/* Plain REG case for X: just apply the renumbering.  */
4708 if (reg_renumber
[reg_x
] >= 0)
4709 reg_x
= reg_renumber
[reg_x
];
4712 if (GET_CODE (y
) == SUBREG
)
4714 reg_y
= REGNO (SUBREG_REG (y
));
4715 word_y
= SUBREG_WORD (y
);
4717 if (reg_renumber
[reg_y
] >= 0)
4719 reg_y
= reg_renumber
[reg_y
];
4727 if (reg_renumber
[reg_y
] >= 0)
4728 reg_y
= reg_renumber
[reg_y
];
/* Equal only if both resolved to the same valid register number and
   the same subword.  */
4731 return reg_x
>= 0 && reg_x
== reg_y
&& word_x
== word_y
;
4734 /* Now we have disposed of all the cases
4735 in which different rtx codes can match. */
4736 if (code
!= GET_CODE (y
))
/* A switch on `code' evidently begins here; its case labels are
   missing from the extraction.  Presumably the CONST_INT case:
   compare the integer values.  */
4748 return INTVAL (x
) == INTVAL (y
);
/* Presumably the LABEL_REF case follows.  */
4751 /* We can't assume nonlocal labels have their following insns yet. */
4752 if (LABEL_REF_NONLOCAL_P (x
) || LABEL_REF_NONLOCAL_P (y
))
4753 return XEXP (x
, 0) == XEXP (y
, 0);
4755 /* Two label-refs are equivalent if they point at labels
4756 in the same position in the instruction stream. */
4757 return (next_real_insn (XEXP (x
, 0))
4758 == next_real_insn (XEXP (y
, 0)));
/* Presumably the SYMBOL_REF case: the name strings must be the very
   same pointer (symbol names are shared).  */
4761 return XSTR (x
, 0) == XSTR (y
, 0);
4764 /* If we didn't match EQ equality above, they aren't the same. */
4771 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4773 if (GET_MODE (x
) != GET_MODE (y
))
4776 /* For commutative operations, the RTX match if the operand match in any
4777 order. Also handle the simple binary and unary cases without a loop.
4779 ??? Don't consider PLUS a commutative operator; see comments above. */
4780 if ((code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
4782 return ((rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
4783 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)))
4784 || (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 1))
4785 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 0))));
4786 else if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == '2')
4787 return (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
4788 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)));
4789 else if (GET_RTX_CLASS (code
) == '1')
4790 return rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0));
4792 /* Compare the elements. If any pair of corresponding elements
4793 fail to match, return 0 for the whole things. */
4795 fmt
= GET_RTX_FORMAT (code
);
/* Generic element-by-element walk driven by the rtx format string;
   the switch on fmt[i] ('w', 'i', 's', 'e', '0', 'E', ...) and the
   failure returns are among the missing lines.  */
4796 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4802 if (XWINT (x
, i
) != XWINT (y
, i
))
4807 if (XINT (x
, i
) != XINT (y
, i
))
4812 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
4817 if (! rtx_renumbered_equal_p (XEXP (x
, i
), XEXP (y
, i
)))
4822 if (XEXP (x
, i
) != XEXP (y
, i
))
/* Vector ('E') elements: lengths must agree, then recurse pairwise.  */
4829 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
4831 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4832 if (!rtx_renumbered_equal_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
)))
4843 /* If X is a hard register or equivalent to one or a subregister of one,
4844 return the hard register number. If X is a pseudo register that was not
4845 assigned a hard register, return the pseudo register number. Otherwise,
4846 return -1. Any rtx is valid for X. */
/* NOTE(review): the function header itself (presumably
   `int true_regnum (x)') is missing from this garbled extraction --
   the recursive call at original line 4860 is what identifies the
   function as true_regnum.  The final `return -1' is also missing.  */
4852 if (GET_CODE (x
) == REG
)
/* A pseudo that got a hard register reports the hard register.  */
4854 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
&& reg_renumber
[REGNO (x
)] >= 0)
4855 return reg_renumber
[REGNO (x
)];
4858 if (GET_CODE (x
) == SUBREG
)
/* Recurse on the inner expression; if it resolves to a hard
   register, offset it by the subword.  */
4860 int base
= true_regnum (SUBREG_REG (x
));
4861 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
4862 return SUBREG_WORD (x
) + base
;
4867 /* Optimize code of the form:
4869 for (x = a[i]; x; ...)
4871 for (x = a[i]; x; ...)
4875 Loop optimize will change the above code into
4879 { ...; if (! (x = ...)) break; }
4882 { ...; if (! (x = ...)) break; }
4885 In general, if the first test fails, the program can branch
4886 directly to `foo' and skip the second try which is doomed to fail.
4887 We run this after loop optimization and before flow analysis. */
4889 /* When comparing the insn patterns, we track the fact that different
4890 pseudo-register numbers may have been used in each computation.
4891 The following array stores an equivalence -- same_regs[I] == J means
4892 that pseudo register I was used in the first set of tests in a context
4893 where J was used in the second set. We also count the number of such
4894 pending equivalences. If nonzero, the expressions really aren't the
/* (The tail of the sentence above -- "... really aren't the same" --
   is among the lines lost in this extraction.)  */
/* Pseudo-register equivalence map shared by thread_jumps and
   rtx_equal_for_thread_p; indexed by pseudo number, -1 = no pairing.  */
4897 static int *same_regs
;
/* Count of pending entries in same_regs.  */
4899 static int num_same_regs
;
4901 /* Track any registers modified between the target of the first jump and
4902 the second jump. They never compare equal. */
4904 static char *modified_regs
;
4906 /* Record if memory was modified. */
4908 static int modified_mem
;
4910 /* Called via note_stores on each insn between the target of the first
4911 branch and the second branch. It marks any changed registers. */
/* NOTE(review): garbled extraction -- the return type, DEST's
   parameter declaration, the MEM branch's body (presumably setting
   modified_mem), the `return' after the pseudo case, and the local
   declarations of regno/i are all missing lines.  */
4914 mark_modified_reg (dest
, x
)
4916 rtx x ATTRIBUTE_UNUSED
;
/* Look through a SUBREG to the register actually stored into.  */
4920 if (GET_CODE (dest
) == SUBREG
)
4921 dest
= SUBREG_REG (dest
);
4923 if (GET_CODE (dest
) == MEM
)
4926 if (GET_CODE (dest
) != REG
)
4929 regno
= REGNO (dest
);
/* A pseudo occupies one slot ...  */
4930 if (regno
>= FIRST_PSEUDO_REGISTER
)
4931 modified_regs
[regno
] = 1;
/* ... a hard register may span several, so mark them all.  */
4933 for (i
= 0; i
< HARD_REGNO_NREGS (regno
, GET_MODE (dest
)); i
++)
4934 modified_regs
[regno
+ i
] = 1;
4937 /* F is the first insn in the chain of insns. */
/* Thread conditional jumps: when one conditional branch's outcome
   determines a later one's, redirect the first branch past the
   second.  NOTE(review): this chunk is a garbled extraction -- the
   return type, several parameter declarations, braces, `continue'
   statements, and the declarations of i, changed, new_label and
   all_reset are among the missing lines; the code is not compilable
   as-is.  */
4940 thread_jumps (f
, max_reg
, flag_before_loop
)
4943 int flag_before_loop
;
4945 /* Basic algorithm is to find a conditional branch,
4946 the label it may branch to, and the branch after
4947 that label. If the two branches test the same condition,
4948 walk back from both branch paths until the insn patterns
4949 differ, or code labels are hit. If we make it back to
4950 the target of the first branch, then we know that the first branch
4951 will either always succeed or always fail depending on the relative
4952 senses of the two branches. So adjust the first branch accordingly
4955 rtx label
, b1
, b2
, t1
, t2
;
4956 enum rtx_code code1
, code2
;
4957 rtx b1op0
, b1op1
, b2op0
, b2op1
;
4962 /* Allocate register tables and quick-reset table. */
4963 modified_regs
= (char *) alloca (max_reg
* sizeof (char));
4964 same_regs
= (int *) alloca (max_reg
* sizeof (int));
4965 all_reset
= (int *) alloca (max_reg
* sizeof (int));
/* Presumably this loop fills all_reset with -1 (its body is a
   missing line), to reset same_regs cheaply via bcopy below.  */
4966 for (i
= 0; i
< max_reg
; i
++)
/* Outer scan over every insn, looking for the first branch B1.  */
4973 for (b1
= f
; b1
; b1
= NEXT_INSN (b1
))
4975 /* Get to a candidate branch insn. */
4976 if (GET_CODE (b1
) != JUMP_INSN
4977 || ! condjump_p (b1
) || simplejump_p (b1
)
4978 || JUMP_LABEL (b1
) == 0)
/* Reset the per-candidate state for a fresh comparison.  */
4981 bzero (modified_regs
, max_reg
* sizeof (char));
4984 bcopy ((char *) all_reset
, (char *) same_regs
,
4985 max_reg
* sizeof (int));
4988 label
= JUMP_LABEL (b1
);
4990 /* Look for a branch after the target. Record any registers and
4991 memory modified between the target and the branch. Stop when we
4992 get to a label since we can't know what was changed there. */
4993 for (b2
= NEXT_INSN (label
); b2
; b2
= NEXT_INSN (b2
))
4995 if (GET_CODE (b2
) == CODE_LABEL
)
4998 else if (GET_CODE (b2
) == JUMP_INSN
)
5000 /* If this is an unconditional jump and is the only use of
5001 its target label, we can follow it. */
5002 if (simplejump_p (b2
)
5003 && JUMP_LABEL (b2
) != 0
5004 && LABEL_NUSES (JUMP_LABEL (b2
)) == 1)
5006 b2
= JUMP_LABEL (b2
);
5013 if (GET_CODE (b2
) != CALL_INSN
&& GET_CODE (b2
) != INSN
)
/* A call clobbers every call-used, non-fixed hard register (the
   stack/frame/arg pointers are exempted).  */
5016 if (GET_CODE (b2
) == CALL_INSN
)
5019 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
5020 if (call_used_regs
[i
] && ! fixed_regs
[i
]
5021 && i
!= STACK_POINTER_REGNUM
5022 && i
!= FRAME_POINTER_REGNUM
5023 && i
!= HARD_FRAME_POINTER_REGNUM
5024 && i
!= ARG_POINTER_REGNUM
)
5025 modified_regs
[i
] = 1;
/* Record every store this insn makes via mark_modified_reg.  */
5028 note_stores (PATTERN (b2
), mark_modified_reg
);
5031 /* Check the next candidate branch insn from the label
/* (Guard conditions continue; the first clause of this || chain is
   among the missing lines.)  */
5034 || GET_CODE (b2
) != JUMP_INSN
5036 || ! condjump_p (b2
)
5037 || simplejump_p (b2
))
5040 /* Get the comparison codes and operands, reversing the
5041 codes if appropriate. If we don't have comparison codes,
5042 we can't do anything. */
5043 b1op0
= XEXP (XEXP (SET_SRC (PATTERN (b1
)), 0), 0);
5044 b1op1
= XEXP (XEXP (SET_SRC (PATTERN (b1
)), 0), 1);
5045 code1
= GET_CODE (XEXP (SET_SRC (PATTERN (b1
)), 0));
/* An if_then_else whose "then" arm is pc_rtx branches on the
   inverted condition, so reverse the code.  */
5046 if (XEXP (SET_SRC (PATTERN (b1
)), 1) == pc_rtx
)
5047 code1
= reverse_condition (code1
);
5049 b2op0
= XEXP (XEXP (SET_SRC (PATTERN (b2
)), 0), 0);
5050 b2op1
= XEXP (XEXP (SET_SRC (PATTERN (b2
)), 0), 1);
5051 code2
= GET_CODE (XEXP (SET_SRC (PATTERN (b2
)), 0));
5052 if (XEXP (SET_SRC (PATTERN (b2
)), 1) == pc_rtx
)
5053 code2
= reverse_condition (code2
);
5055 /* If they test the same things and knowing that B1 branches
5056 tells us whether or not B2 branches, check if we
5057 can thread the branch. */
5058 if (rtx_equal_for_thread_p (b1op0
, b2op0
, b2
)
5059 && rtx_equal_for_thread_p (b1op1
, b2op1
, b2
)
5060 && (comparison_dominates_p (code1
, code2
)
5061 || (comparison_dominates_p (code1
, reverse_condition (code2
))
5062 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1
)),
/* Walk backwards in lockstep from both branches comparing insn
   patterns (loop body partially missing in this extraction).  */
5066 t1
= prev_nonnote_insn (b1
);
5067 t2
= prev_nonnote_insn (b2
);
5069 while (t1
!= 0 && t2
!= 0)
5073 /* We have reached the target of the first branch.
5074 If there are no pending register equivalents,
5075 we know that this branch will either always
5076 succeed (if the senses of the two branches are
5077 the same) or always fail (if not). */
5080 if (num_same_regs
!= 0)
5083 if (comparison_dominates_p (code1
, code2
))
5084 new_label
= JUMP_LABEL (b2
);
5086 new_label
= get_label_after (b2
);
5088 if (JUMP_LABEL (b1
) != new_label
)
5090 rtx prev
= PREV_INSN (new_label
);
5092 if (flag_before_loop
5093 && GET_CODE (prev
) == NOTE
5094 && NOTE_LINE_NUMBER (prev
) == NOTE_INSN_LOOP_BEG
)
5096 /* Don't thread to the loop label. If a loop
5097 label is reused, loop optimization will
5098 be disabled for that loop. */
5099 new_label
= gen_label_rtx ();
5100 emit_label_after (new_label
, PREV_INSN (prev
));
5102 changed
|= redirect_jump (b1
, new_label
);
5107 /* If either of these is not a normal insn (it might be
5108 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
5109 have already been skipped above.) Similarly, fail
5110 if the insns are different. */
5111 if (GET_CODE (t1
) != INSN
|| GET_CODE (t2
) != INSN
5112 || recog_memoized (t1
) != recog_memoized (t2
)
5113 || ! rtx_equal_for_thread_p (PATTERN (t1
),
5117 t1
= prev_nonnote_insn (t1
);
5118 t2
= prev_nonnote_insn (t2
);
5125 /* This is like RTX_EQUAL_P except that it knows about our handling of
5126 possibly equivalent registers and knows to consider volatile and
5127 modified objects as not equal.
5129 YINSN is the insn containing Y. */
/* NOTE(review): garbled extraction -- the return type, the x/y/yinsn
   parameter declarations, braces, `return 0/1' statements and the
   switch/case labels on `code' are among the missing lines; the code
   is not compilable as-is.  */
5132 rtx_equal_for_thread_p (x
, y
, yinsn
)
5138 register enum rtx_code code
;
5139 register const char *fmt
;
5141 code
= GET_CODE (x
);
5142 /* Rtx's of different codes cannot be equal. */
5143 if (code
!= GET_CODE (y
))
5146 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
5147 (REG:SI x) and (REG:HI x) are NOT equivalent. */
5149 if (GET_MODE (x
) != GET_MODE (y
))
5152 /* For floating-point, consider everything unequal. This is a bit
5153 pessimistic, but this pass would only rarely do anything for FP
5155 if (TARGET_FLOAT_FORMAT
== IEEE_FLOAT_FORMAT
5156 && FLOAT_MODE_P (GET_MODE (x
)) && ! flag_fast_math
)
5159 /* For commutative operations, the RTX match if the operand match in any
5160 order. Also handle the simple binary and unary cases without a loop. */
5161 if (code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
5162 return ((rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
)
5163 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 1), yinsn
))
5164 || (rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 1), yinsn
)
5165 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 0), yinsn
)));
5166 else if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == '2')
5167 return (rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
)
5168 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 1), yinsn
));
5169 else if (GET_RTX_CLASS (code
) == '1')
5170 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
5172 /* Handle special-cases first. */
/* Presumably the REG case of a switch on `code' (case label missing):
   identical, unmodified registers match outright.  */
5176 if (REGNO (x
) == REGNO (y
) && ! modified_regs
[REGNO (x
)])
5179 /* If neither is user variable or hard register, check for possible
5181 if (REG_USERVAR_P (x
) || REG_USERVAR_P (y
)
5182 || REGNO (x
) < FIRST_PSEUDO_REGISTER
5183 || REGNO (y
) < FIRST_PSEUDO_REGISTER
)
/* First sighting of pseudo X: tentatively pair it with Y's pseudo
   in same_regs (the num_same_regs bump is among the missing lines).  */
5186 if (same_regs
[REGNO (x
)] == -1)
5188 same_regs
[REGNO (x
)] = REGNO (y
);
5191 /* If this is the first time we are seeing a register on the `Y'
5192 side, see if it is the last use. If not, we can't thread the
5193 jump, so mark it as not equivalent. */
5194 if (REGNO_LAST_UID (REGNO (y
)) != INSN_UID (yinsn
))
/* Otherwise X matches only the Y-pseudo it was already paired with.  */
5200 return (same_regs
[REGNO (x
)] == REGNO (y
));
/* Presumably the MEM case.  */
5205 /* If memory modified or either volatile, not equivalent.
5206 Else, check address. */
5207 if (modified_mem
|| MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
5210 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
/* Presumably the ASM_INPUT (or similar volatile-bearing) case.  */
5213 if (MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
/* Presumably the SET case.  */
5219 /* Cancel a pending `same_regs' if setting equivalenced registers.
5220 Then process source. */
5221 if (GET_CODE (SET_DEST (x
)) == REG
5222 && GET_CODE (SET_DEST (y
)) == REG
)
5224 if (same_regs
[REGNO (SET_DEST (x
))] == REGNO (SET_DEST (y
)))
5226 same_regs
[REGNO (SET_DEST (x
))] = -1;
5229 else if (REGNO (SET_DEST (x
)) != REGNO (SET_DEST (y
)))
5233 if (rtx_equal_for_thread_p (SET_DEST (x
), SET_DEST (y
), yinsn
) == 0)
5236 return rtx_equal_for_thread_p (SET_SRC (x
), SET_SRC (y
), yinsn
);
/* Presumably the LABEL_REF case: compare the label objects.  */
5239 return XEXP (x
, 0) == XEXP (y
, 0);
/* Presumably the SYMBOL_REF case: names must be the same pointer.  */
5242 return XSTR (x
, 0) == XSTR (y
, 0);
/* Generic walk over the remaining operands by format string; the
   switch on fmt[i] and the failure returns are missing lines.  */
5251 fmt
= GET_RTX_FORMAT (code
);
5252 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5257 if (XWINT (x
, i
) != XWINT (y
, i
))
5263 if (XINT (x
, i
) != XINT (y
, i
))
5269 /* Two vectors must have the same length. */
5270 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
5273 /* And the corresponding elements must match. */
5274 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
5275 if (rtx_equal_for_thread_p (XVECEXP (x
, i
, j
),
5276 XVECEXP (y
, i
, j
), yinsn
) == 0)
5281 if (rtx_equal_for_thread_p (XEXP (x
, i
), XEXP (y
, i
), yinsn
) == 0)
5287 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
5292 /* These are just backpointers, so they don't matter. */
5299 /* It is believed that rtx's at this level will never
5300 contain anything but integers and other rtx's,
5301 except for within LABEL_REFs and SYMBOL_REFs. */
5311 /* Return the insn that NEW can be safely inserted in front of starting at
5312 the jump insn INSN. Return 0 if it is not safe to do this jump
5313 optimization. Note that NEW must contain a single set. */
/* NOTE(review): garbled extraction -- the return type, parameter
   declarations, the `return insn' for the safe cases, the
   declarations of i/prev, and the second operand of the
   reg_overlap_mentioned_p / modified_in_p calls are missing lines.
   The trailing `#endif' implies the whole function sits inside a
   `#ifndef HAVE_cc0'-style conditional whose `#if' is outside this
   view.  */
5316 find_insert_position (insn
, new)
5323 /* If NEW does not clobber, it is safe to insert NEW before INSN. */
5324 if (GET_CODE (PATTERN (new)) != PARALLEL
)
/* Scan NEW's PARALLEL for CLOBBERs that overlap something in INSN
   (the overlap target is a missing line).  */
5327 for (i
= XVECLEN (PATTERN (new), 0) - 1; i
>= 0; i
--)
5328 if (GET_CODE (XVECEXP (PATTERN (new), 0, i
)) == CLOBBER
5329 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i
), 0),
5336 /* There is a good chance that the previous insn PREV sets the thing
5337 being clobbered (often the CC in a hard reg). If PREV does not
5338 use what NEW sets, we can insert NEW before PREV. */
5340 prev
= prev_active_insn (insn
);
5341 for (i
= XVECLEN (PATTERN (new), 0) - 1; i
>= 0; i
--)
5342 if (GET_CODE (XVECEXP (PATTERN (new), 0, i
)) == CLOBBER
5343 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i
), 0),
5345 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i
), 0),
/* PREV is usable only if it does not mention what NEW sets.  */
5349 return reg_mentioned_p (SET_DEST (single_set (new)), prev
) ? 0 : prev
;
5351 #endif /* !HAVE_cc0 */