/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This is the jump-optimization pass of the compiler.
   It is run two or three times: once before cse, sometimes once after cse,
   and once after reload (before final).

   jump_optimize deletes unreachable code and labels that are not used.
   It also deletes jumps that jump to the following insn,
   and simplifies jumps around unconditional jumps and jumps
   to unconditional jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   Optionally, cross-jumping can be done.  Currently it is done
   only the last time (when after reload and before final).
   In fact, the code for cross-jumping now assumes that register
   allocation has been done, since it uses `rtx_renumbered_equal_p'.

   Jump optimization is done after cse when cse's constant-propagation
   causes jumps to become unconditional or to be deleted.

   Unreachable loops are not detected here, because the labels
   have references and the insns appear reachable from the labels.
   find_basic_blocks in flow.c finds and deletes such loops.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "recog.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"

/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize  jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */

static rtx *jump_chain;
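
/* For illustration only: a hypothetical traversal of the chain of
   unconditional jumps to LABEL, under the indexing scheme described
   above.  `for_each_jump_to' is not part of this pass; it is just a
   sketch of how jump_chain is meant to be read (real uses also check
   that the uids are below max_jump_chain).  */
#if 0
static void
for_each_jump_to (label)
     rtx label;
{
  register rtx jump;

  for (jump = jump_chain[INSN_UID (label)];
       jump != 0;
       jump = jump_chain[INSN_UID (jump)])
    /* Process JUMP, an unconditional jump whose target is LABEL.  */
    ;
}
#endif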

/* List of labels referred to from initializers.
   These can never be deleted.  */
rtx forced_labels;

/* Maximum index in jump_chain.  */

static int max_jump_chain;

/* Set nonzero by jump_optimize if control can fall through
   to the end of the function.  */
int can_reach_end;

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C,
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;
static int init_label_info PROTO((rtx));
static void delete_barrier_successors PROTO((rtx));
static void mark_all_labels PROTO((rtx, int));
static rtx delete_unreferenced_labels PROTO((rtx));
static void delete_noop_moves PROTO((rtx));
static int calculate_can_reach_end PROTO((rtx, int, int));
static int duplicate_loop_exit_test PROTO((rtx));
static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump PROTO((rtx, rtx, rtx));
static int jump_back_p PROTO((rtx, rtx));
static int tension_vector_labels PROTO((rtx, int));
static void mark_jump_label PROTO((rtx, rtx, int));
static void delete_computation PROTO((rtx));
static void delete_from_jump_chain PROTO((rtx));
static int delete_labelref_insn PROTO((rtx, rtx, int));
static void mark_modified_reg PROTO((rtx, rtx));
static void redirect_tablejump PROTO((rtx, rtx));
static void jump_optimize_1 PROTO ((rtx, int, int, int, int));
#ifndef HAVE_cc0
static rtx find_insert_position PROTO((rtx, rtx));
#endif

/* Main external entry point into the jump optimizer.  See comments before
   jump_optimize_1 for descriptions of the arguments.  */
void
jump_optimize (f, cross_jump, noop_moves, after_regscan)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
{
  jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0);
}

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 1);
}
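
/* Illustrative only: a typical invocation of the full pass might look
   like  jump_optimize (get_insns (), 1, 1, 1);  -- cross-jumping
   enabled, no-op moves deleted, and regscan information available.
   The actual callers live elsewhere in the compiler.  */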

/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is 1, detect matching code
   before a jump and its destination and unify them.
   If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

   If NOOP_MOVES is nonzero, delete no-op move insns.

   If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
   after regscan, and it is safe to use regno_first_uid and regno_last_uid.

   If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
   and JUMP_LABEL field for jumping insns.

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.  */
static void
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
     int mark_labels_only;
{
  register rtx insn, next;
  int changed;
  int old_max_reg;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;

  cross_jump_death_matters = (cross_jump == 2);
  max_uid = init_label_info (f) + 1;

  /* If we are performing cross jump optimizations, then initialize
     tables mapping UIDs to EH regions to avoid incorrect movement
     of insns from one EH region to another.  */
  if (flag_exceptions && cross_jump)
    init_insn_eh_region (f, max_uid);

  delete_barrier_successors (f);

  /* Leave some extra room for labels and duplicate exit test insns
     we make.  */
  max_jump_chain = max_uid * 14 / 10;
  jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
  bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));

  mark_all_labels (f, cross_jump);

  /* Keep track of labels used from static data;
     they cannot ever be deleted.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  check_exception_handler_labels ();

  /* Keep track of labels used for marking handlers for exception
     regions; they cannot usually be deleted.  */

  for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
     notes and recompute LABEL_NUSES.  */
  if (mark_labels_only)
    return;

  exception_optimize ();

  last_insn = delete_unreferenced_labels (f);

  if (!optimize)
    {
      /* CAN_REACH_END is persistent for each function.  Once set it should
	 not be cleared.  This is especially true for the case where we
	 delete the NOTE_FUNCTION_END note.  CAN_REACH_END is cleared by
	 the front-end before compiling each function.  */
      if (calculate_can_reach_end (last_insn, 1, 0))
	can_reach_end = 1;

      /* Zero the "deleted" flag of all the "deleted" insns.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
	INSN_DELETED_P (insn) = 0;

      /* Show that the jump chain is not valid.  */
      jump_chain = 0;
      return;
    }
#ifdef HAVE_return
  if (HAVE_return)
    {
      /* If we fall through to the epilogue, see if we can insert a RETURN insn
	 in front of it.  If the machine allows it at this point (we might be
	 after reload for a leaf routine), it will improve optimization for it
	 to be there.  */
      insn = get_last_insn ();
      while (insn && GET_CODE (insn) == NOTE)
	insn = PREV_INSN (insn);

      if (insn && GET_CODE (insn) != BARRIER)
	{
	  emit_jump_insn (gen_return ());
	  emit_barrier ();
	}
    }
#endif
  if (noop_moves)
    delete_noop_moves (f);

  /* If we haven't yet gotten to reload and we have just run regscan,
     delete any insn that sets a register that isn't used elsewhere.
     This helps some of the optimizations below by having fewer insns
     being jumped around.  */
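  /* Illustrative source-level example (hypothetical): for
     `int t = x + y;' where T is a pseudo that is never referenced
     again, even in notes, the insn setting T satisfies the tests
     below and is deleted here.  */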

  if (! reload_completed && after_regscan)
    for (insn = f; insn; insn = next)
      {
	rtx set = single_set (insn);

	next = NEXT_INSN (insn);

	if (set && GET_CODE (SET_DEST (set)) == REG
	    && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
	    && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    /* We use regno_last_note_uid so as not to delete the setting
	       of a reg that's used in notes.  A subsequent optimization
	       might arrange to use that reg for real.  */
	    && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    && ! side_effects_p (SET_SRC (set))
	    && ! find_reg_note (insn, REG_RETVAL, 0))
	  delete_insn (insn);
      }

  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  changed = 1;
  old_max_reg = max_reg_num ();
  while (changed)
    {
      changed = 0;

      for (insn = f; insn; insn = next)
	{
	  rtx reallabelprev;
	  rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
	  rtx nlabel;
	  int this_is_simplejump, this_is_condjump, reversep = 0;
	  int this_is_condjump_in_parallel;

#if 0
	  /* If NOT the first iteration, if this is the last jump pass
	     (just before final), do the special peephole optimizations.
	     Avoiding the first iteration gives ordinary jump opts
	     a chance to work before peephole opts.  */

	  if (reload_completed && !first && !flag_no_peephole)
	    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
	      peephole (insn);
#endif

	  /* That could have deleted some insns after INSN, so check now
	     what the following insn is.  */

	  next = NEXT_INSN (insn);

	  /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
	     jump.  Try to optimize by duplicating the loop exit test if so.
	     This is only safe immediately after regscan, because it uses
	     the values of regno_first_uid and regno_last_uid.  */
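	  /* For instance (illustrative): `while (c) body;' is first laid
	     out as  NOTE_INSN_LOOP_BEG; goto TEST; BODY; TEST: if (c)
	     goto BODY;  -- copying the exit test in front of the initial
	     jump lets the loop be entered without executing that jump.  */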
	  if (after_regscan && GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      && (temp1 = next_nonnote_insn (insn)) != 0
	      && simplejump_p (temp1))
	    {
	      temp = PREV_INSN (insn);
	      if (duplicate_loop_exit_test (insn))
		{
		  changed = 1;
		  next = NEXT_INSN (temp);
		  continue;
		}
	    }

	  if (GET_CODE (insn) != JUMP_INSN)
	    continue;

	  this_is_simplejump = simplejump_p (insn);
	  this_is_condjump = condjump_p (insn);
	  this_is_condjump_in_parallel = condjump_in_parallel_p (insn);

	  /* Tension the labels in dispatch tables.  */

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 0);
	  if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 1);

	  /* If a dispatch table always goes to the same place,
	     get rid of it and replace the insn that uses it.  */

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      int i;
	      rtx pat = PATTERN (insn);
	      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
	      int len = XVECLEN (pat, diff_vec_p);
	      rtx dispatch = prev_real_insn (insn);

	      for (i = 0; i < len; i++)
		if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
		    != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
		  break;
	      if (i == len
		  && dispatch != 0
		  && GET_CODE (dispatch) == JUMP_INSN
		  && JUMP_LABEL (dispatch) != 0
		  /* Don't mess with a casesi insn.  */
		  && !(GET_CODE (PATTERN (dispatch)) == SET
		       && (GET_CODE (SET_SRC (PATTERN (dispatch)))
			   == IF_THEN_ELSE))
		  && next_real_insn (JUMP_LABEL (dispatch)) == insn)
		{
		  redirect_tablejump (dispatch,
				      XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
		  changed = 1;
		}
	    }
	  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

	  /* If a jump references the end of the function, try to turn
	     it into a RETURN insn, possibly a conditional one.  */
	  if (JUMP_LABEL (insn)
	      && (next_active_insn (JUMP_LABEL (insn)) == 0
		  || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
		  == RETURN))
	    changed |= redirect_jump (insn, NULL_RTX);

	  /* Detect jump to following insn.  */
	  if (reallabelprev == insn && condjump_p (insn))
	    {
	      next = next_real_insn (JUMP_LABEL (insn));
	      delete_jump (insn);
	      changed = 1;
	      continue;
	    }

	  /* If we have an unconditional jump preceded by a USE, try to put
	     the USE before the target and jump there.  This simplifies many
	     of the optimizations below since we don't have to worry about
	     dealing with these USE insns.  We only do this if the label
	     being branched to already has the identical USE or if code
	     never falls through to that label.  */

	  if (this_is_simplejump
	      && (temp = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
	      && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
	      && (GET_CODE (temp1) == BARRIER
		  || (GET_CODE (temp1) == INSN
		      && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
	      /* Don't do this optimization if we have a loop containing only
		 the USE instruction, and the loop start label has a usage
		 count of 1.  This is because we will redo this optimization
		 every time through the outer loop, and jump opt will never
		 exit.  */
	      && ! ((temp2 = prev_nonnote_insn (temp)) != 0
		    && temp2 == JUMP_LABEL (insn)
		    && LABEL_NUSES (temp2) == 1))
	    {
	      if (GET_CODE (temp1) == BARRIER)
		{
		  emit_insn_after (PATTERN (temp), temp1);
		  temp1 = NEXT_INSN (temp1);
		}

	      delete_insn (temp);
	      redirect_jump (insn, get_label_before (temp1));
	      reallabelprev = prev_real_insn (temp1);
	      changed = 1;
	    }

	  /* Simplify   if (...) x = a; else x = b;  by converting it
	     to         x = b; if (...) x = a;
	     if B is sufficiently simple, the test doesn't involve X,
	     and nothing in the test modifies B or X.

	     If we have small register classes, we also can't do this if X
	     is a hard register.

	     If the "x = b;" insn has any REG_NOTES, we don't do this because
	     of the possibility that we are running after CSE and there is a
	     REG_EQUAL note that is only valid if the branch has already been
	     taken.  If we move the insn with the REG_EQUAL note, we may
	     fold the comparison to always be false in a later CSE pass.
	     (We could also delete the REG_NOTES when moving the insn, but it
	     seems simpler to not move it.)  An exception is that we can move
	     the insn if the only note is a REG_EQUAL or REG_EQUIV whose
	     value is the same as "b".

	     INSN is the branch over the `else' part.

	     We set:

	     TEMP to the jump insn preceding "x = a;"
	     TEMP1 to X
	     TEMP2 to the insn that sets "x = b;"
	     TEMP3 to the insn that sets "x = a;"
	     TEMP4 to the set of "x = b";  */

	  if (this_is_simplejump
	      && (temp3 = prev_active_insn (insn)) != 0
	      && GET_CODE (temp3) == INSN
	      && (temp4 = single_set (temp3)) != 0
	      && GET_CODE (temp1 = SET_DEST (temp4)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && (temp2 = next_active_insn (insn)) != 0
	      && GET_CODE (temp2) == INSN
	      && (temp4 = single_set (temp2)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp1)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && (REG_NOTES (temp2) == 0
		  || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
		       || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
		      && XEXP (REG_NOTES (temp2), 1) == 0
		      && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
				      SET_SRC (temp4))))
	      && (temp = prev_active_insn (temp3)) != 0
	      && condjump_p (temp) && ! simplejump_p (temp)
	      /* TEMP must skip over the "x = a;" insn */
	      && prev_real_insn (JUMP_LABEL (temp)) == insn
	      && no_labels_between_p (insn, JUMP_LABEL (temp))
	      /* There must be no other entries to the "x = b;" insn.  */
	      && no_labels_between_p (JUMP_LABEL (temp), temp2)
	      /* INSN must either branch to the insn after TEMP2 or the insn
		 after TEMP2 must branch to the same place as INSN.  */
	      && (reallabelprev == temp2
		  || ((temp5 = next_active_insn (temp2)) != 0
		      && simplejump_p (temp5)
		      && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
	    {
	      /* The test expression, X, may be a complicated test with
		 multiple branches.  See if we can find all the uses of
		 the label that TEMP branches to without hitting a CALL_INSN
		 or a jump to somewhere else.  */
	      rtx target = JUMP_LABEL (temp);
	      int nuses = LABEL_NUSES (target);
	      rtx p;
#ifdef HAVE_cc0
	      rtx q;
#endif

	      /* Set P to the first jump insn that goes around "x = a;".  */
	      for (p = temp; nuses && p; p = prev_nonnote_insn (p))
		{
		  if (GET_CODE (p) == JUMP_INSN)
		    {
		      if (condjump_p (p) && ! simplejump_p (p)
			  && JUMP_LABEL (p) == target)
			{
			  nuses--;
			  if (nuses == 0)
			    break;
			}
		      else
			break;
		    }
		  else if (GET_CODE (p) == CALL_INSN)
		    break;
		}

#ifdef HAVE_cc0
	      /* We cannot insert anything between a set of cc and its use
		 so if P uses cc0, we must back up to the previous insn.  */
	      q = prev_nonnote_insn (p);
	      if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
		  && sets_cc0_p (PATTERN (q)))
		p = q;
#endif

	      if (p)
		p = PREV_INSN (p);

	      /* If we found all the uses and there was no data conflict, we
		 can move the assignment unless we can branch into the middle
		 from somewhere.  */
	      if (nuses == 0 && p
		  && no_labels_between_p (p, insn)
		  && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
		  && ! reg_set_between_p (temp1, p, temp3)
		  && (GET_CODE (SET_SRC (temp4)) == CONST_INT
		      || ! modified_between_p (SET_SRC (temp4), p, temp2))
		  /* Verify that registers used by the jump are not clobbered
		     by the instruction being moved.  */
		  && ! regs_set_between_p (PATTERN (temp),
					   PREV_INSN (temp2),
					   NEXT_INSN (temp2)))
		{
		  emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
		  delete_insn (temp2);

		  /* Set NEXT to an insn that we know won't go away.  */
		  next = next_active_insn (insn);

		  /* Delete the jump around the set.  Note that we must do
		     this before we redirect the test jumps so that it won't
		     delete the code immediately following the assignment
		     we moved (which might be a jump).  */

		  delete_insn (insn);

		  /* We either have two consecutive labels or a jump to
		     a jump, so adjust all the JUMP_INSNs to branch to where
		     INSN branches to.  */
		  for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
		    if (GET_CODE (p) == JUMP_INSN)
		      redirect_jump (p, target);

		  changed = 1;
		  continue;
		}
	    }

	  /* Simplify   if (...) { x = a; goto l; } x = b;  by converting it
	     to         x = a; if (...) goto l; x = b;
	     if A is sufficiently simple, the test doesn't involve X,
	     and nothing in the test modifies A or X.

	     If we have small register classes, we also can't do this if X
	     is a hard register.

	     If the "x = a;" insn has any REG_NOTES, we don't do this because
	     of the possibility that we are running after CSE and there is a
	     REG_EQUAL note that is only valid if the branch has already been
	     taken.  If we move the insn with the REG_EQUAL note, we may
	     fold the comparison to always be false in a later CSE pass.
	     (We could also delete the REG_NOTES when moving the insn, but it
	     seems simpler to not move it.)  An exception is that we can move
	     the insn if the only note is a REG_EQUAL or REG_EQUIV whose
	     value is the same as "a".

	     INSN is the goto.

	     We set:

	     TEMP to the jump insn preceding "x = a;"
	     TEMP1 to X
	     TEMP2 to the insn that sets "x = b;"
	     TEMP3 to the insn that sets "x = a;"
	     TEMP4 to the set of "x = a";  */

	  if (this_is_simplejump
	      && (temp2 = next_active_insn (insn)) != 0
	      && GET_CODE (temp2) == INSN
	      && (temp4 = single_set (temp2)) != 0
	      && GET_CODE (temp1 = SET_DEST (temp4)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && (temp3 = prev_active_insn (insn)) != 0
	      && GET_CODE (temp3) == INSN
	      && (temp4 = single_set (temp3)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp1)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && (REG_NOTES (temp3) == 0
		  || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
		       || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
		      && XEXP (REG_NOTES (temp3), 1) == 0
		      && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
				      SET_SRC (temp4))))
	      && (temp = prev_active_insn (temp3)) != 0
	      && condjump_p (temp) && ! simplejump_p (temp)
	      /* TEMP must skip over the "x = a;" insn */
	      && prev_real_insn (JUMP_LABEL (temp)) == insn
	      && no_labels_between_p (temp, insn))
	    {
	      rtx prev_label = JUMP_LABEL (temp);
	      rtx insert_after = prev_nonnote_insn (temp);

#ifdef HAVE_cc0
	      /* We cannot insert anything between a set of cc and its use.  */
	      if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
		  && sets_cc0_p (PATTERN (insert_after)))
		insert_after = prev_nonnote_insn (insert_after);
#endif
	      ++LABEL_NUSES (prev_label);

	      if (insert_after
		  && no_labels_between_p (insert_after, temp)
		  && ! reg_referenced_between_p (temp1, insert_after, temp3)
		  && ! reg_referenced_between_p (temp1, temp3,
						 NEXT_INSN (temp2))
		  && ! reg_set_between_p (temp1, insert_after, temp)
		  && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
		  /* Verify that registers used by the jump are not clobbered
		     by the instruction being moved.  */
		  && ! regs_set_between_p (PATTERN (temp),
					   PREV_INSN (temp3),
					   NEXT_INSN (temp3))
		  && invert_jump (temp, JUMP_LABEL (insn)))
		{
		  emit_insn_after_with_line_notes (PATTERN (temp3),
						   insert_after, temp3);
		  delete_insn (temp3);
		  delete_insn (insn);
		  /* Set NEXT to an insn that we know won't go away.  */
		  next = temp2;
		  changed = 1;
		}
	      if (prev_label && --LABEL_NUSES (prev_label) == 0)
		delete_insn (prev_label);
	      if (changed)
		continue;
	    }
#ifndef HAVE_cc0
	  /* If we have  if (...) x = exp;  and branches are expensive,
	     EXP is a single insn, does not have any side effects, cannot
	     trap, and is not too costly, convert this to
	     t = exp; if (...) x = t;

	     Don't do this when we have CC0 because it is unlikely to help
	     and we'd need to worry about where to place the new insn and
	     the potential for conflicts.  We also can't do this when we have
	     notes on the insn for the same reason as above.

	     We set:

	     TEMP to the "x = exp;" insn.
	     TEMP1 to the single set in the "x = exp;" insn.
	     TEMP2 to "x".  */

	  if (! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 3
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (reallabelprev == temp
		  || ((temp2 = next_active_insn (temp)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && GET_CODE (SET_SRC (temp1)) != REG
	      && GET_CODE (SET_SRC (temp1)) != SUBREG
	      && GET_CODE (SET_SRC (temp1)) != CONST_INT
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp3 = find_insert_position (insn, temp))
		  && validate_change (temp, &SET_DEST (temp1), new, 0))
		{
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp3), temp);
		  delete_insn (temp);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }
	  /* Similarly, if it takes two insns to compute EXP but they
	     have the same destination.  Here TEMP3 will be the second
	     insn and TEMP4 the SET from that insn.  */

	  if (! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 4
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (temp3 = next_nonnote_insn (temp)) != 0
	      && GET_CODE (temp3) == INSN
	      && REG_NOTES (temp3) == 0
	      && (reallabelprev == temp3
		  || ((temp2 = next_active_insn (temp3)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
	      && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10
	      && (temp4 = single_set (temp3)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp2)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && rtx_cost (SET_SRC (temp4), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp5 = find_insert_position (insn, temp))
		  && (temp6 = find_insert_position (insn, temp3))
		  && validate_change (temp, &SET_DEST (temp1), new, 0))
		{
		  /* Use the earliest of temp5 and temp6.  */
		  if (temp5 != insn)
		    temp6 = temp5;
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp6), temp);
		  emit_insn_after_with_line_notes
		    (replace_rtx (PATTERN (temp3), temp2, new),
		     PREV_INSN (temp6), temp3);
		  delete_insn (temp);
		  delete_insn (temp3);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }
	  /* Finally, handle the case where two insns are used to
	     compute EXP but a temporary register is used.  Here we must
	     ensure that the temporary register is not used anywhere else.  */

	  if (! reload_completed
	      && after_regscan
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 4
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (temp3 = next_nonnote_insn (temp)) != 0
	      && GET_CODE (temp3) == INSN
	      && REG_NOTES (temp3) == 0
	      && (reallabelprev == temp3
		  || ((temp2 = next_active_insn (temp3)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp5 = SET_DEST (temp1),
		  (GET_CODE (temp5) == REG
		   || (GET_CODE (temp5) == SUBREG
		       && (temp5 = SUBREG_REG (temp5),
			   GET_CODE (temp5) == REG))))
	      && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
	      && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
	      && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10
	      && (temp4 = single_set (temp3)) != 0
	      && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
	      && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && rtx_equal_p (SET_DEST (temp4), temp2)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && rtx_cost (SET_SRC (temp4), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp5 = find_insert_position (insn, temp))
		  && (temp6 = find_insert_position (insn, temp3))
		  && validate_change (temp3, &SET_DEST (temp4), new, 0))
		{
		  /* Use the earliest of temp5 and temp6.  */
		  if (temp5 != insn)
		    temp6 = temp5;
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp6), temp);
		  emit_insn_after_with_line_notes (PATTERN (temp3),
						   PREV_INSN (temp6), temp3);
		  delete_insn (temp);
		  delete_insn (temp3);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }
#endif /* HAVE_cc0 */

	  /* Try to use a conditional move (if the target has them), or a
	     store-flag insn.  The general case is:

	     1) x = a; if (...) x = b; and
	     2) if (...) x = b;

	     If the jump would be faster, the machine should not have defined
	     the movcc or scc insns!  These cases are often made by the
	     previous optimization.

	     The second case is treated as  x = x; if (...) x = b;.

	     INSN here is the jump around the store.  We set:

	     TEMP to the "x = b;" insn.
	     TEMP1 to X.
	     TEMP2 to B.
	     TEMP3 to A (X in the second case).
	     TEMP4 to the condition being tested.
	     TEMP5 to the earliest insn used to find the condition.  */

	  if (/* We can't do this after reload has completed.  */
	      ! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      /* Set TEMP to the "x = b;" insn.  */
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && GET_CODE (PATTERN (temp)) == SET
	      && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && ! side_effects_p (temp2 = SET_SRC (PATTERN (temp)))
	      && ! may_trap_p (temp2)
	      /* Allow either form, but prefer the former if both apply.
		 There is no point in using the old value of TEMP1 if
		 it is a register, since cse will alias them.  It can
		 lose if the old value was a hard register since CSE
		 won't replace hard registers.  Avoid using TEMP3 if
		 small register classes and it is a hard register.  */
	      && (((temp3 = reg_set_last (temp1, insn)) != 0
		   && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
			 && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
		  /* Make the latter case look like  x = x; if (...) x = b;  */
		  || (temp3 = temp1, 1))
	      /* INSN must either branch to the insn after TEMP or the insn
		 after TEMP must branch to the same place as INSN.  */
	      && (reallabelprev == temp
		  || ((temp4 = next_active_insn (temp)) != 0
		      && simplejump_p (temp4)
		      && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
	      && (temp4 = get_condition (insn, &temp5)) != 0
	      /* We must be comparing objects whose modes imply the size.
		 We could handle BLKmode if (1) emit_store_flag could
		 and (2) we could find the size reliably.  */
	      && GET_MODE (XEXP (temp4, 0)) != BLKmode
	      /* Even if branches are cheap, the store_flag optimization
		 can win when the operation to be performed can be
		 expressed directly.  */
#ifdef HAVE_cc0
	      /* If the previous insn sets CC0 and something else, we can't
		 do this since we are going to delete that insn.  */

	      && ! ((temp6 = prev_nonnote_insn (insn)) != 0
		    && GET_CODE (temp6) == INSN
		    && (sets_cc0_p (PATTERN (temp6)) == -1
			|| (sets_cc0_p (PATTERN (temp6)) == 1
			    && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
#endif
	      )
	    {
#ifdef HAVE_conditional_move
	      /* First try a conditional move.  */
	      {
		enum rtx_code code = GET_CODE (temp4);
		rtx var = temp1;
		rtx cond0, cond1, aval, bval;
		rtx target;

		/* Copy the compared variables into cond0 and cond1, so that
		   any side effects performed in or after the old comparison,
		   will not affect our compare which will come later.  */
		/* ??? Is it possible to just use the comparison in the jump
		   insn?  After all, we're going to delete it.  We'd have
		   to modify emit_conditional_move to take a comparison rtx
		   instead or write a new function.  */
		cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
		/* We want the target to be able to simplify comparisons with
		   zero (and maybe other constants as well), so don't create
		   pseudos for them.  There's no need to either.  */
		if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
		    || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
		  cond1 = XEXP (temp4, 1);
		else
		  cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));

		aval = temp3;
		bval = temp2;

		start_sequence ();
		target = emit_conditional_move (var, code,
						cond0, cond1, VOIDmode,
						aval, bval, GET_MODE (var),
						(code == LTU || code == GEU
						 || code == LEU || code == GTU));

		if (target)
		  {
		    rtx seq1, seq2, last;
		    int copy_ok;

		    /* Save the conditional move sequence but don't emit it
		       yet.  On some machines, like the alpha, it is possible
		       that temp5 == insn, so next generate the sequence that
		       saves the compared values and then emit both
		       sequences ensuring seq1 occurs before seq2.  */
		    seq2 = get_insns ();
		    end_sequence ();

		    /* "Now that we can't fail..."  Famous last words.
		       Generate the copy insns that preserve the compared
		       values.  */
		    start_sequence ();
		    emit_move_insn (cond0, XEXP (temp4, 0));
		    if (cond1 != XEXP (temp4, 1))
		      emit_move_insn (cond1, XEXP (temp4, 1));
		    seq1 = get_insns ();
		    end_sequence ();

		    /* Validate the sequence -- this may be some weird
		       bit-extract-and-test instruction for which there
		       exists no complementary bit-extract insn.  */
		    copy_ok = 1;
		    for (last = seq1; last; last = NEXT_INSN (last))
		      if (recog_memoized (last) < 0)
			{
			  copy_ok = 0;
			  break;
			}

		    if (copy_ok)
		      {
			emit_insns_before (seq1, temp5);

			/* Insert conditional move after insn, to be sure
			   that the jump and a possible compare won't be
			   separated.  */
			last = emit_insns_after (seq2, insn);

			/* ??? We can also delete the insn that sets X to A.
			   Flow will do it too though.  */
			delete_insn (temp);
			next = NEXT_INSN (insn);
			delete_jump (insn);

			if (after_regscan)
			  {
			    reg_scan_update (seq1, NEXT_INSN (last),
					     old_max_reg);
			    old_max_reg = max_reg_num ();
			  }

			changed = 1;
			continue;
		      }
		  }
		else
		  end_sequence ();
	      }
#endif
	      /* That didn't work, try a store-flag insn.

		 We further divide the cases into:

		 1) x = a; if (...) x = b; and either A or B is zero,
		 2) if (...) x = 0; and jumps are expensive,
		 3) x = a; if (...) x = b; and A and B are constants where all
		    the set bits in A are also set in B and jumps are expensive,
		 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
		    more expensive, and
		 5) if (...) x = b; if jumps are even more expensive.  */

	      if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
		  && ((GET_CODE (temp3) == CONST_INT)
		      /* Make the latter case look like
			 x = x; if (...) x = 0;  */
		      || (temp3 = temp1,
			  ((BRANCH_COST >= 2
			    && temp2 == const0_rtx)
			   || BRANCH_COST >= 3)))
		  /* If B is zero, OK; if A is zero, can only do (1) if we
		     can reverse the condition.  See if (3) applies possibly
		     by reversing the condition.  Prefer reversing to (4) when
		     branches are very expensive.  */
		  && (((BRANCH_COST >= 2
			|| STORE_FLAG_VALUE == -1
			|| (STORE_FLAG_VALUE == 1
			    /* Check that the mask is a power of two,
			       so that it can probably be generated
			       with a shift.  */
			    && GET_CODE (temp3) == CONST_INT
			    && exact_log2 (INTVAL (temp3)) >= 0))
		       && (reversep = 0, temp2 == const0_rtx))
		      || ((BRANCH_COST >= 2
			   || STORE_FLAG_VALUE == -1
			   || (STORE_FLAG_VALUE == 1
			       && GET_CODE (temp2) == CONST_INT
			       && exact_log2 (INTVAL (temp2)) >= 0))
			  && temp3 == const0_rtx
			  && (reversep = can_reverse_comparison_p (temp4, insn)))
		      || (BRANCH_COST >= 2
			  && GET_CODE (temp2) == CONST_INT
			  && GET_CODE (temp3) == CONST_INT
			  && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
			      || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
				  && (reversep = can_reverse_comparison_p (temp4,
									   insn)))))
		      || BRANCH_COST >= 3)
		  )
		{
		  enum rtx_code code = GET_CODE (temp4);
		  rtx uval, cval, var = temp1;
		  int normalizep;
		  rtx target;

		  /* If necessary, reverse the condition.  */
		  if (reversep)
		    code = reverse_condition (code), uval = temp2, cval = temp3;
		  else
		    uval = temp3, cval = temp2;

		  /* If CVAL is non-zero, normalize to -1.  Otherwise, if UVAL
		     is the constant 1, it is best to just compute the result
		     directly.  If UVAL is constant and STORE_FLAG_VALUE
		     includes all of its bits, it is best to compute the flag
		     value unnormalized and `and' it with UVAL.  Otherwise,
		     normalize to -1 and `and' with UVAL.  */
		  normalizep = (cval != const0_rtx ? -1
				: (uval == const1_rtx ? 1
				   : (GET_CODE (uval) == CONST_INT
				      && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
				   ? 0 : -1));
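
		  /* Worked example (illustrative): if the code amounts to
		     `x = 0x10; if (c) goto over; x = 0; over:'  then
		     UVAL == 0x10 and CVAL == 0; with STORE_FLAG_VALUE == 1,
		     NORMALIZEP is -1, so below we compute
		     TARGET = (c ? -1 : 0)  and store  x = TARGET & 0x10.  */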

		  /* We will be putting the store-flag insn immediately in
		     front of the comparison that was originally being done,
		     so we know all the variables in TEMP4 will be valid.
		     However, this might be in front of the assignment of
		     A to VAR.  If it is, it would clobber the store-flag
		     we will be emitting.

		     Therefore, emit into a temporary which will be copied to
		     VAR immediately after TEMP.  */

		  start_sequence ();
		  target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
					    XEXP (temp4, 0), XEXP (temp4, 1),
					    VOIDmode,
					    (code == LTU || code == LEU
					     || code == GEU || code == GTU),
					    normalizep);
		  if (target)
		    {
		      rtx seq;
		      rtx before = insn;

		      seq = get_insns ();
		      end_sequence ();

		      /* Put the store-flag insns in front of the first insn
			 used to compute the condition to ensure that we
			 use the same values of them as the current
			 comparison.  However, the remainder of the insns we
			 generate will be placed directly in front of the
			 jump insn, in case any of the pseudos we use
			 are modified earlier.  */

		      emit_insns_before (seq, temp5);

		      start_sequence ();

		      /* Both CVAL and UVAL are non-zero.  */
		      if (cval != const0_rtx && uval != const0_rtx)
			{
			  rtx tem1, tem2;

			  tem1 = expand_and (uval, target, NULL_RTX);
			  if (GET_CODE (cval) == CONST_INT
			      && GET_CODE (uval) == CONST_INT
			      && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
			    tem2 = cval;
			  else
			    {
			      tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
						  target, NULL_RTX, 0);
			      tem2 = expand_and (cval, tem2,
						 (GET_CODE (tem2) == REG
						  ? tem2 : 0));
			    }

			  /* If we usually make new pseudos, do so here.  This
			     turns out to help machines that have conditional
			     move insns.  */
			  /* ??? Conditional moves have already been handled.
			     This may be obsolete.  */

			  if (flag_expensive_optimizations)
			    target = 0;

			  target = expand_binop (GET_MODE (var), ior_optab,
						 tem1, tem2, target,
						 1, OPTAB_WIDEN);
			}
		      else if (normalizep != 1)
			{
			  /* We know that either CVAL or UVAL is zero.  If
			     UVAL is zero, negate TARGET and `and' with CVAL.
			     Otherwise, `and' with UVAL.  */
			  if (uval == const0_rtx)
			    {
			      target = expand_unop (GET_MODE (var), one_cmpl_optab,
						    target, NULL_RTX, 0);
			      uval = cval;
			    }

			  target = expand_and (uval, target,
					       (GET_CODE (target) == REG
						&& ! preserve_subexpressions_p ()
						? target : NULL_RTX));
			}

		      emit_move_insn (var, target);
		      seq = get_insns ();
		      end_sequence ();
#ifdef HAVE_cc0
		      /* If INSN uses CC0, we must not separate it from the
			 insn that sets cc0.  */
		      if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
			before = prev_nonnote_insn (before);
#endif
		      emit_insns_before (seq, before);

		      delete_insn (temp);
		      next = NEXT_INSN (insn);
		      delete_jump (insn);

		      if (after_regscan)
			{
			  reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
			  old_max_reg = max_reg_num ();
			}

		      changed = 1;
		      continue;
		    }
		  else
		    end_sequence ();
		}
	    }
	  /* If branches are expensive, convert
	        if (foo) bar++;    to    bar += (foo != 0);
	     and similarly for "bar--;"

	     INSN is the conditional branch around the arithmetic.  We set:

	     TEMP to the arithmetic insn.
	     TEMP1 to the SET doing the arithmetic.
	     TEMP2 to the operand being incremented or decremented.
	     TEMP3 to the condition being tested.
	     TEMP4 to the earliest insn used to find the condition.  */
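
	  /* E.g. (illustrative): for `if (a == b) bar++;' the branch around
	     the increment tests a != b; we reverse it and emit a store-flag
	     of (a == b) followed by  bar += flag.  */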

	  if ((BRANCH_COST >= 2
#ifdef HAVE_incscc
	       || HAVE_incscc
#endif
#ifdef HAVE_decscc
	       || HAVE_decscc
#endif
	       )
	      && ! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && (temp = next_nonnote_insn (insn)) != 0
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1),
		  GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
	      && GET_CODE (SET_SRC (temp1)) == PLUS
	      && (XEXP (SET_SRC (temp1), 1) == const1_rtx
		  || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
	      && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
	      && ! side_effects_p (temp2)
	      && ! may_trap_p (temp2)
	      /* INSN must either branch to the insn after TEMP or the insn
		 after TEMP must branch to the same place as INSN.  */
	      && (reallabelprev == temp
		  || ((temp3 = next_active_insn (temp)) != 0
		      && simplejump_p (temp3)
		      && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
	      && (temp3 = get_condition (insn, &temp4)) != 0
	      /* We must be comparing objects whose modes imply the size.
		 We could handle BLKmode if (1) emit_store_flag could
		 and (2) we could find the size reliably.  */
	      && GET_MODE (XEXP (temp3, 0)) != BLKmode
	      && can_reverse_comparison_p (temp3, insn))
	    {
	      rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
	      enum rtx_code code = reverse_condition (GET_CODE (temp3));

	      start_sequence ();

	      /* It must be the case that TEMP2 is not modified in the range
		 [TEMP4, INSN).  The one exception we make is if the insn
		 before INSN sets TEMP2 to something which is also unchanged
		 in that range.  In that case, we can move the initialization
		 into our sequence.  */

	      if ((temp5 = prev_active_insn (insn)) != 0
		  && no_labels_between_p (temp5, insn)
		  && GET_CODE (temp5) == INSN
		  && (temp6 = single_set (temp5)) != 0
		  && rtx_equal_p (temp2, SET_DEST (temp6))
		  && (CONSTANT_P (SET_SRC (temp6))
		      || GET_CODE (SET_SRC (temp6)) == REG
		      || GET_CODE (SET_SRC (temp6)) == SUBREG))
		{
		  emit_insn (PATTERN (temp5));
		  init_insn = temp5;
		  init = SET_SRC (temp6);
		}

	      if (CONSTANT_P (init)
		  || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
		target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
					  XEXP (temp3, 0), XEXP (temp3, 1),
					  VOIDmode,
					  (code == LTU || code == LEU
					   || code == GTU || code == GEU), 1);

	      /* If we can do the store-flag, do the addition or
		 subtraction.  */

	      if (target)
		target = expand_binop (GET_MODE (temp2),
				       (XEXP (SET_SRC (temp1), 1) == const1_rtx
					? add_optab : sub_optab),
				       temp2, target, temp2, 0, OPTAB_WIDEN);

	      if (target != 0)
		{
		  /* Put the result back in temp2 in case it isn't already.
		     Then replace the jump, possibly a CC0-setting insn in
		     front of the jump, and TEMP, with the sequence we have
		     made.  */

		  if (target != temp2)
		    emit_move_insn (temp2, target);

		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, temp4);
		  delete_insn (temp);

		  if (init_insn)
		    delete_insn (init_insn);

		  next = NEXT_INSN (insn);
#ifdef HAVE_cc0
		  delete_insn (prev_nonnote_insn (insn));
#endif
		  delete_insn (insn);

		  if (after_regscan)
		    {
		      reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }

		  changed = 1;
		  continue;
		}
	      else
		end_sequence ();
	    }
	  /* Simplify   if (...) x = 1; else {...}  if (x) ...
	     We recognize this case scanning backwards as well.

	     TEMP is the assignment to x;
	     TEMP1 is the label at the head of the second if.  */
	  /* ?? This should call get_condition to find the values being
	     compared, instead of looking for a COMPARE insn when HAVE_cc0
	     is not defined.  This would allow it to work on the m88k.  */
	  /* ?? This optimization is only safe before cse is run if HAVE_cc0
	     is not defined and the condition is tested by a separate compare
	     insn.  This is because the code below assumes that the result
	     of the compare dies in the following branch.

	     Not only that, but there might be other insns between the
	     compare and branch whose results are live.  Those insns need
	     to be executed.

	     A way to fix this is to move the insns at JUMP_LABEL (insn)
	     to before INSN.  If we are running before flow, they will
	     be deleted if they aren't needed.  But this doesn't work
	     well after flow.

	     This is really a special-case of jump threading, anyway.  The
	     right thing to do is to replace this and jump threading with
	     much simpler code in cse.

	     This code has been turned off in the non-cc0 case in the
	     meantime.  */

#ifdef HAVE_cc0
	  else if (this_is_simplejump
		   /* Safe to skip USE and CLOBBER insns here
		      since they will not be deleted.  */
		   && (temp = prev_active_insn (insn))
		   && no_labels_between_p (temp, insn)
		   && GET_CODE (temp) == INSN
		   && GET_CODE (PATTERN (temp)) == SET
		   && GET_CODE (SET_DEST (PATTERN (temp))) == REG
		   && CONSTANT_P (SET_SRC (PATTERN (temp)))
		   && (temp1 = next_active_insn (JUMP_LABEL (insn)))
		   /* If we find that the next value tested is `x'
		      (TEMP1 is the insn where this happens), win.  */
		   && GET_CODE (temp1) == INSN
		   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
		   /* Does temp1 `tst' the value of x?  */
		   && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
		   && SET_DEST (PATTERN (temp1)) == cc0_rtx
		   && (temp1 = next_nonnote_insn (temp1))
#else
		   /* Does temp1 compare the value of x against zero?  */
		   && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
		   && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
		   && (XEXP (SET_SRC (PATTERN (temp1)), 0)
		       == SET_DEST (PATTERN (temp)))
		   && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
		   && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
#endif
		   && condjump_p (temp1))
	    {
	      /* Get the if_then_else from the condjump.  */
	      rtx choice = SET_SRC (PATTERN (temp1));
	      if (GET_CODE (choice) == IF_THEN_ELSE)
		{
		  enum rtx_code code = GET_CODE (XEXP (choice, 0));
		  rtx val = SET_SRC (PATTERN (temp));
		  rtx cond
		    = simplify_relational_operation (code,
						     GET_MODE (SET_DEST (PATTERN (temp))),
						     val, const0_rtx);
		  rtx ultimate;

		  if (cond == const_true_rtx)
		    ultimate = XEXP (choice, 1);
		  else if (cond == const0_rtx)
		    ultimate = XEXP (choice, 2);
		  else
		    ultimate = 0;

		  if (ultimate == pc_rtx)
		    ultimate = get_label_after (temp1);
		  else if (ultimate && GET_CODE (ultimate) != RETURN)
		    ultimate = XEXP (ultimate, 0);

		  if (ultimate && JUMP_LABEL (insn) != ultimate)
		    changed |= redirect_jump (insn, ultimate);
		}
	    }
#endif

#if 0
	  /* @@ This needs a bit of work before it will be right.

	     Any type of comparison can be accepted for the first and
	     second compare.  When rewriting the first jump, we must
	     compute what conditions can reach label3, and use the
	     appropriate code.  We cannot simply reverse/swap the code
	     of the first jump.  In some cases, the second jump must be
	     rewritten also.

	     For example,
	     < ==  converts to  > ==
	     < !=  converts to  == >
	     etc.

	     If the code is written to only accept an '==' test for the second
	     compare, then all that needs to be done is to swap the condition
	     of the first branch.

	     It is questionable whether we want this optimization anyways,
	     since if the user wrote code like this because he/she knew that
	     the jump to label1 is taken most of the time, then rewriting
	     this gives slower code.  */
	  /* @@ This should call get_condition to find the values being
	     compared, instead of looking for a COMPARE insn when HAVE_cc0
	     is not defined.  This would allow it to work on the m88k.  */
	  /* @@ This optimization is only safe before cse is run if HAVE_cc0
	     is not defined and the condition is tested by a separate compare
	     insn.  This is because the code below assumes that the result
	     of the compare dies in the following branch.  */

	  /* Simplify  test a ~= b
		       condjump label1;
		       test a == b
		       condjump label2;
		       jump label3;
		       label1:

	     rewriting as
		       test a ~~= b
		       condjump label3
		       test a == b
		       condjump label2
		       label1:

	     where ~= is an inequality, e.g. >, and ~~= is the swapped
	     inequality, e.g. <.

	     We recognize this case scanning backwards.

	     TEMP is the conditional jump to `label2';
	     TEMP1 is the test for `a == b';
	     TEMP2 is the conditional jump to `label1';
	     TEMP3 is the test for `a ~= b'.  */
	  else if (this_is_simplejump
		   && (temp = prev_active_insn (insn))
		   && no_labels_between_p (temp, insn)
		   && condjump_p (temp)
		   && (temp1 = prev_active_insn (temp))
		   && no_labels_between_p (temp1, temp)
		   && GET_CODE (temp1) == INSN
		   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
		   && sets_cc0_p (PATTERN (temp1)) == 1
#else
		   && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
		   && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
		   && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
#endif
		   && (temp2 = prev_active_insn (temp1))
		   && no_labels_between_p (temp2, temp1)
		   && condjump_p (temp2)
		   && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
		   && (temp3 = prev_active_insn (temp2))
		   && no_labels_between_p (temp3, temp2)
		   && GET_CODE (PATTERN (temp3)) == SET
		   && rtx_equal_p (SET_DEST (PATTERN (temp3)),
				   SET_DEST (PATTERN (temp1)))
		   && rtx_equal_p (SET_SRC (PATTERN (temp1)),
				   SET_SRC (PATTERN (temp3)))
		   && ! inequality_comparisons_p (PATTERN (temp))
		   && inequality_comparisons_p (PATTERN (temp2)))
	    {
	      rtx fallthrough_label = JUMP_LABEL (temp2);

	      ++LABEL_NUSES (fallthrough_label);
	      if (swap_jump (temp2, JUMP_LABEL (insn)))
		{
		  delete_insn (insn);
		  changed = 1;
		}

	      if (--LABEL_NUSES (fallthrough_label) == 0)
		delete_insn (fallthrough_label);
	    }
#endif
1535 /* Simplify if (...) {... x = 1;} if (x) ...
1536
1537 We recognize this case backwards.
1538
1539 TEMP is the test of `x';
1540 TEMP1 is the assignment to `x' at the end of the
1541 previous statement. */
1542 /* @@ This should call get_condition to find the values being
1543 compared, instead of looking for a COMPARE insn when HAVE_cc0
1544 is not defined. This would allow it to work on the m88k. */
1545 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1546 is not defined and the condition is tested by a separate compare
1547 insn. This is because the code below assumes that the result
1548 of the compare dies in the following branch. */
1549
1550 /* ??? This has to be turned off. The problem is that the
1551 unconditional jump might indirectly end up branching to the
1552 label between TEMP1 and TEMP. We can't detect this, in general,
1553 since it may become a jump to there after further optimizations.
1554 If that jump is done, it will be deleted, so we will retry
1555 this optimization in the next pass, thus an infinite loop.
1556
1557 The present code prevents this by putting the jump after the
1558 label, but this is not logically correct. */
1559 #if 0
1560 else if (this_is_condjump
1561 /* Safe to skip USE and CLOBBER insns here
1562 since they will not be deleted. */
1563 && (temp = prev_active_insn (insn))
1564 && no_labels_between_p (temp, insn)
1565 && GET_CODE (temp) == INSN
1566 && GET_CODE (PATTERN (temp)) == SET
1567 #ifdef HAVE_cc0
1568 && sets_cc0_p (PATTERN (temp)) == 1
1569 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1570 #else
1571 /* Temp must be a compare insn, we can not accept a register
1572 to register move here, since it may not be simply a
1573 tst insn. */
1574 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1575 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1576 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1577 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1578 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1579 #endif
1580 /* May skip USE or CLOBBER insns here
1581 for checking for opportunity, since we
1582 take care of them later. */
1583 && (temp1 = prev_active_insn (temp))
1584 && GET_CODE (temp1) == INSN
1585 && GET_CODE (PATTERN (temp1)) == SET
1586 #ifdef HAVE_cc0
1587 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1588 #else
1589 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1590 == SET_DEST (PATTERN (temp1)))
1591 #endif
1592 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1593 /* If this isn't true, cse will do the job. */
1594 && ! no_labels_between_p (temp1, temp))
1595 {
1596 /* Get the if_then_else from the condjump. */
1597 rtx choice = SET_SRC (PATTERN (insn));
1598 if (GET_CODE (choice) == IF_THEN_ELSE
1599 && (GET_CODE (XEXP (choice, 0)) == EQ
1600 || GET_CODE (XEXP (choice, 0)) == NE))
1601 {
1602 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1603 rtx last_insn;
1604 rtx ultimate;
1605 rtx p;
1606
1607 /* Get the place that condjump will jump to
1608 if it is reached from here. */
1609 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1610 == want_nonzero)
1611 ultimate = XEXP (choice, 1);
1612 else
1613 ultimate = XEXP (choice, 2);
1614 /* Get it as a CODE_LABEL. */
1615 if (ultimate == pc_rtx)
1616 ultimate = get_label_after (insn);
1617 else
1618 /* Get the label out of the LABEL_REF. */
1619 ultimate = XEXP (ultimate, 0);
1620
1621 /* Insert the jump immediately before TEMP, specifically
1622 after the label that is between TEMP1 and TEMP. */
1623 last_insn = PREV_INSN (temp);
1624
1625 /* If we would be branching to the next insn, the jump
1626 	       would immediately be deleted and then re-inserted in
1627 a subsequent pass over the code. So don't do anything
1628 in that case. */
1629 if (next_active_insn (last_insn)
1630 != next_active_insn (ultimate))
1631 {
1632 emit_barrier_after (last_insn);
1633 p = emit_jump_insn_after (gen_jump (ultimate),
1634 last_insn);
1635 JUMP_LABEL (p) = ultimate;
1636 ++LABEL_NUSES (ultimate);
1637 if (INSN_UID (ultimate) < max_jump_chain
1638 		    && INSN_UID (p) < max_jump_chain)
1639 {
1640 jump_chain[INSN_UID (p)]
1641 = jump_chain[INSN_UID (ultimate)];
1642 jump_chain[INSN_UID (ultimate)] = p;
1643 }
1644 changed = 1;
1645 continue;
1646 }
1647 }
1648 }
1649 #endif
1650 /* Detect a conditional jump going to the same place
1651 as an immediately following unconditional jump. */
1652 else if (this_is_condjump
1653 && (temp = next_active_insn (insn)) != 0
1654 && simplejump_p (temp)
1655 && (next_active_insn (JUMP_LABEL (insn))
1656 == next_active_insn (JUMP_LABEL (temp))))
1657 {
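	  /* Schematically (an illustrative sketch, not from any
	     particular target), the pair recognized here is

		 (jump_insn (set (pc) (if_then_else COND
					(label_ref L1) (pc))))
		 (jump_insn (set (pc) (label_ref L2)))

	     where L1 and L2 reach the same next active insn, so the
	     conditional jump is redundant.  */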
1658 rtx tem = temp;
1659
1660 /* ??? Optional. Disables some optimizations, but makes
1661 gcov output more accurate with -O. */
1662 if (flag_test_coverage && !reload_completed)
1663 for (tem = insn; tem != temp; tem = NEXT_INSN (tem))
1664 if (GET_CODE (tem) == NOTE && NOTE_LINE_NUMBER (tem) > 0)
1665 break;
1666
1667 if (tem == temp)
1668 {
1669 delete_jump (insn);
1670 changed = 1;
1671 continue;
1672 }
1673 }
1674 #ifdef HAVE_trap
1675 /* Detect a conditional jump jumping over an unconditional trap. */
1676 else if (HAVE_trap
1677 && this_is_condjump && ! this_is_simplejump
1678 && reallabelprev != 0
1679 && GET_CODE (reallabelprev) == INSN
1680 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1681 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1682 && prev_active_insn (reallabelprev) == insn
1683 && no_labels_between_p (insn, reallabelprev)
1684 && (temp2 = get_condition (insn, &temp4))
1685 && can_reverse_comparison_p (temp2, insn))
1686 {
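	  /* Schematically (an illustrative sketch):

	       before:  cond-jump L; trap-if (const_true); L: ...
	       after:   trap-if (reversed COND), emitted at the point
			where the condition was computed; L: ...  */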
1687 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1688 XEXP (temp2, 0), XEXP (temp2, 1),
1689 TRAP_CODE (PATTERN (reallabelprev)));
1690
1691 if (new)
1692 {
1693 emit_insn_before (new, temp4);
1694 delete_insn (reallabelprev);
1695 delete_jump (insn);
1696 changed = 1;
1697 continue;
1698 }
1699 }
1700 /* Detect a jump jumping to an unconditional trap. */
1701 else if (HAVE_trap && this_is_condjump
1702 && (temp = next_active_insn (JUMP_LABEL (insn)))
1703 && GET_CODE (temp) == INSN
1704 && GET_CODE (PATTERN (temp)) == TRAP_IF
1705 && (this_is_simplejump
1706 || (temp2 = get_condition (insn, &temp4))))
1707 {
1708 rtx tc = TRAP_CONDITION (PATTERN (temp));
1709
1710 if (tc == const_true_rtx
1711 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1712 {
1713 rtx new;
1714 /* Replace an unconditional jump to a trap with a trap. */
1715 if (this_is_simplejump)
1716 {
1717 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1718 delete_jump (insn);
1719 changed = 1;
1720 continue;
1721 }
1722 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1723 XEXP (temp2, 1),
1724 TRAP_CODE (PATTERN (temp)));
1725 if (new)
1726 {
1727 emit_insn_before (new, temp4);
1728 delete_jump (insn);
1729 changed = 1;
1730 continue;
1731 }
1732 }
1733 /* If the trap condition and jump condition are mutually
1734 exclusive, redirect the jump to the following insn. */
1735 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1736 && ! this_is_simplejump
1737 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
1738 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
1739 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
1740 && redirect_jump (insn, get_label_after (temp)))
1741 {
1742 changed = 1;
1743 continue;
1744 }
1745 }
1746 #endif
1747
1748 /* Detect a conditional jump jumping over an unconditional jump. */
1749
1750 else if ((this_is_condjump || this_is_condjump_in_parallel)
1751 && ! this_is_simplejump
1752 && reallabelprev != 0
1753 && GET_CODE (reallabelprev) == JUMP_INSN
1754 && prev_active_insn (reallabelprev) == insn
1755 && no_labels_between_p (insn, reallabelprev)
1756 && simplejump_p (reallabelprev))
1757 {
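	  /* Schematically (an illustrative sketch):

	       before:  cond-jump L1; jump L2; L1: ...
	       after:   inverted-cond-jump L2; L1: ...

	     The unconditional jump is absorbed into the inverted
	     condition.  */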
1758 	  /* When we invert the conditional jump, we will be
1759 decrementing the usage count of its old label.
1760 Make sure that we don't delete it now because that
1761 might cause the following code to be deleted. */
1762 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1763 rtx prev_label = JUMP_LABEL (insn);
1764
1765 if (prev_label)
1766 ++LABEL_NUSES (prev_label);
1767
1768 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1769 {
1770 /* It is very likely that if there are USE insns before
1771 this jump, they hold REG_DEAD notes. These REG_DEAD
1772 notes are no longer valid due to this optimization,
1773 		 and will cause later passes that use life analysis
1774 		 (notably delayed-branch scheduling) to think that
1775 		 these registers are dead when they are not.
1776
1777 To prevent this trouble, we just remove the USE insns
1778 from the insn chain. */
1779
1780 while (prev_uses && GET_CODE (prev_uses) == INSN
1781 && GET_CODE (PATTERN (prev_uses)) == USE)
1782 {
1783 rtx useless = prev_uses;
1784 prev_uses = prev_nonnote_insn (prev_uses);
1785 delete_insn (useless);
1786 }
1787
1788 delete_insn (reallabelprev);
1789 next = insn;
1790 changed = 1;
1791 }
1792
1793 /* We can now safely delete the label if it is unreferenced
1794 since the delete_insn above has deleted the BARRIER. */
1795 if (prev_label && --LABEL_NUSES (prev_label) == 0)
1796 delete_insn (prev_label);
1797 continue;
1798 }
1799 else
1800 {
1801 /* Detect a jump to a jump. */
1802
1803 nlabel = follow_jumps (JUMP_LABEL (insn));
1804 if (nlabel != JUMP_LABEL (insn)
1805 && redirect_jump (insn, nlabel))
1806 {
1807 changed = 1;
1808 next = insn;
1809 }
1810
1811 /* Look for if (foo) bar; else break; */
1812 /* The insns look like this:
1813 insn = condjump label1;
1814 ...range1 (some insns)...
1815 jump label2;
1816 label1:
1817 ...range2 (some insns)...
1818 jump somewhere unconditionally
1819 label2: */
1820 {
1821 rtx label1 = next_label (insn);
1822 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1823 /* Don't do this optimization on the first round, so that
1824 jump-around-a-jump gets simplified before we ask here
1825 whether a jump is unconditional.
1826
1827 Also don't do it when we are called after reload since
1828 it will confuse reorg. */
1829 if (! first
1830 && (reload_completed ? ! flag_delayed_branch : 1)
1831 /* Make sure INSN is something we can invert. */
1832 && condjump_p (insn)
1833 && label1 != 0
1834 && JUMP_LABEL (insn) == label1
1835 && LABEL_NUSES (label1) == 1
1836 && GET_CODE (range1end) == JUMP_INSN
1837 && simplejump_p (range1end))
1838 {
1839 rtx label2 = next_label (label1);
1840 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1841 if (range1end != range2end
1842 && JUMP_LABEL (range1end) == label2
1843 && GET_CODE (range2end) == JUMP_INSN
1844 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1845 /* Invert the jump condition, so we
1846 still execute the same insns in each case. */
1847 && invert_jump (insn, label1))
1848 {
1849 rtx range1beg = next_active_insn (insn);
1850 rtx range2beg = next_active_insn (label1);
1851 rtx range1after, range2after;
1852 rtx range1before, range2before;
1853 rtx rangenext;
1854
1855 /* Include in each range any notes before it, to be
1856 sure that we get the line number note if any, even
1857 if there are other notes here. */
1858 while (PREV_INSN (range1beg)
1859 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
1860 range1beg = PREV_INSN (range1beg);
1861
1862 while (PREV_INSN (range2beg)
1863 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
1864 range2beg = PREV_INSN (range2beg);
1865
1866 /* Don't move NOTEs for blocks or loops; shift them
1867 outside the ranges, where they'll stay put. */
1868 range1beg = squeeze_notes (range1beg, range1end);
1869 range2beg = squeeze_notes (range2beg, range2end);
1870
1871 /* Get current surrounds of the 2 ranges. */
1872 range1before = PREV_INSN (range1beg);
1873 range2before = PREV_INSN (range2beg);
1874 range1after = NEXT_INSN (range1end);
1875 range2after = NEXT_INSN (range2end);
1876
1877 /* Splice range2 where range1 was. */
1878 NEXT_INSN (range1before) = range2beg;
1879 PREV_INSN (range2beg) = range1before;
1880 NEXT_INSN (range2end) = range1after;
1881 PREV_INSN (range1after) = range2end;
1882 /* Splice range1 where range2 was. */
1883 NEXT_INSN (range2before) = range1beg;
1884 PREV_INSN (range1beg) = range2before;
1885 NEXT_INSN (range1end) = range2after;
1886 PREV_INSN (range2after) = range1end;
1887
1888 /* Check for a loop end note between the end of
1889 range2, and the next code label. If there is one,
1890 then what we have really seen is
1891 if (foo) break; end_of_loop;
1892 and moved the break sequence outside the loop.
1893 We must move the LOOP_END note to where the
1894 loop really ends now, or we will confuse loop
1895 optimization. Stop if we find a LOOP_BEG note
1896 first, since we don't want to move the LOOP_END
1897 note in that case. */
1898 for (;range2after != label2; range2after = rangenext)
1899 {
1900 rangenext = NEXT_INSN (range2after);
1901 if (GET_CODE (range2after) == NOTE)
1902 {
1903 if (NOTE_LINE_NUMBER (range2after)
1904 == NOTE_INSN_LOOP_END)
1905 {
1906 NEXT_INSN (PREV_INSN (range2after))
1907 = rangenext;
1908 PREV_INSN (rangenext)
1909 = PREV_INSN (range2after);
1910 PREV_INSN (range2after)
1911 = PREV_INSN (range1beg);
1912 NEXT_INSN (range2after) = range1beg;
1913 NEXT_INSN (PREV_INSN (range1beg))
1914 = range2after;
1915 PREV_INSN (range1beg) = range2after;
1916 }
1917 else if (NOTE_LINE_NUMBER (range2after)
1918 == NOTE_INSN_LOOP_BEG)
1919 break;
1920 }
1921 }
1922 changed = 1;
1923 continue;
1924 }
1925 }
1926 }
1927
1928 /* Now that the jump has been tensioned,
1929 try cross jumping: check for identical code
1930 before the jump and before its target label. */
1931
1932 /* First, cross jumping of conditional jumps: */
1933
1934 if (cross_jump && condjump_p (insn))
1935 {
1936 rtx newjpos, newlpos;
1937 rtx x = prev_real_insn (JUMP_LABEL (insn));
1938
1939 /* A conditional jump may be crossjumped
1940 only if the place it jumps to follows
1941 an opposing jump that comes back here. */
1942
1943 if (x != 0 && ! jump_back_p (x, insn))
1944 /* We have no opposing jump;
1945 	       we cannot cross jump this insn.  */
1946 x = 0;
1947
1948 newjpos = 0;
1949 	  /* X is nonzero if it is ok to cross jump
1950 	     to code before X.  If so, see if it matches.  */
1951 if (x != 0)
1952 find_cross_jump (insn, x, 2,
1953 &newjpos, &newlpos);
1954
1955 if (newjpos != 0)
1956 {
1957 do_cross_jump (insn, newjpos, newlpos);
1958 /* Make the old conditional jump
1959 into an unconditional one. */
1960 SET_SRC (PATTERN (insn))
1961 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
1962 INSN_CODE (insn) = -1;
1963 emit_barrier_after (insn);
1964 /* Add to jump_chain unless this is a new label
1965 whose UID is too large. */
1966 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
1967 {
1968 jump_chain[INSN_UID (insn)]
1969 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1970 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
1971 }
1972 changed = 1;
1973 next = insn;
1974 }
1975 }
1976
1977 /* Cross jumping of unconditional jumps:
1978 a few differences. */
1979
1980 if (cross_jump && simplejump_p (insn))
1981 {
1982 rtx newjpos, newlpos;
1983 rtx target;
1984
1985 newjpos = 0;
1986
1987 	  /* See if the insns before this jump match the insns
1988 	     before its target label.  */
1989 find_cross_jump (insn, JUMP_LABEL (insn), 1,
1990 &newjpos, &newlpos);
1991
1992 	  /* If we cannot cross jump to code before the label,
1993 see if we can cross jump to another jump to
1994 the same label. */
1995 /* Try each other jump to this label. */
1996 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
1997 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1998 target != 0 && newjpos == 0;
1999 target = jump_chain[INSN_UID (target)])
2000 if (target != insn
2001 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2002 /* Ignore TARGET if it's deleted. */
2003 && ! INSN_DELETED_P (target))
2004 find_cross_jump (insn, target, 2,
2005 &newjpos, &newlpos);
2006
2007 if (newjpos != 0)
2008 {
2009 do_cross_jump (insn, newjpos, newlpos);
2010 changed = 1;
2011 next = insn;
2012 }
2013 }
2014
2015 /* This code was dead in the previous jump.c! */
2016 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2017 {
2018 /* Return insns all "jump to the same place"
2019 so we can cross-jump between any two of them. */
2020
2021 rtx newjpos, newlpos, target;
2022
2023 newjpos = 0;
2024
2025 	  /* If we cannot cross jump to code before the label,
2026 see if we can cross jump to another jump to
2027 the same label. */
2028 /* Try each other jump to this label. */
2029 for (target = jump_chain[0];
2030 target != 0 && newjpos == 0;
2031 target = jump_chain[INSN_UID (target)])
2032 if (target != insn
2033 && ! INSN_DELETED_P (target)
2034 && GET_CODE (PATTERN (target)) == RETURN)
2035 find_cross_jump (insn, target, 2,
2036 &newjpos, &newlpos);
2037
2038 if (newjpos != 0)
2039 {
2040 do_cross_jump (insn, newjpos, newlpos);
2041 changed = 1;
2042 next = insn;
2043 }
2044 }
2045 }
2046 }
2047
2048 first = 0;
2049 }
2050
2051 /* Delete extraneous line number notes.
2052 Note that two consecutive notes for different lines are not really
2053 extraneous. There should be some indication where that line belonged,
2054 even if it became empty. */
2055
2056 {
2057 rtx last_note = 0;
2058
2059 for (insn = f; insn; insn = NEXT_INSN (insn))
2060 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2061 {
2062 /* Delete this note if it is identical to previous note. */
2063 if (last_note
2064 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2065 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2066 {
2067 delete_insn (insn);
2068 continue;
2069 }
2070
2071 last_note = insn;
2072 }
2073 }
2074
2075 #ifdef HAVE_return
2076 if (HAVE_return)
2077 {
2078 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2079 in front of it. If the machine allows it at this point (we might be
2080 after reload for a leaf routine), it will improve optimization for it
2081 to be there. We do this both here and at the start of this pass since
2082 the RETURN might have been deleted by some of our optimizations. */
2083 insn = get_last_insn ();
2084 while (insn && GET_CODE (insn) == NOTE)
2085 insn = PREV_INSN (insn);
2086
2087 if (insn && GET_CODE (insn) != BARRIER)
2088 {
2089 emit_jump_insn (gen_return ());
2090 emit_barrier ();
2091 }
2092 }
2093 #endif
2094
2095 /* CAN_REACH_END is persistent for each function. Once set it should
2096 not be cleared. This is especially true for the case where we
2097 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
2098 the front-end before compiling each function. */
2099 if (calculate_can_reach_end (last_insn, 0, 1))
2100 can_reach_end = 1;
2101
2102 /* Show JUMP_CHAIN no longer valid. */
2103 jump_chain = 0;
2104 }
2105 \f
2106 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2107 notes whose labels don't occur in the insn any more. Returns the
2108 largest INSN_UID found. */
2109 static int
2110 init_label_info (f)
2111 rtx f;
2112 {
2113 int largest_uid = 0;
2114 rtx insn;
2115
2116 for (insn = f; insn; insn = NEXT_INSN (insn))
2117 {
2118 if (GET_CODE (insn) == CODE_LABEL)
2119 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2120 else if (GET_CODE (insn) == JUMP_INSN)
2121 JUMP_LABEL (insn) = 0;
2122 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2123 {
2124 rtx note, next;
2125
2126 for (note = REG_NOTES (insn); note; note = next)
2127 {
2128 next = XEXP (note, 1);
2129 if (REG_NOTE_KIND (note) == REG_LABEL
2130 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2131 remove_note (insn, note);
2132 }
2133 }
2134 if (INSN_UID (insn) > largest_uid)
2135 largest_uid = INSN_UID (insn);
2136 }
2137
2138 return largest_uid;
2139 }
2140
2141 /* Delete insns following barriers, up to the next label.
2142
2143 Also delete no-op jumps created by gcse. */
2144 static void
2145 delete_barrier_successors (f)
2146 rtx f;
2147 {
2148 rtx insn;
2149
2150 for (insn = f; insn;)
2151 {
2152 if (GET_CODE (insn) == BARRIER)
2153 {
2154 insn = NEXT_INSN (insn);
2155 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2156 {
2157 if (GET_CODE (insn) == NOTE
2158 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2159 insn = NEXT_INSN (insn);
2160 else
2161 insn = delete_insn (insn);
2162 }
2163 /* INSN is now the code_label. */
2164 }
2165 /* Also remove (set (pc) (pc)) insns which can be created by
2166 gcse. We eliminate such insns now to avoid having them
2167 cause problems later. */
2168 else if (GET_CODE (insn) == JUMP_INSN
2169 && SET_SRC (PATTERN (insn)) == pc_rtx
2170 && SET_DEST (PATTERN (insn)) == pc_rtx)
2171 insn = delete_insn (insn);
2172
2173 else
2174 insn = NEXT_INSN (insn);
2175 }
2176 }
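
/* For example (a sketch): in the stream
       (barrier) (insn A) (insn B) (code_label L)
   insns A and B are unreachable and are deleted above, as is a no-op
   (set (pc) (pc)) jump wherever it appears.  */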
2177
2178 /* Mark the label each jump jumps to.
2179 Combine consecutive labels, and count uses of labels.
2180
2181 For each label, make a chain (using `jump_chain')
2182 of all the *unconditional* jumps that jump to it;
2183 also make a chain of all returns.
2184
2185    CROSS_JUMP indicates whether we are doing cross jumping,
2186    and, if so, whether we will be paying attention to
2187    death notes.  */
2188
2189 static void
2190 mark_all_labels (f, cross_jump)
2191 rtx f;
2192 int cross_jump;
2193 {
2194 rtx insn;
2195
2196 for (insn = f; insn; insn = NEXT_INSN (insn))
2197 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2198 {
2199 mark_jump_label (PATTERN (insn), insn, cross_jump);
2200 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2201 {
2202 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2203 {
2204 jump_chain[INSN_UID (insn)]
2205 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2206 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2207 }
2208 if (GET_CODE (PATTERN (insn)) == RETURN)
2209 {
2210 jump_chain[INSN_UID (insn)] = jump_chain[0];
2211 jump_chain[0] = insn;
2212 }
2213 }
2214 }
2215 }
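
/* A minimal sketch (illustrative only -- not used by the pass, and the
   helper name is hypothetical) of how the chains built above are walked
   for a given CODE_LABEL.  */
#if 0
static void
walk_jumps_to (label)
     rtx label;
{
  rtx j;

  /* Visit once each unconditional jump whose JUMP_LABEL is LABEL;
     jump_chain[0] heads the analogous chain of RETURN insns.  */
  for (j = jump_chain[INSN_UID (label)]; j != 0;
       j = jump_chain[INSN_UID (j)])
    ;
}
#endif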
2216
2217 /* Delete all labels that are no longer referenced.
2218 Also find and return the last insn. */
2219
2220 static rtx
2221 delete_unreferenced_labels (f)
2222 rtx f;
2223 {
2224 rtx final = NULL_RTX;
2225 rtx insn;
2226
2227 for (insn = f; insn; )
2228 {
2229 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
2230 insn = delete_insn (insn);
2231 else
2232 {
2233 final = insn;
2234 insn = NEXT_INSN (insn);
2235 }
2236 }
2237
2238 return final;
2239 }
2240
2241 /* Delete various simple forms of moves which have no necessary
2242 side effect. */
2243
2244 static void
2245 delete_noop_moves (f)
2246 rtx f;
2247 {
2248 rtx insn, next;
2249
2250 for (insn = f; insn; )
2251 {
2252 next = NEXT_INSN (insn);
2253
2254 if (GET_CODE (insn) == INSN)
2255 {
2256 register rtx body = PATTERN (insn);
2257
2258 /* Combine stack_adjusts with following push_insns. */
2259 #ifdef PUSH_ROUNDING
2260 if (GET_CODE (body) == SET
2261 && SET_DEST (body) == stack_pointer_rtx
2262 && GET_CODE (SET_SRC (body)) == PLUS
2263 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
2264 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
2265 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
2266 {
2267 rtx p;
2268 rtx stack_adjust_insn = insn;
2269 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
2270 int total_pushed = 0;
2271 int pushes = 0;
2272
2273 /* Find all successive push insns. */
2274 p = insn;
2275 /* Don't convert more than three pushes;
2276 that starts adding too many displaced addresses
2277 and the whole thing starts becoming a losing
2278 proposition. */
2279 while (pushes < 3)
2280 {
2281 rtx pbody, dest;
2282 p = next_nonnote_insn (p);
2283 if (p == 0 || GET_CODE (p) != INSN)
2284 break;
2285 pbody = PATTERN (p);
2286 if (GET_CODE (pbody) != SET)
2287 break;
2288 dest = SET_DEST (pbody);
2289 /* Allow a no-op move between the adjust and the push. */
2290 if (GET_CODE (dest) == REG
2291 && GET_CODE (SET_SRC (pbody)) == REG
2292 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2293 continue;
2294 if (! (GET_CODE (dest) == MEM
2295 && GET_CODE (XEXP (dest, 0)) == POST_INC
2296 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2297 break;
2298 pushes++;
2299 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
2300 > stack_adjust_amount)
2301 break;
2302 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2303 }
2304
2305 /* Discard the amount pushed from the stack adjust;
2306 maybe eliminate it entirely. */
2307 if (total_pushed >= stack_adjust_amount)
2308 {
2309 delete_computation (stack_adjust_insn);
2310 total_pushed = stack_adjust_amount;
2311 }
2312 else
2313 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
2314 = GEN_INT (stack_adjust_amount - total_pushed);
2315
2316 /* Change the appropriate push insns to ordinary stores. */
2317 p = insn;
2318 while (total_pushed > 0)
2319 {
2320 rtx pbody, dest;
2321 p = next_nonnote_insn (p);
2322 if (GET_CODE (p) != INSN)
2323 break;
2324 pbody = PATTERN (p);
2325 if (GET_CODE (pbody) != SET)
2326 break;
2327 dest = SET_DEST (pbody);
2328 /* Allow a no-op move between the adjust and the push. */
2329 if (GET_CODE (dest) == REG
2330 && GET_CODE (SET_SRC (pbody)) == REG
2331 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2332 continue;
2333 if (! (GET_CODE (dest) == MEM
2334 && GET_CODE (XEXP (dest, 0)) == POST_INC
2335 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2336 break;
2337 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2338 /* If this push doesn't fully fit in the space
2339 of the stack adjust that we deleted,
2340 make another stack adjust here for what we
2341 didn't use up. There should be peepholes
2342 to recognize the resulting sequence of insns. */
2343 if (total_pushed < 0)
2344 {
2345 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
2346 GEN_INT (- total_pushed)),
2347 p);
2348 break;
2349 }
2350 XEXP (dest, 0)
2351 = plus_constant (stack_pointer_rtx, total_pushed);
2352 }
2353 }
2354 #endif
2355
2356 /* Detect and delete no-op move instructions
2357 resulting from not allocating a parameter in a register. */
2358
2359 if (GET_CODE (body) == SET
2360 && (SET_DEST (body) == SET_SRC (body)
2361 || (GET_CODE (SET_DEST (body)) == MEM
2362 && GET_CODE (SET_SRC (body)) == MEM
2363 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2364 && ! (GET_CODE (SET_DEST (body)) == MEM
2365 && MEM_VOLATILE_P (SET_DEST (body)))
2366 && ! (GET_CODE (SET_SRC (body)) == MEM
2367 && MEM_VOLATILE_P (SET_SRC (body))))
2368 delete_computation (insn);
2369
2370 /* Detect and ignore no-op move instructions
2371 resulting from smart or fortuitous register allocation. */
2372
2373 else if (GET_CODE (body) == SET)
2374 {
2375 int sreg = true_regnum (SET_SRC (body));
2376 int dreg = true_regnum (SET_DEST (body));
2377
2378 if (sreg == dreg && sreg >= 0)
2379 delete_insn (insn);
2380 else if (sreg >= 0 && dreg >= 0)
2381 {
2382 rtx trial;
2383 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2384 sreg, NULL_PTR, dreg,
2385 GET_MODE (SET_SRC (body)));
2386
2387 if (tem != 0
2388 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2389 {
2390 /* DREG may have been the target of a REG_DEAD note in
2391 the insn which makes INSN redundant. If so, reorg
2392 would still think it is dead. So search for such a
2393 note and delete it if we find it. */
2394 if (! find_regno_note (insn, REG_UNUSED, dreg))
2395 for (trial = prev_nonnote_insn (insn);
2396 trial && GET_CODE (trial) != CODE_LABEL;
2397 trial = prev_nonnote_insn (trial))
2398 if (find_regno_note (trial, REG_DEAD, dreg))
2399 {
2400 remove_death (dreg, trial);
2401 break;
2402 }
2403
2404 /* Deleting insn could lose a death-note for SREG. */
2405 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2406 {
2407 /* Change this into a USE so that we won't emit
2408 code for it, but still can keep the note. */
2409 PATTERN (insn)
2410 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2411 INSN_CODE (insn) = -1;
2412 /* Remove all reg notes but the REG_DEAD one. */
2413 REG_NOTES (insn) = trial;
2414 XEXP (trial, 1) = NULL_RTX;
2415 }
2416 else
2417 delete_insn (insn);
2418 }
2419 }
2420 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2421 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2422 NULL_PTR, 0,
2423 GET_MODE (SET_DEST (body))))
2424 {
2425 /* This handles the case where we have two consecutive
2426 assignments of the same constant to pseudos that didn't
2427 get a hard reg. Each SET from the constant will be
2428 converted into a SET of the spill register and an
2429 output reload will be made following it. This produces
2430 two loads of the same constant into the same spill
2431 register. */
2432
2433 rtx in_insn = insn;
2434
2435 /* Look back for a death note for the first reg.
2436 If there is one, it is no longer accurate. */
2437 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2438 {
2439 if ((GET_CODE (in_insn) == INSN
2440 || GET_CODE (in_insn) == JUMP_INSN)
2441 && find_regno_note (in_insn, REG_DEAD, dreg))
2442 {
2443 remove_death (dreg, in_insn);
2444 break;
2445 }
2446 in_insn = PREV_INSN (in_insn);
2447 }
2448
2449 /* Delete the second load of the value. */
2450 delete_insn (insn);
2451 }
2452 }
2453 else if (GET_CODE (body) == PARALLEL)
2454 {
2455 /* If each part is a set between two identical registers or
2456 a USE or CLOBBER, delete the insn. */
2457 int i, sreg, dreg;
2458 rtx tem;
2459
2460 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2461 {
2462 tem = XVECEXP (body, 0, i);
2463 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2464 continue;
2465
2466 if (GET_CODE (tem) != SET
2467 || (sreg = true_regnum (SET_SRC (tem))) < 0
2468 || (dreg = true_regnum (SET_DEST (tem))) < 0
2469 || dreg != sreg)
2470 break;
2471 }
2472
2473 if (i < 0)
2474 delete_insn (insn);
2475 }
2476 /* Also delete insns to store bit fields if they are no-ops. */
2477 /* Not worth the hair to detect this in the big-endian case. */
2478 else if (! BYTES_BIG_ENDIAN
2479 && GET_CODE (body) == SET
2480 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2481 && XEXP (SET_DEST (body), 2) == const0_rtx
2482 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2483 && ! (GET_CODE (SET_SRC (body)) == MEM
2484 && MEM_VOLATILE_P (SET_SRC (body))))
2485 delete_insn (insn);
2486 }
2487 insn = next;
2488 }
2489 }
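
/* For example (a sketch): after register allocation an insn such as
       (set (reg:SI 3) (reg:SI 3)),
   or a move between two pseudos that landed in the same hard register,
   is deleted above as a no-op.  */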
2490
2491 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2492    If so, indicate that this function can drop off the end by returning
2493 1, else return 0.
2494
2495 CHECK_DELETED indicates whether we must check if the note being
2496 searched for has the deleted flag set.
2497
2498 DELETE_FINAL_NOTE indicates whether we should delete the note
2499 if we find it. */
2500
2501 static int
2502 calculate_can_reach_end (last, check_deleted, delete_final_note)
2503 rtx last;
2504 int check_deleted;
2505 int delete_final_note;
2506 {
2507 rtx insn = last;
2508 int n_labels = 1;
2509
2510 while (insn != NULL_RTX)
2511 {
2512 int ok = 0;
2513
2514 /* One label can follow the end-note: the return label. */
2515 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2516 ok = 1;
2517 /* Ordinary insns can follow it if returning a structure. */
2518 else if (GET_CODE (insn) == INSN)
2519 ok = 1;
2520       /* If the machine uses explicit RETURN insns and no epilogue,
2521 then one of them follows the note. */
2522 else if (GET_CODE (insn) == JUMP_INSN
2523 && GET_CODE (PATTERN (insn)) == RETURN)
2524 ok = 1;
2525 /* A barrier can follow the return insn. */
2526 else if (GET_CODE (insn) == BARRIER)
2527 ok = 1;
2528 /* Other kinds of notes can follow also. */
2529 else if (GET_CODE (insn) == NOTE
2530 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2531 ok = 1;
2532
2533 if (ok != 1)
2534 break;
2535
2536 insn = PREV_INSN (insn);
2537 }
2538
2539 /* See if we backed up to the appropriate type of note. */
2540 if (insn != NULL_RTX
2541 && GET_CODE (insn) == NOTE
2542 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
2543 && (check_deleted == 0
2544 || ! INSN_DELETED_P (insn)))
2545 {
2546 if (delete_final_note)
2547 delete_insn (insn);
2548 return 1;
2549 }
2550
2551 return 0;
2552 }
2553
2554 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2555 jump. Assume that this unconditional jump is to the exit test code. If
2556    the code is sufficiently simple, make a copy of it before LOOP_START,
2557    followed by a jump to the exit of the loop.  Then delete the unconditional
2558    jump after LOOP_START.
2559
2560 Return 1 if we made the change, else 0.
2561
2562 This is only safe immediately after a regscan pass because it uses the
2563 values of regno_first_uid and regno_last_uid. */
2564
2565 static int
2566 duplicate_loop_exit_test (loop_start)
2567 rtx loop_start;
2568 {
2569 rtx insn, set, reg, p, link;
2570 rtx copy = 0;
2571 int num_insns = 0;
2572 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2573 rtx lastexit;
2574 int max_reg = max_reg_num ();
2575 rtx *reg_map = 0;
2576
2577 /* Scan the exit code. We do not perform this optimization if any insn:
2578
2579 is a CALL_INSN
2580 is a CODE_LABEL
2581 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2582 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2583 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2584 is not valid.
2585
2586 We also do not do this if we find an insn with ASM_OPERANDS. While
2587 this restriction should not be necessary, copying an insn with
2588 ASM_OPERANDS can confuse asm_noperands in some cases.
2589
2590 Also, don't do this if the exit code is more than 20 insns. */
2591
2592 for (insn = exitcode;
2593 insn
2594 && ! (GET_CODE (insn) == NOTE
2595 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2596 insn = NEXT_INSN (insn))
2597 {
2598 switch (GET_CODE (insn))
2599 {
2600 case CODE_LABEL:
2601 case CALL_INSN:
2602 return 0;
2603 case NOTE:
2604 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2605 a jump immediately after the loop start that branches outside
2606 the loop but within an outer loop, near the exit test.
2607 If we copied this exit test and created a phony
2608 	     NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2609 	     before the exit test look as though they could be safely moved
2610 	     out of the loop even though they might never actually be executed.
2611 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2612
2613 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2614 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2615 return 0;
2616
2617 if (optimize < 2
2618 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2619 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2620 /* If we were to duplicate this code, we would not move
2621 the BLOCK notes, and so debugging the moved code would
2622 be difficult. Thus, we only move the code with -O2 or
2623 higher. */
2624 return 0;
2625
2626 break;
2627 case JUMP_INSN:
2628 case INSN:
2629 /* The code below would grossly mishandle REG_WAS_0 notes,
2630 so get rid of them here. */
2631 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2632 remove_note (insn, p);
2633 if (++num_insns > 20
2634 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2635 || find_reg_note (insn, REG_LIBCALL, NULL_RTX)
2636 || asm_noperands (PATTERN (insn)) > 0)
2637 return 0;
2638 break;
2639 default:
2640 break;
2641 }
2642 }
2643
2644 /* Unless INSN is zero, we can do the optimization. */
2645 if (insn == 0)
2646 return 0;
2647
2648 lastexit = insn;
2649
2650 /* See if any insn sets a register only used in the loop exit code and
2651 not a user variable. If so, replace it with a new register. */
2652 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2653 if (GET_CODE (insn) == INSN
2654 && (set = single_set (insn)) != 0
2655 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2656 || (GET_CODE (reg) == SUBREG
2657 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2658 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2659 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
2660 {
2661 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2662 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2663 break;
2664
2665 if (p != lastexit)
2666 {
2667 /* We can do the replacement. Allocate reg_map if this is the
2668 first replacement we found. */
2669 if (reg_map == 0)
2670 {
2671 reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
2672 bzero ((char *) reg_map, max_reg * sizeof (rtx));
2673 }
2674
2675 REG_LOOP_TEST_P (reg) = 1;
2676
2677 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2678 }
2679 }
2680
2681 /* Now copy each insn. */
2682 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2683 switch (GET_CODE (insn))
2684 {
2685 case BARRIER:
2686 copy = emit_barrier_before (loop_start);
2687 break;
2688 case NOTE:
2689 /* Only copy line-number notes. */
2690 if (NOTE_LINE_NUMBER (insn) >= 0)
2691 {
2692 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2693 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2694 }
2695 break;
2696
2697 case INSN:
2698 copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2699 if (reg_map)
2700 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2701
2702 mark_jump_label (PATTERN (copy), copy, 0);
2703
2704 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2705 make them. */
2706 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2707 if (REG_NOTE_KIND (link) != REG_LABEL)
2708 REG_NOTES (copy)
2709 = copy_rtx (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
2710 XEXP (link, 0),
2711 REG_NOTES (copy)));
2712 if (reg_map && REG_NOTES (copy))
2713 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2714 break;
2715
2716 case JUMP_INSN:
2717 copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2718 if (reg_map)
2719 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2720 mark_jump_label (PATTERN (copy), copy, 0);
2721 if (REG_NOTES (insn))
2722 {
2723 REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
2724 if (reg_map)
2725 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2726 }
2727
2728 /* If this is a simple jump, add it to the jump chain. */
2729
2730 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2731 && simplejump_p (copy))
2732 {
2733 jump_chain[INSN_UID (copy)]
2734 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2735 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2736 }
2737 break;
2738
2739 default:
2740 abort ();
2741 }
2742
2743 /* Now clean up by emitting a jump to the end label and deleting the jump
2744 at the start of the loop. */
2745 if (! copy || GET_CODE (copy) != BARRIER)
2746 {
2747 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2748 loop_start);
2749 mark_jump_label (PATTERN (copy), copy, 0);
2750 if (INSN_UID (copy) < max_jump_chain
2751 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2752 {
2753 jump_chain[INSN_UID (copy)]
2754 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2755 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2756 }
2757 emit_barrier_before (loop_start);
2758 }
2759
2760 /* Mark the exit code as the virtual top of the converted loop. */
2761 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2762
2763 delete_insn (next_nonnote_insn (loop_start));
2764
2765 return 1;
2766 }
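
/* Schematically (an illustrative sketch), the transformation above is

     before:  NOTE_INSN_LOOP_BEG; jump L_test;
	      L_top: body; L_test: test insns; cond-jump L_top;
	      NOTE_INSN_LOOP_END

     after:   copy of the test insns, exiting the loop if the test fails;
	      NOTE_INSN_LOOP_BEG;
	      L_top: body;
	      L_test: NOTE_INSN_LOOP_VTOP; test insns; cond-jump L_top;
	      NOTE_INSN_LOOP_END  */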
2767 \f
2768 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2769 loop-end notes between START and END out before START. Assume that
2770 END is not such a note. START may be such a note. Returns the value
2771 of the new starting insn, which may be different if the original start
2772 was such a note. */
2773
2774 rtx
2775 squeeze_notes (start, end)
2776 rtx start, end;
2777 {
2778 rtx insn;
2779 rtx next;
2780
2781 for (insn = start; insn != end; insn = next)
2782 {
2783 next = NEXT_INSN (insn);
2784 if (GET_CODE (insn) == NOTE
2785 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2786 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2787 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2788 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2789 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2790 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2791 {
2792 if (insn == start)
2793 start = next;
2794 else
2795 {
2796 rtx prev = PREV_INSN (insn);
2797 PREV_INSN (insn) = PREV_INSN (start);
2798 NEXT_INSN (insn) = start;
2799 NEXT_INSN (PREV_INSN (insn)) = insn;
2800 PREV_INSN (NEXT_INSN (insn)) = insn;
2801 NEXT_INSN (prev) = next;
2802 PREV_INSN (next) = prev;
2803 }
2804 }
2805 }
2806
2807 return start;
2808 }
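
/* For example (a sketch): applied to the range
       [NOTE_INSN_LOOP_BEG; insn A; NOTE_INSN_BLOCK_END; insn B]
   the insns stay put, both notes end up in front of insn A, and insn A
   is returned as the new start of the range.  */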
2809 \f
2810 /* Compare the instructions before insn E1 with those before E2
2811 to find an opportunity for cross jumping.
2812 (This means detecting identical sequences of insns followed by
2813 jumps to the same place, or followed by a label and a jump
2814 to that label, and replacing one with a jump to the other.)
2815
2816 Assume E1 is a jump that jumps to label E2
2817 (that is not always true but it might as well be).
2818 Find the longest possible equivalent sequences
2819 and store the first insns of those sequences into *F1 and *F2.
2820 Store zero there if no equivalent preceding instructions are found.
2821
2822 We give up if we find a label in stream 1.
2823 Actually we could transfer that label into stream 2. */
2824
2825 static void
2826 find_cross_jump (e1, e2, minimum, f1, f2)
2827 rtx e1, e2;
2828 int minimum;
2829 rtx *f1, *f2;
2830 {
2831 register rtx i1 = e1, i2 = e2;
2832 register rtx p1, p2;
2833 int lose = 0;
2834
2835 rtx last1 = 0, last2 = 0;
2836 rtx afterlast1 = 0, afterlast2 = 0;
2837
2838 *f1 = 0;
2839 *f2 = 0;
2840
2841 while (1)
2842 {
2843 i1 = prev_nonnote_insn (i1);
2844
2845 i2 = PREV_INSN (i2);
2846 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2847 i2 = PREV_INSN (i2);
2848
2849 if (i1 == 0)
2850 break;
2851
2852 /* Don't allow the range of insns preceding E1 or E2
2853 to include the other (E2 or E1). */
2854 if (i2 == e1 || i1 == e2)
2855 break;
2856
2857 /* If we will get to this code by jumping, those jumps will be
2858 tensioned to go directly to the new label (before I2),
2859 so this cross-jumping won't cost extra. So reduce the minimum. */
2860 if (GET_CODE (i1) == CODE_LABEL)
2861 {
2862 --minimum;
2863 break;
2864 }
2865
2866 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2867 break;
2868
2869 /* Avoid moving insns across EH regions if either of the insns
2870 can throw. */
2871 if (flag_exceptions
2872 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
2873 && !in_same_eh_region (i1, i2))
2874 break;
2875
2876 p1 = PATTERN (i1);
2877 p2 = PATTERN (i2);
2878
2879 /* If this is a CALL_INSN, compare register usage information.
2880 If we don't check this on stack register machines, the two
2881 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2882 numbers of stack registers in the same basic block.
2883 If we don't check this on machines with delay slots, a delay slot may
2884 be filled that clobbers a parameter expected by the subroutine.
2885
2886 ??? We take the simple route for now and assume that if they're
2887 equal, they were constructed identically. */
2888
2889 if (GET_CODE (i1) == CALL_INSN
2890 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2891 CALL_INSN_FUNCTION_USAGE (i2)))
2892 lose = 1;
2893
2894 #ifdef STACK_REGS
2895 /* If cross_jump_death_matters is not 0, the insn's mode
2896 indicates whether or not the insn contains any stack-like
2897 regs. */
2898
2899 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
2900 {
2901 /* If register stack conversion has already been done, then
2902 death notes must also be compared before it is certain that
2903 the two instruction streams match. */
2904
2905 rtx note;
2906 HARD_REG_SET i1_regset, i2_regset;
2907
2908 CLEAR_HARD_REG_SET (i1_regset);
2909 CLEAR_HARD_REG_SET (i2_regset);
2910
2911 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2912 if (REG_NOTE_KIND (note) == REG_DEAD
2913 && STACK_REG_P (XEXP (note, 0)))
2914 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2915
2916 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2917 if (REG_NOTE_KIND (note) == REG_DEAD
2918 && STACK_REG_P (XEXP (note, 0)))
2919 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2920
2921 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2922
2923 lose = 1;
2924
2925 done:
2926 ;
2927 }
2928 #endif
2929
2930 /* Don't allow old-style asm or volatile extended asms to be accepted
2931 for cross jumping purposes. It is conceptually correct to allow
2932 them, since cross-jumping preserves the dynamic instruction order
2933 even though it is changing the static instruction order. However,
2934 if an asm is being used to emit an assembler pseudo-op, such as
2935 the MIPS `.set reorder' pseudo-op, then the static instruction order
2936 matters and it must be preserved. */
2937 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
2938 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
2939 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
2940 lose = 1;
2941
2942 if (lose || GET_CODE (p1) != GET_CODE (p2)
2943 || ! rtx_renumbered_equal_p (p1, p2))
2944 {
2945 /* The following code helps take care of G++ cleanups. */
2946 rtx equiv1;
2947 rtx equiv2;
2948
2949 if (!lose && GET_CODE (p1) == GET_CODE (p2)
2950 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2951 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2952 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2953 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
2954 /* If the equivalences are not to a constant, they may
2955 reference pseudos that no longer exist, so we can't
2956 use them. */
2957 && CONSTANT_P (XEXP (equiv1, 0))
2958 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2959 {
2960 rtx s1 = single_set (i1);
2961 rtx s2 = single_set (i2);
2962 if (s1 != 0 && s2 != 0
2963 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
2964 {
2965 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
2966 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
2967 if (! rtx_renumbered_equal_p (p1, p2))
2968 cancel_changes (0);
2969 else if (apply_change_group ())
2970 goto win;
2971 }
2972 }
2973
2974 /* Insns fail to match; cross jumping is limited to the following
2975 insns. */
2976
2977 #ifdef HAVE_cc0
2978 /* Don't allow the insn after a compare to be shared by
2979 cross-jumping unless the compare is also shared.
2980 Here, if either of these non-matching insns is a compare,
2981 exclude the following insn from possible cross-jumping. */
2982 if (sets_cc0_p (p1) || sets_cc0_p (p2))
2983 last1 = afterlast1, last2 = afterlast2, ++minimum;
2984 #endif
2985
2986 /* If cross-jumping here will feed a jump-around-jump
2987 optimization, this jump won't cost extra, so reduce
2988 the minimum. */
2989 if (GET_CODE (i1) == JUMP_INSN
2990 && JUMP_LABEL (i1)
2991 && prev_real_insn (JUMP_LABEL (i1)) == e1)
2992 --minimum;
2993 break;
2994 }
2995
2996 win:
2997 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
2998 {
2999 /* Ok, this insn is potentially includable in a cross-jump here. */
3000 afterlast1 = last1, afterlast2 = last2;
3001 last1 = i1, last2 = i2, --minimum;
3002 }
3003 }
3004
3005 if (minimum <= 0 && last1 != 0 && last1 != e1)
3006 *f1 = last1, *f2 = last2;
3007 }
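
/* For example (a sketch):

     before:  insn A; insn B; jump L;  ...  insn A; insn B; L: ...
     after:   jump L2;                 ...  L2: insn A; insn B; L: ...

   The copies of A and B before the jump are deleted, and the jump is
   redirected to a label placed before the surviving copy; do_cross_jump
   below performs that rewrite.  */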
3008
3009 static void
3010 do_cross_jump (insn, newjpos, newlpos)
3011 rtx insn, newjpos, newlpos;
3012 {
3013 /* Find an existing label at this point
3014 or make a new one if there is none. */
3015 register rtx label = get_label_before (newlpos);
3016
3017 /* Make the same jump insn jump to the new point. */
3018 if (GET_CODE (PATTERN (insn)) == RETURN)
3019 {
3020 /* Remove from jump chain of returns. */
3021 delete_from_jump_chain (insn);
3022 /* Change the insn. */
3023 PATTERN (insn) = gen_jump (label);
3024 INSN_CODE (insn) = -1;
3025 JUMP_LABEL (insn) = label;
3026 LABEL_NUSES (label)++;
3027       /* Add to the new jump chain.  */
3028 if (INSN_UID (label) < max_jump_chain
3029 && INSN_UID (insn) < max_jump_chain)
3030 {
3031 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
3032 jump_chain[INSN_UID (label)] = insn;
3033 }
3034 }
3035 else
3036 redirect_jump (insn, label);
3037
3038 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3039 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3040 the NEWJPOS stream. */
3041
3042 while (newjpos != insn)
3043 {
3044 rtx lnote;
3045
3046 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
3047 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
3048 || REG_NOTE_KIND (lnote) == REG_EQUIV)
3049 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
3050 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
3051 remove_note (newlpos, lnote);
3052
3053 delete_insn (newjpos);
3054 newjpos = next_real_insn (newjpos);
3055 newlpos = next_real_insn (newlpos);
3056 }
3057 }
3058 \f
3059 /* Return the label before INSN, or put a new label there. */
3060
3061 rtx
3062 get_label_before (insn)
3063 rtx insn;
3064 {
3065 rtx label;
3066
3067 /* Find an existing label at this point
3068 or make a new one if there is none. */
3069 label = prev_nonnote_insn (insn);
3070
3071 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3072 {
3073 rtx prev = PREV_INSN (insn);
3074
3075 label = gen_label_rtx ();
3076 emit_label_after (label, prev);
3077 LABEL_NUSES (label) = 0;
3078 }
3079 return label;
3080 }
3081
3082 /* Return the label after INSN, or put a new label there. */
3083
3084 rtx
3085 get_label_after (insn)
3086 rtx insn;
3087 {
3088 rtx label;
3089
3090 /* Find an existing label at this point
3091 or make a new one if there is none. */
3092 label = next_nonnote_insn (insn);
3093
3094 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3095 {
3096 label = gen_label_rtx ();
3097 emit_label_after (label, insn);
3098 LABEL_NUSES (label) = 0;
3099 }
3100 return label;
3101 }
3102 \f
3103 /* Return 1 if INSN is a jump that jumps to right after TARGET
3104 only on the condition that TARGET itself would drop through.
3105 Assumes that TARGET is a conditional jump. */
3106
3107 static int
3108 jump_back_p (insn, target)
3109 rtx insn, target;
3110 {
3111 rtx cinsn, ctarget;
3112 enum rtx_code codei, codet;
3113
3114 if (simplejump_p (insn) || ! condjump_p (insn)
3115 || simplejump_p (target)
3116 || target != prev_real_insn (JUMP_LABEL (insn)))
3117 return 0;
3118
3119 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3120 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3121
3122 codei = GET_CODE (cinsn);
3123 codet = GET_CODE (ctarget);
3124
3125 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3126 {
3127 if (! can_reverse_comparison_p (cinsn, insn))
3128 return 0;
3129 codei = reverse_condition (codei);
3130 }
3131
3132 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3133 {
3134 if (! can_reverse_comparison_p (ctarget, target))
3135 return 0;
3136 codet = reverse_condition (codet);
3137 }
3138
3139 return (codei == codet
3140 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3141 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3142 }
3143 \f
3144 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3145    return non-zero if it is safe to reverse this comparison.  It is safe if our
3146 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3147 this is known to be an integer comparison. */
3148
3149 int
3150 can_reverse_comparison_p (comparison, insn)
3151 rtx comparison;
3152 rtx insn;
3153 {
3154 rtx arg0;
3155
3156 /* If this is not actually a comparison, we can't reverse it. */
3157 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3158 return 0;
3159
3160 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3161 /* If this is an NE comparison, it is safe to reverse it to an EQ
3162 comparison and vice versa, even for floating point. If no operands
3163 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3164 always false and NE is always true, so the reversal is also valid. */
3165 || flag_fast_math
3166 || GET_CODE (comparison) == NE
3167 || GET_CODE (comparison) == EQ)
3168 return 1;
3169
3170 arg0 = XEXP (comparison, 0);
3171
3172 /* Make sure ARG0 is one of the actual objects being compared. If we
3173 can't do this, we can't be sure the comparison can be reversed.
3174
3175 Handle cc0 and a MODE_CC register. */
3176 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3177 #ifdef HAVE_cc0
3178 || arg0 == cc0_rtx
3179 #endif
3180 )
3181 {
3182 rtx prev = prev_nonnote_insn (insn);
3183 rtx set;
3184
3185 /* If the comparison itself was a loop invariant, it could have been
3186 hoisted out of the loop. If we proceed to unroll such a loop, then
3187 we may not be able to find the comparison when copying the loop.
3188
3189 Returning zero in that case is the safe thing to do. */
3190 if (prev == 0)
3191 return 0;
3192
3193 set = single_set (prev);
3194 if (set == 0 || SET_DEST (set) != arg0)
3195 return 0;
3196
3197 arg0 = SET_SRC (set);
3198
3199 if (GET_CODE (arg0) == COMPARE)
3200 arg0 = XEXP (arg0, 0);
3201 }
3202
3203 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3204 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3205 return (GET_CODE (arg0) == CONST_INT
3206 || (GET_MODE (arg0) != VOIDmode
3207 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3208 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3209 }
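
/* E.g. with IEEE floating point, (lt x y) must not be reversed to
   (ge x y), since both tests are false when either operand is a NaN;
   an integer (lt x y), by contrast, always reverses to (ge x y).  */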
3210
3211 /* Given an rtx-code for a comparison, return the code
3212 for the negated comparison.
3213 WATCH OUT! reverse_condition is not safe to use on a jump
3214 that might be acting on the results of an IEEE floating point comparison,
3215    because of the special treatment of non-signaling NaNs in comparisons.
3216 Use can_reverse_comparison_p to be sure. */
3217
3218 enum rtx_code
3219 reverse_condition (code)
3220 enum rtx_code code;
3221 {
3222 switch (code)
3223 {
3224 case EQ:
3225 return NE;
3226
3227 case NE:
3228 return EQ;
3229
3230 case GT:
3231 return LE;
3232
3233 case GE:
3234 return LT;
3235
3236 case LT:
3237 return GE;
3238
3239 case LE:
3240 return GT;
3241
3242 case GTU:
3243 return LEU;
3244
3245 case GEU:
3246 return LTU;
3247
3248 case LTU:
3249 return GEU;
3250
3251 case LEU:
3252 return GTU;
3253
3254 default:
3255 abort ();
3256 return UNKNOWN;
3257 }
3258 }
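
/* E.g. reverse_condition (GT) is LE and reverse_condition (LEU) is GTU:
   this negates the test, as opposed to swapping its operands (for which
   see swap_condition below).  */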
3259
3260 /* Similar, but return the code when two operands of a comparison are swapped.
3261 This IS safe for IEEE floating-point. */
3262
3263 enum rtx_code
3264 swap_condition (code)
3265 enum rtx_code code;
3266 {
3267 switch (code)
3268 {
3269 case EQ:
3270 case NE:
3271 return code;
3272
3273 case GT:
3274 return LT;
3275
3276 case GE:
3277 return LE;
3278
3279 case LT:
3280 return GT;
3281
3282 case LE:
3283 return GE;
3284
3285 case GTU:
3286 return LTU;
3287
3288 case GEU:
3289 return LEU;
3290
3291 case LTU:
3292 return GTU;
3293
3294 case LEU:
3295 return GEU;
3296
3297 default:
3298 abort ();
3299 return UNKNOWN;
3300 }
3301 }
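
/* E.g. swap_condition (GT) is LT, since (gt x y) and (lt y x) test the
   same thing; unlike reversal, this is safe even for IEEE floating
   point.  */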
3302
3303 /* Given a comparison CODE, return the corresponding unsigned comparison.
3304 If CODE is an equality comparison or already an unsigned comparison,
3305 CODE is returned. */
3306
3307 enum rtx_code
3308 unsigned_condition (code)
3309 enum rtx_code code;
3310 {
3311 switch (code)
3312 {
3313 case EQ:
3314 case NE:
3315 case GTU:
3316 case GEU:
3317 case LTU:
3318 case LEU:
3319 return code;
3320
3321 case GT:
3322 return GTU;
3323
3324 case GE:
3325 return GEU;
3326
3327 case LT:
3328 return LTU;
3329
3330 case LE:
3331 return LEU;
3332
3333 default:
3334 abort ();
3335 }
3336 }
3337
3338 /* Similarly, return the signed version of a comparison. */
3339
3340 enum rtx_code
3341 signed_condition (code)
3342 enum rtx_code code;
3343 {
3344 switch (code)
3345 {
3346 case EQ:
3347 case NE:
3348 case GT:
3349 case GE:
3350 case LT:
3351 case LE:
3352 return code;
3353
3354 case GTU:
3355 return GT;
3356
3357 case GEU:
3358 return GE;
3359
3360 case LTU:
3361 return LT;
3362
3363 case LEU:
3364 return LE;
3365
3366 default:
3367 abort ();
3368 }
3369 }
3370 \f
3371 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3372 truth of CODE1 implies the truth of CODE2. */
3373
3374 int
3375 comparison_dominates_p (code1, code2)
3376 enum rtx_code code1, code2;
3377 {
3378 if (code1 == code2)
3379 return 1;
3380
3381 switch (code1)
3382 {
3383 case EQ:
3384 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
3385 return 1;
3386 break;
3387
3388 case LT:
3389 if (code2 == LE || code2 == NE)
3390 return 1;
3391 break;
3392
3393 case GT:
3394 if (code2 == GE || code2 == NE)
3395 return 1;
3396 break;
3397
3398 case LTU:
3399 if (code2 == LEU || code2 == NE)
3400 return 1;
3401 break;
3402
3403 case GTU:
3404 if (code2 == GEU || code2 == NE)
3405 return 1;
3406 break;
3407
3408 default:
3409 break;
3410 }
3411
3412 return 0;
3413 }
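
/* E.g. comparison_dominates_p (EQ, GE) is 1, since x == y implies
   x >= y; comparison_dominates_p (LE, LT) is 0, since x <= y does not
   imply x < y.  */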
3414 \f
3415 /* Return 1 if INSN is an unconditional jump and nothing else. */
3416
3417 int
3418 simplejump_p (insn)
3419 rtx insn;
3420 {
3421 return (GET_CODE (insn) == JUMP_INSN
3422 && GET_CODE (PATTERN (insn)) == SET
3423 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3424 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3425 }
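
/* I.e. the only pattern accepted above is
       (jump_insn (set (pc) (label_ref LABEL)))
   -- PARALLELs, RETURNs and conditional branches are all rejected.  */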
3426
3427 /* Return nonzero if INSN is a (possibly) conditional jump
3428 and nothing more. */
3429
3430 int
3431 condjump_p (insn)
3432 rtx insn;
3433 {
3434 register rtx x = PATTERN (insn);
3435 if (GET_CODE (x) != SET)
3436 return 0;
3437 if (GET_CODE (SET_DEST (x)) != PC)
3438 return 0;
3439 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3440 return 1;
3441 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3442 return 0;
3443 if (XEXP (SET_SRC (x), 2) == pc_rtx
3444 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3445 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3446 return 1;
3447 if (XEXP (SET_SRC (x), 1) == pc_rtx
3448 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3449 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3450 return 1;
3451 return 0;
3452 }
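
/* So the forms accepted above are, schematically,
       (set (pc) (label_ref L))
       (set (pc) (if_then_else COND (label_ref L) (pc)))
       (set (pc) (if_then_else COND (pc) (label_ref L)))
   where the non-(pc) arm of the IF_THEN_ELSE may also be (return).  */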
3453
3454 /* Return nonzero if INSN is a (possibly) conditional jump inside
3455    a PARALLEL, and nothing more.  */
3456
3457 int
3458 condjump_in_parallel_p (insn)
3459 rtx insn;
3460 {
3461 register rtx x = PATTERN (insn);
3462
3463 if (GET_CODE (x) != PARALLEL)
3464 return 0;
3465 else
3466 x = XVECEXP (x, 0, 0);
3467
3468 if (GET_CODE (x) != SET)
3469 return 0;
3470 if (GET_CODE (SET_DEST (x)) != PC)
3471 return 0;
3472 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3473 return 1;
3474 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3475 return 0;
3476 if (XEXP (SET_SRC (x), 2) == pc_rtx
3477 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3478 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3479 return 1;
3480 if (XEXP (SET_SRC (x), 1) == pc_rtx
3481 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3482 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3483 return 1;
3484 return 0;
3485 }
3486
3487 /* Return the LABEL_REF of a conditional jump, or NULL_RTX if none.  */
3488
3489 rtx
3490 condjump_label (insn)
3491 rtx insn;
3492 {
3493 register rtx x = PATTERN (insn);
3494
3495 if (GET_CODE (x) == PARALLEL)
3496 x = XVECEXP (x, 0, 0);
3497 if (GET_CODE (x) != SET)
3498 return NULL_RTX;
3499 if (GET_CODE (SET_DEST (x)) != PC)
3500 return NULL_RTX;
3501 x = SET_SRC (x);
3502 if (GET_CODE (x) == LABEL_REF)
3503 return x;
3504 if (GET_CODE (x) != IF_THEN_ELSE)
3505 return NULL_RTX;
3506 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
3507 return XEXP (x, 1);
3508 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
3509 return XEXP (x, 2);
3510 return NULL_RTX;
3511 }
3512
3513 /* Return true if INSN is a (possibly conditional) return insn. */
3514
3515 static int
3516 returnjump_p_1 (loc, data)
3517 rtx *loc;
3518 void *data ATTRIBUTE_UNUSED;
3519 {
3520 rtx x = *loc;
3521 return GET_CODE (x) == RETURN;
3522 }
3523
3524 int
3525 returnjump_p (insn)
3526 rtx insn;
3527 {
3528 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
3529 }
3530
3531 /* Return true if INSN is a jump that transfers control and
3532 nothing more. */
3533
3534 int
3535 onlyjump_p (insn)
3536 rtx insn;
3537 {
3538 rtx set;
3539
3540 if (GET_CODE (insn) != JUMP_INSN)
3541 return 0;
3542
3543 set = single_set (insn);
3544 if (set == NULL)
3545 return 0;
3546 if (GET_CODE (SET_DEST (set)) != PC)
3547 return 0;
3548 if (side_effects_p (SET_SRC (set)))
3549 return 0;
3550
3551 return 1;
3552 }
3553
3554 #ifdef HAVE_cc0
3555
3556 /* Return 1 if X is an RTX that does nothing but set the condition codes
3557 and CLOBBER or USE registers.
3558 Return -1 if X does explicitly set the condition codes,
3559 but also does other things. */
3560
3561 int
3562 sets_cc0_p (x)
3563      rtx x;
3564 {
3565 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3566 return 1;
3567 if (GET_CODE (x) == PARALLEL)
3568 {
3569 int i;
3570 int sets_cc0 = 0;
3571 int other_things = 0;
3572 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3573 {
3574 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3575 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3576 sets_cc0 = 1;
3577 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3578 other_things = 1;
3579 }
3580 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3581 }
3582 return 0;
3583 }
3584 #endif
3585 \f
3586 /* Follow any unconditional jump at LABEL;
3587 return the ultimate label reached by any such chain of jumps.
3588 If LABEL is not followed by a jump, return LABEL.
3589 If the chain loops or we can't find the end, return LABEL,
3590 since that tells caller to avoid changing the insn.
3591
3592 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3593 a USE or CLOBBER. */
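
/* As a concrete sketch: given the chain

	jump L1; ... L1: jump L2; ... L2: jump L3; ... L3:

   follow_jumps (L1) returns L3, letting the caller retarget the
   original jump straight to L3.  If the chain is more than ten jumps
   long, the search gives up and returns LABEL unchanged (the depth
   limit below).  */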
3594
3595 rtx
3596 follow_jumps (label)
3597 rtx label;
3598 {
3599 register rtx insn;
3600 register rtx next;
3601 register rtx value = label;
3602 register int depth;
3603
3604 for (depth = 0;
3605 (depth < 10
3606 && (insn = next_active_insn (value)) != 0
3607 && GET_CODE (insn) == JUMP_INSN
3608 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3609 || GET_CODE (PATTERN (insn)) == RETURN)
3610 && (next = NEXT_INSN (insn))
3611 && GET_CODE (next) == BARRIER);
3612 depth++)
3613 {
3614 /* Don't chain through the insn that jumps into a loop
3615 from outside the loop,
3616 since that would create multiple loop entry jumps
3617 and prevent loop optimization. */
3618 rtx tem;
3619 if (!reload_completed)
3620 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3621 if (GET_CODE (tem) == NOTE
3622 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
3623 /* ??? Optional. Disables some optimizations, but makes
3624 gcov output more accurate with -O. */
3625 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
3626 return value;
3627
3628 /* If we have found a cycle, make the insn jump to itself. */
3629 if (JUMP_LABEL (insn) == label)
3630 return label;
3631
3632 tem = next_active_insn (JUMP_LABEL (insn));
3633 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3634 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3635 break;
3636
3637 value = JUMP_LABEL (insn);
3638 }
3639 if (depth == 10)
3640 return label;
3641 return value;
3642 }
3643
3644 /* Assuming that field IDX of X is a vector of label_refs,
3645 replace each of them by the ultimate label reached by it.
3646 Return nonzero if a change is made. (follow_jumps itself declines
3647 to chain across a NOTE_INSN_LOOP_BEG until reload has completed.) */
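
/* E.g. each (label_ref Ln) entry of a dispatch table

	(addr_vec [(label_ref L1) (label_ref L2) ...])

   is replaced by the ultimate target follow_jumps finds for it, with
   LABEL_NUSES adjusted and newly unused labels deleted (a sketch of
   the effect, not new behavior).  */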
3648
3649 static int
3650 tension_vector_labels (x, idx)
3651 register rtx x;
3652 register int idx;
3653 {
3654 int changed = 0;
3655 register int i;
3656 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3657 {
3658 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3659 register rtx nlabel = follow_jumps (olabel);
3660 if (nlabel && nlabel != olabel)
3661 {
3662 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3663 ++LABEL_NUSES (nlabel);
3664 if (--LABEL_NUSES (olabel) == 0)
3665 delete_insn (olabel);
3666 changed = 1;
3667 }
3668 }
3669 return changed;
3670 }
3671 \f
3672 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3673 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3674 in INSN, then store one of them in JUMP_LABEL (INSN).
3675 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3676 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3677 Also, when there are consecutive labels, canonicalize on the last of them.
3678
3679 Note that two labels separated by a loop-beginning note
3680 must be kept distinct if we have not yet done loop-optimization,
3681 because the gap between them is where loop-optimize
3682 will want to move invariant code to. CROSS_JUMP tells us
3683 that loop optimization has finished.
3684
3685 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3686 two labels distinct if they are separated by only USE or CLOBBER insns. */
3687
3688 static void
3689 mark_jump_label (x, insn, cross_jump)
3690 register rtx x;
3691 rtx insn;
3692 int cross_jump;
3693 {
3694 register RTX_CODE code = GET_CODE (x);
3695 register int i;
3696 register char *fmt;
3697
3698 switch (code)
3699 {
3700 case PC:
3701 case CC0:
3702 case REG:
3703 case SUBREG:
3704 case CONST_INT:
3705 case SYMBOL_REF:
3706 case CONST_DOUBLE:
3707 case CLOBBER:
3708 case CALL:
3709 return;
3710
3711 case MEM:
3712 /* If this is a constant-pool reference, see if it is a label. */
3713 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3714 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3715 mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
3716 break;
3717
3718 case LABEL_REF:
3719 {
3720 rtx label = XEXP (x, 0);
3721 rtx olabel = label;
3722 rtx note;
3723 rtx next;
3724
3725 if (GET_CODE (label) != CODE_LABEL)
3726 abort ();
3727
3728 /* Ignore references to labels of containing functions. */
3729 if (LABEL_REF_NONLOCAL_P (x))
3730 break;
3731
3732 /* If there are other labels following this one,
3733 replace it with the last of the consecutive labels. */
3734 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3735 {
3736 if (GET_CODE (next) == CODE_LABEL)
3737 label = next;
3738 else if (cross_jump && GET_CODE (next) == INSN
3739 && (GET_CODE (PATTERN (next)) == USE
3740 || GET_CODE (PATTERN (next)) == CLOBBER))
3741 continue;
3742 else if (GET_CODE (next) != NOTE)
3743 break;
3744 else if (! cross_jump
3745 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3746 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
3747 /* ??? Optional. Disables some optimizations, but
3748 makes gcov output more accurate with -O. */
3749 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
3750 break;
3751 }
3752
3753 XEXP (x, 0) = label;
3754 if (! insn || ! INSN_DELETED_P (insn))
3755 ++LABEL_NUSES (label);
3756
3757 if (insn)
3758 {
3759 if (GET_CODE (insn) == JUMP_INSN)
3760 JUMP_LABEL (insn) = label;
3761
3762 /* If we've changed OLABEL and we had a REG_LABEL note
3763 for it, update it as well. */
3764 else if (label != olabel
3765 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3766 XEXP (note, 0) = label;
3767
3768 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3769 is one. */
3770 else if (! find_reg_note (insn, REG_LABEL, label))
3771 {
3772 /* This code used to ignore labels which referred to dispatch
3773 tables to avoid flow.c generating worse code.
3774
3775 However, in the presence of global optimizations like
3776 gcse which call find_basic_blocks without calling
3777 life_analysis, not recording such labels will lead
3778 to compiler aborts because of inconsistencies in the
3779 flow graph. So we go ahead and record the label.
3780
3781 It may also be the case that the optimization argument
3782 is no longer valid because of the more accurate cfg
3783 we build in find_basic_blocks -- it no longer pessimizes
3784 code when it finds a REG_LABEL note. */
3785 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
3786 REG_NOTES (insn));
3787 }
3788 }
3789 return;
3790 }
3791
3792 /* Do walk the labels in a vector, but not the first operand of an
3793 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3794 case ADDR_VEC:
3795 case ADDR_DIFF_VEC:
3796 if (! INSN_DELETED_P (insn))
3797 {
3798 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
3799
3800 for (i = 0; i < XVECLEN (x, eltnum); i++)
3801 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
3802 }
3803 return;
3804
3805 default:
3806 break;
3807 }
3808
3809 fmt = GET_RTX_FORMAT (code);
3810 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3811 {
3812 if (fmt[i] == 'e')
3813 mark_jump_label (XEXP (x, i), insn, cross_jump);
3814 else if (fmt[i] == 'E')
3815 {
3816 register int j;
3817 for (j = 0; j < XVECLEN (x, i); j++)
3818 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
3819 }
3820 }
3821 }
3822
3823 /* If all INSN does is set the pc, delete it,
3824 and delete the insn that set the condition codes for it
3825 if that's what the previous thing was. */
3826
3827 void
3828 delete_jump (insn)
3829 rtx insn;
3830 {
3831 register rtx set = single_set (insn);
3832
3833 if (set && GET_CODE (SET_DEST (set)) == PC)
3834 delete_computation (insn);
3835 }
3836
3837 /* Recursively delete prior insns that compute the value (used only by INSN
3838 which the caller is deleting) stored in the register mentioned by NOTE
3839 which is a REG_DEAD note associated with INSN. */
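
/* A sketch of the intended effect: if INSN carries (REG_DEAD (reg R))
   and a prior insn

	(set (reg R) (plus:SI (reg A) (reg B)))

   computes a value used only by INSN, that insn can be deleted along
   with INSN, provided the SET has no other side effects.  */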
3840
3841 static void
3842 delete_prior_computation (note, insn)
3843 rtx note;
3844 rtx insn;
3845 {
3846 rtx our_prev;
3847 rtx reg = XEXP (note, 0);
3848
3849 for (our_prev = prev_nonnote_insn (insn);
3850 our_prev && GET_CODE (our_prev) == INSN;
3851 our_prev = prev_nonnote_insn (our_prev))
3852 {
3853 rtx pat = PATTERN (our_prev);
3854
3855 /* If we reach a SEQUENCE, it is too complex to try to
3856 do anything with it, so give up. */
3857 if (GET_CODE (pat) == SEQUENCE)
3858 break;
3859
3860 if (GET_CODE (pat) == USE
3861 && GET_CODE (XEXP (pat, 0)) == INSN)
3862 /* reorg creates USEs that look like this. We leave them
3863 alone because reorg needs them for its own purposes. */
3864 break;
3865
3866 if (reg_set_p (reg, pat))
3867 {
3868 if (side_effects_p (pat))
3869 break;
3870
3871 if (GET_CODE (pat) == PARALLEL)
3872 {
3873 /* If we find a SET of something else, we can't
3874 delete the insn. */
3875
3876 int i;
3877
3878 for (i = 0; i < XVECLEN (pat, 0); i++)
3879 {
3880 rtx part = XVECEXP (pat, 0, i);
3881
3882 if (GET_CODE (part) == SET
3883 && SET_DEST (part) != reg)
3884 break;
3885 }
3886
3887 if (i == XVECLEN (pat, 0))
3888 delete_computation (our_prev);
3889 }
3890 else if (GET_CODE (pat) == SET
3891 && GET_CODE (SET_DEST (pat)) == REG)
3892 {
3893 int dest_regno = REGNO (SET_DEST (pat));
3894 int dest_endregno
3895 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
3896 ? HARD_REGNO_NREGS (dest_regno,
3897 GET_MODE (SET_DEST (pat))) : 1);
3898 int regno = REGNO (reg);
3899 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
3900 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
3901
3902 if (dest_regno >= regno
3903 && dest_endregno <= endregno)
3904 delete_computation (our_prev);
3905
3906 /* We may have a multi-word hard register and some, but not
3907 all, of the words of the register are needed in subsequent
3908 insns. Write REG_UNUSED notes for those parts that were not
3909 needed. */
3910 else if (dest_regno <= regno
3911 && dest_endregno >= endregno
3912 && ! find_regno_note (our_prev, REG_UNUSED, REGNO (reg)))
3913 {
3914 int i;
3915
3916 REG_NOTES (our_prev)
3917 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (our_prev));
3918
3919 for (i = dest_regno; i < dest_endregno; i++)
3920 if (! find_regno_note (our_prev, REG_UNUSED, i))
3921 break;
3922
3923 if (i == dest_endregno)
3924 delete_computation (our_prev);
3925 }
3926 }
3927
3928 break;
3929 }
3930
3931 /* If PAT references the register that dies here, it is an
3932 additional use. Hence any prior SET isn't dead. However, this
3933 insn becomes the new place for the REG_DEAD note. */
3934 if (reg_overlap_mentioned_p (reg, pat))
3935 {
3936 XEXP (note, 1) = REG_NOTES (our_prev);
3937 REG_NOTES (our_prev) = note;
3938 break;
3939 }
3940 }
3941 }
3942
3943 /* Delete INSN and recursively delete insns that compute values used only
3944 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3945 If we are running before flow.c, we need do nothing since flow.c will
3946 delete dead code. We also can't know if the registers being used are
3947 dead or not at this point.
3948
3949 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3950 nothing other than set a register that dies in this insn, we can delete
3951 that insn as well.
3952
3953 On machines with CC0, if CC0 is used in this insn, we may be able to
3954 delete the insn that set it. */
3955
3956 static void
3957 delete_computation (insn)
3958 rtx insn;
3959 {
3960 rtx note, next;
3961 rtx set;
3962
3963 #ifdef HAVE_cc0
3964 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3965 {
3966 rtx prev = prev_nonnote_insn (insn);
3967 /* We assume that at this stage
3968 CC's are always set explicitly
3969 and always immediately before the jump that
3970 will use them. So if the previous insn
3971 exists to set the CC's, delete it
3972 (unless it performs auto-increments, etc.). */
3973 if (prev && GET_CODE (prev) == INSN
3974 && sets_cc0_p (PATTERN (prev)))
3975 {
3976 if (sets_cc0_p (PATTERN (prev)) > 0
3977 && ! side_effects_p (PATTERN (prev)))
3978 delete_computation (prev);
3979 else
3980 /* Otherwise, show that cc0 won't be used. */
3981 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
3982 cc0_rtx, REG_NOTES (prev));
3983 }
3984 }
3985 #endif
3986
3987 #ifdef INSN_SCHEDULING
3988 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
3989 reload has completed. The schedulers need to be fixed. Until
3990 they are, we must not rely on the death notes here. */
3991 if (reload_completed && flag_schedule_insns_after_reload)
3992 {
3993 delete_insn (insn);
3994 return;
3995 }
3996 #endif
3997
3998 set = single_set (insn);
3999
4000 for (note = REG_NOTES (insn); note; note = next)
4001 {
4002 next = XEXP (note, 1);
4003
4004 if (REG_NOTE_KIND (note) != REG_DEAD
4005 /* Verify that the REG_NOTE is legitimate. */
4006 || GET_CODE (XEXP (note, 0)) != REG)
4007 continue;
4008
4009 if (set && reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
4010 set = NULL_RTX;
4011
4012 delete_prior_computation (note, insn);
4013 }
4014
4015 /* The REG_DEAD note may have been omitted for a register
4016 which is both set and used by the insn. */
4017 if (set
4018 && GET_CODE (SET_DEST (set)) == REG
4019 && reg_mentioned_p (SET_DEST (set), SET_SRC (set)))
4020 {
4021 note = gen_rtx_EXPR_LIST (REG_DEAD, SET_DEST (set), NULL_RTX);
4022 delete_prior_computation (note, insn);
4023 }
4024
4025 delete_insn (insn);
4026 }
4027 \f
4028 /* Delete insn INSN from the chain of insns and update label ref counts.
4029 May delete some following insns as a consequence; may even delete
4030 a label elsewhere and insns that follow it.
4031
4032 Returns the first insn after INSN that was not deleted. */
4033
4034 rtx
4035 delete_insn (insn)
4036 register rtx insn;
4037 {
4038 register rtx next = NEXT_INSN (insn);
4039 register rtx prev = PREV_INSN (insn);
4040 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
4041 register int dont_really_delete = 0;
4042
4043 while (next && INSN_DELETED_P (next))
4044 next = NEXT_INSN (next);
4045
4046 /* This insn is already deleted => return first following nondeleted. */
4047 if (INSN_DELETED_P (insn))
4048 return next;
4049
4050 if (was_code_label)
4051 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
4052
4053 /* Don't delete user-declared labels. Convert them to special NOTEs
4054 instead. */
4055 if (was_code_label && LABEL_NAME (insn) != 0
4056 && optimize && ! dont_really_delete)
4057 {
4058 PUT_CODE (insn, NOTE);
4059 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
4060 NOTE_SOURCE_FILE (insn) = 0;
4061 dont_really_delete = 1;
4062 }
4063 else
4064 /* Mark this insn as deleted. */
4065 INSN_DELETED_P (insn) = 1;
4066
4067 /* If this is an unconditional jump, delete it from the jump chain. */
4068 if (simplejump_p (insn))
4069 delete_from_jump_chain (insn);
4070
4071 /* If instruction is followed by a barrier,
4072 delete the barrier too. */
4073
4074 if (next != 0 && GET_CODE (next) == BARRIER)
4075 {
4076 INSN_DELETED_P (next) = 1;
4077 next = NEXT_INSN (next);
4078 }
4079
4080 /* Patch out INSN (and the barrier if any). */
4081
4082 if (optimize && ! dont_really_delete)
4083 {
4084 if (prev)
4085 {
4086 NEXT_INSN (prev) = next;
4087 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
4088 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
4089 XVECLEN (PATTERN (prev), 0) - 1)) = next;
4090 }
4091
4092 if (next)
4093 {
4094 PREV_INSN (next) = prev;
4095 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
4096 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4097 }
4098
4099 if (prev && NEXT_INSN (prev) == 0)
4100 set_last_insn (prev);
4101 }
4102
4103 /* If deleting a jump, decrement the count of the label,
4104 and delete the label if it is now unused. */
4105
4106 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
4107 {
4108 rtx lab = JUMP_LABEL (insn), lab_next;
4109
4110 if (--LABEL_NUSES (lab) == 0)
4111 {
4112 /* This can delete NEXT or PREV,
4113 either directly if NEXT is JUMP_LABEL (INSN),
4114 or indirectly through more levels of jumps. */
4115 delete_insn (lab);
4116
4117 /* I feel a little doubtful about this loop,
4118 but I see no clean and sure alternative way
4119 to find the first insn after INSN that is not now deleted.
4120 I hope this works. */
4121 while (next && INSN_DELETED_P (next))
4122 next = NEXT_INSN (next);
4123 return next;
4124 }
4125 else if ((lab_next = next_nonnote_insn (lab)) != NULL
4126 && GET_CODE (lab_next) == JUMP_INSN
4127 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
4128 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
4129 {
4130 /* If we're deleting the tablejump, delete the dispatch table.
4131 We may not be able to kill the label immediately preceding
4132 just yet, as it might be referenced in code leading up to
4133 the tablejump. */
4134 delete_insn (lab_next);
4135 }
4136 }
4137
4138 /* Likewise if we're deleting a dispatch table. */
4139
4140 if (GET_CODE (insn) == JUMP_INSN
4141 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4142 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4143 {
4144 rtx pat = PATTERN (insn);
4145 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4146 int len = XVECLEN (pat, diff_vec_p);
4147
4148 for (i = 0; i < len; i++)
4149 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
4150 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
4151 while (next && INSN_DELETED_P (next))
4152 next = NEXT_INSN (next);
4153 return next;
4154 }
4155
4156 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
4157 prev = PREV_INSN (prev);
4158
4159 /* If INSN was a label and a dispatch table follows it,
4160 delete the dispatch table. The tablejump must have gone already.
4161 It isn't useful to fall through into a table. */
4162
4163 if (was_code_label
4164 && NEXT_INSN (insn) != 0
4165 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4166 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
4167 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
4168 next = delete_insn (NEXT_INSN (insn));
4169
4170 /* If INSN was a label, delete insns following it if now unreachable. */
4171
4172 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
4173 {
4174 register RTX_CODE code;
4175 while (next != 0
4176 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
4177 || code == NOTE || code == BARRIER
4178 || (code == CODE_LABEL && INSN_DELETED_P (next))))
4179 {
4180 if (code == NOTE
4181 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
4182 next = NEXT_INSN (next);
4183 /* Keep going past other deleted labels to delete what follows. */
4184 else if (code == CODE_LABEL && INSN_DELETED_P (next))
4185 next = NEXT_INSN (next);
4186 else
4187 /* Note: if this deletes a jump, it can cause more
4188 deletion of unreachable code, after a different label.
4189 As long as the value from this recursive call is correct,
4190 this invocation functions correctly. */
4191 next = delete_insn (next);
4192 }
4193 }
4194
4195 return next;
4196 }
4197
4198 /* Advance from INSN till reaching something not deleted
4199 then return that. May return INSN itself. */
4200
4201 rtx
4202 next_nondeleted_insn (insn)
4203 rtx insn;
4204 {
4205 while (INSN_DELETED_P (insn))
4206 insn = NEXT_INSN (insn);
4207 return insn;
4208 }
4209 \f
4210 /* Delete a range of insns from FROM to TO, inclusive.
4211 This is for the sake of peephole optimization, so assume
4212 that whatever these insns do will still be done by a new
4213 peephole insn that will replace them. */
4214
4215 void
4216 delete_for_peephole (from, to)
4217 register rtx from, to;
4218 {
4219 register rtx insn = from;
4220
4221 while (1)
4222 {
4223 register rtx next = NEXT_INSN (insn);
4224 register rtx prev = PREV_INSN (insn);
4225
4226 if (GET_CODE (insn) != NOTE)
4227 {
4228 INSN_DELETED_P (insn) = 1;
4229
4230 /* Patch this insn out of the chain. */
4231 /* We don't do this all at once, because we
4232 must preserve all NOTEs. */
4233 if (prev)
4234 NEXT_INSN (prev) = next;
4235
4236 if (next)
4237 PREV_INSN (next) = prev;
4238 }
4239
4240 if (insn == to)
4241 break;
4242 insn = next;
4243 }
4244
4245 /* Note that if TO is an unconditional jump
4246 we *do not* delete the BARRIER that follows,
4247 since the peephole that replaces this sequence
4248 is also an unconditional jump in that case. */
4249 }
4250 \f
4251 /* Invert the condition of the jump JUMP, and make it jump
4252 to label NLABEL instead of where it jumps now. */
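
/* A worked example of the REG_BR_PROB update below, with
   REG_BR_PROB_BASE at its usual 10000: a note value of 3000 on the old
   jump becomes 10000 - 3000 = 7000 on the inverted one, since "taken"
   and "not taken" trade places.  */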
4253
4254 int
4255 invert_jump (jump, nlabel)
4256 rtx jump, nlabel;
4257 {
4258 /* We have to either invert the condition and change the label or
4259 do neither. Either operation could fail. We first try to invert
4260 the jump. If that succeeds, we try changing the label. If that fails,
4261 we invert the jump back to what it was. */
4262
4263 if (! invert_exp (PATTERN (jump), jump))
4264 return 0;
4265
4266 if (redirect_jump (jump, nlabel))
4267 {
4268 if (flag_branch_probabilities)
4269 {
4270 rtx note = find_reg_note (jump, REG_BR_PROB, 0);
4271
4272 /* An inverted jump means that a probability taken becomes a
4273 probability not taken. Subtract the branch probability from the
4274 probability base to convert it back to a taken probability.
4275 (We don't flip the probability on a branch that's never taken.) */
4276 if (note && XINT (XEXP (note, 0), 0) >= 0)
4277 XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
4278 }
4279
4280 return 1;
4281 }
4282
4283 if (! invert_exp (PATTERN (jump), jump))
4284 /* This should just be putting it back the way it was. */
4285 abort ();
4286
4287 return 0;
4288 }
4289
4290 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4291
4292 Return 1 if we can do so, 0 if we cannot find a way to do so that
4293 matches a pattern. */
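
/* For example, (if_then_else (eq (reg A) (const_int 0)) (label_ref L) (pc))
   is preferably rewritten as
   (if_then_else (ne (reg A) (const_int 0)) (label_ref L) (pc));
   failing that, the two arms are swapped instead.  (A sketch of the
   two strategies tried below.)  */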
4294
4295 int
4296 invert_exp (x, insn)
4297 rtx x;
4298 rtx insn;
4299 {
4300 register RTX_CODE code;
4301 register int i;
4302 register char *fmt;
4303
4304 code = GET_CODE (x);
4305
4306 if (code == IF_THEN_ELSE)
4307 {
4308 register rtx comp = XEXP (x, 0);
4309 register rtx tem;
4310
4311 /* We can do this in two ways: The preferable way, which can only
4312 be done if this is not an integer comparison, is to reverse
4313 the comparison code. Otherwise, swap the THEN-part and ELSE-part
4314 of the IF_THEN_ELSE. If we can't do either, fail. */
4315
4316 if (can_reverse_comparison_p (comp, insn)
4317 && validate_change (insn, &XEXP (x, 0),
4318 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
4319 GET_MODE (comp), XEXP (comp, 0),
4320 XEXP (comp, 1)), 0))
4321 return 1;
4322
4323 tem = XEXP (x, 1);
4324 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
4325 validate_change (insn, &XEXP (x, 2), tem, 1);
4326 return apply_change_group ();
4327 }
4328
4329 fmt = GET_RTX_FORMAT (code);
4330 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4331 {
4332 if (fmt[i] == 'e')
4333 if (! invert_exp (XEXP (x, i), insn))
4334 return 0;
4335 if (fmt[i] == 'E')
4336 {
4337 register int j;
4338 for (j = 0; j < XVECLEN (x, i); j++)
4339 if (!invert_exp (XVECEXP (x, i, j), insn))
4340 return 0;
4341 }
4342 }
4343
4344 return 1;
4345 }
4346 \f
4347 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4348 If the old jump target label is unused as a result,
4349 it and the code following it may be deleted.
4350
4351 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4352 RETURN insn.
4353
4354 The return value will be 1 if the change was made, 0 if it wasn't (this
4355 can only occur for NLABEL == 0). */
4356
4357 int
4358 redirect_jump (jump, nlabel)
4359 rtx jump, nlabel;
4360 {
4361 register rtx olabel = JUMP_LABEL (jump);
4362
4363 if (nlabel == olabel)
4364 return 1;
4365
4366 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
4367 return 0;
4368
4369 /* If this is an unconditional branch, delete it from the jump_chain of
4370 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4371 have UID's in range and JUMP_CHAIN is valid). */
4372 if (jump_chain && (simplejump_p (jump)
4373 || GET_CODE (PATTERN (jump)) == RETURN))
4374 {
4375 int label_index = nlabel ? INSN_UID (nlabel) : 0;
4376
4377 delete_from_jump_chain (jump);
4378 if (label_index < max_jump_chain
4379 && INSN_UID (jump) < max_jump_chain)
4380 {
4381 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
4382 jump_chain[label_index] = jump;
4383 }
4384 }
4385
4386 JUMP_LABEL (jump) = nlabel;
4387 if (nlabel)
4388 ++LABEL_NUSES (nlabel);
4389
4390 if (olabel && --LABEL_NUSES (olabel) == 0)
4391 delete_insn (olabel);
4392
4393 return 1;
4394 }
4395
4396 /* Delete the instruction JUMP from any jump chain it might be on. */
4397
4398 static void
4399 delete_from_jump_chain (jump)
4400 rtx jump;
4401 {
4402 int index;
4403 rtx olabel = JUMP_LABEL (jump);
4404
4405 /* Handle unconditional jumps. */
4406 if (jump_chain && olabel != 0
4407 && INSN_UID (olabel) < max_jump_chain
4408 && simplejump_p (jump))
4409 index = INSN_UID (olabel);
4410 /* Handle return insns. */
4411 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4412 index = 0;
4413 else return;
4414
4415 if (jump_chain[index] == jump)
4416 jump_chain[index] = jump_chain[INSN_UID (jump)];
4417 else
4418 {
4419 rtx insn;
4420
4421 for (insn = jump_chain[index];
4422 insn != 0;
4423 insn = jump_chain[INSN_UID (insn)])
4424 if (jump_chain[INSN_UID (insn)] == jump)
4425 {
4426 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4427 break;
4428 }
4429 }
4430 }
4431
4432 /* Throughout the rtx at LOC:
4433 if NLABEL is nonzero, alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL),
4434 and if in addition OLABEL is zero, alter (RETURN) to (LABEL_REF NLABEL);
4435
4436 if NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN), converting
4437 (set (pc) (label_ref OLABEL)) to (return), and check validity
4438 with validate_change.
4439
4440 Return 0 if we found a change we would like to make but it is invalid.
4441 Otherwise, return 1. */
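
/* For instance, with NLABEL == 0 this turns

	(set (pc) (if_then_else COND (label_ref OLABEL) (pc)))

   into

	(set (pc) (if_then_else COND (return) (pc)))

   when validate_change accepts the result (a sketch; whether the
   target has such a conditional-return pattern decides).  */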
4442
4443 int
4444 redirect_exp (loc, olabel, nlabel, insn)
4445 rtx *loc;
4446 rtx olabel, nlabel;
4447 rtx insn;
4448 {
4449 register rtx x = *loc;
4450 register RTX_CODE code = GET_CODE (x);
4451 register int i;
4452 register char *fmt;
4453
4454 if (code == LABEL_REF)
4455 {
4456 if (XEXP (x, 0) == olabel)
4457 {
4458 if (nlabel)
4459 XEXP (x, 0) = nlabel;
4460 else
4461 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4462 return 1;
4463 }
4464 }
4465 else if (code == RETURN && olabel == 0)
4466 {
4467 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
4468 if (loc == &PATTERN (insn))
4469 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
4470 return validate_change (insn, loc, x, 0);
4471 }
4472
4473 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
4474 && GET_CODE (SET_SRC (x)) == LABEL_REF
4475 && XEXP (SET_SRC (x), 0) == olabel)
4476 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4477
4478 fmt = GET_RTX_FORMAT (code);
4479 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4480 {
4481 if (fmt[i] == 'e')
4482 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
4483 return 0;
4484 if (fmt[i] == 'E')
4485 {
4486 register int j;
4487 for (j = 0; j < XVECLEN (x, i); j++)
4488 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
4489 return 0;
4490 }
4491 }
4492
4493 return 1;
4494 }
4495 \f
4496 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4497
4498 If the old jump target label (before the dispatch table) becomes unused,
4499 it and the dispatch table may be deleted. In that case, find the insn
4500 before the jump that references that label, and delete it and its
4501 logical successors too. */
4502
4503 static void
4504 redirect_tablejump (jump, nlabel)
4505 rtx jump, nlabel;
4506 {
4507 register rtx olabel = JUMP_LABEL (jump);
4508
4509 /* Add this jump to the jump_chain of NLABEL. */
4510 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4511 && INSN_UID (jump) < max_jump_chain)
4512 {
4513 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4514 jump_chain[INSN_UID (nlabel)] = jump;
4515 }
4516
4517 PATTERN (jump) = gen_jump (nlabel);
4518 JUMP_LABEL (jump) = nlabel;
4519 ++LABEL_NUSES (nlabel);
4520 INSN_CODE (jump) = -1;
4521
4522 if (--LABEL_NUSES (olabel) == 0)
4523 {
4524 delete_labelref_insn (jump, olabel, 0);
4525 delete_insn (olabel);
4526 }
4527 }
4528
4529 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4530 If we found one, delete it and then delete this insn if DELETE_THIS is
4531 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4532
4533 static int
4534 delete_labelref_insn (insn, label, delete_this)
4535 rtx insn, label;
4536 int delete_this;
4537 {
4538 int deleted = 0;
4539 rtx link;
4540
4541 if (GET_CODE (insn) != NOTE
4542 && reg_mentioned_p (label, PATTERN (insn)))
4543 {
4544 if (delete_this)
4545 {
4546 delete_insn (insn);
4547 deleted = 1;
4548 }
4549 else
4550 return 1;
4551 }
4552
4553 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4554 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4555 {
4556 if (delete_this)
4557 {
4558 delete_insn (insn);
4559 deleted = 1;
4560 }
4561 else
4562 return 1;
4563 }
4564
4565 return deleted;
4566 }
4567 \f
4568 /* Like rtx_equal_p except that it considers two REGs as equal
4569 if they renumber to the same value and considers two commutative
4570 operations to be the same if the order of the operands has been
4571 reversed.
4572
4573 ??? Addition is not commutative on the PA due to the weird implicit
4574 space register selection rules for memory addresses. Therefore, we
4575 don't consider a + b == b + a.
4576
4577 We could/should make this test a little tighter. Possibly only
4578 disabling it on the PA via some backend macro or only disabling this
4579 case when the PLUS is inside a MEM. */
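
/* For example (hypothetical numbers): after register allocation,
   (reg:SI 100) and (reg:SI 5) compare equal here when
   reg_renumber[100] == 5, even though rtx_equal_p would say no.  */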
4580
4581 int
4582 rtx_renumbered_equal_p (x, y)
4583 rtx x, y;
4584 {
4585 register int i;
4586 register RTX_CODE code = GET_CODE (x);
4587 register char *fmt;
4588
4589 if (x == y)
4590 return 1;
4591
4592 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4593 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4594 && GET_CODE (SUBREG_REG (y)) == REG)))
4595 {
4596 int reg_x = -1, reg_y = -1;
4597 int word_x = 0, word_y = 0;
4598
4599 if (GET_MODE (x) != GET_MODE (y))
4600 return 0;
4601
4602 /* If we haven't done any renumbering, don't
4603 make any assumptions. */
4604 if (reg_renumber == 0)
4605 return rtx_equal_p (x, y);
4606
4607 if (code == SUBREG)
4608 {
4609 reg_x = REGNO (SUBREG_REG (x));
4610 word_x = SUBREG_WORD (x);
4611
4612 if (reg_renumber[reg_x] >= 0)
4613 {
4614 reg_x = reg_renumber[reg_x] + word_x;
4615 word_x = 0;
4616 }
4617 }
4618
4619 else
4620 {
4621 reg_x = REGNO (x);
4622 if (reg_renumber[reg_x] >= 0)
4623 reg_x = reg_renumber[reg_x];
4624 }
4625
4626 if (GET_CODE (y) == SUBREG)
4627 {
4628 reg_y = REGNO (SUBREG_REG (y));
4629 word_y = SUBREG_WORD (y);
4630
4631 if (reg_renumber[reg_y] >= 0)
4632 {
4633 reg_y = reg_renumber[reg_y] + word_y;
4634 word_y = 0;
4635 }
4636 }
4637
4638 else
4639 {
4640 reg_y = REGNO (y);
4641 if (reg_renumber[reg_y] >= 0)
4642 reg_y = reg_renumber[reg_y];
4643 }
4644
4645 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4646 }
4647
4648 /* Now we have disposed of all the cases
4649 in which different rtx codes can match. */
4650 if (code != GET_CODE (y))
4651 return 0;
4652
4653 switch (code)
4654 {
4655 case PC:
4656 case CC0:
4657 case ADDR_VEC:
4658 case ADDR_DIFF_VEC:
4659 return 0;
4660
4661 case CONST_INT:
4662 return INTVAL (x) == INTVAL (y);
4663
4664 case LABEL_REF:
4665 /* We can't assume nonlocal labels have their following insns yet. */
4666 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4667 return XEXP (x, 0) == XEXP (y, 0);
4668
4669 /* Two label-refs are equivalent if they point at labels
4670 in the same position in the instruction stream. */
4671 return (next_real_insn (XEXP (x, 0))
4672 == next_real_insn (XEXP (y, 0)));
4673
4674 case SYMBOL_REF:
4675 return XSTR (x, 0) == XSTR (y, 0);
4676
4677 case CODE_LABEL:
4678 /* If we didn't match EQ equality above, they aren't the same. */
4679 return 0;
4680
4681 default:
4682 break;
4683 }
4684
4685 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4686
4687 if (GET_MODE (x) != GET_MODE (y))
4688 return 0;
4689
4690 /* For commutative operations, the RTXs match if the operands match in
4691 either order. Also handle the simple binary and unary cases without a loop.
4692
4693 ??? Don't consider PLUS a commutative operator; see comments above. */
4694 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4695 && code != PLUS)
4696 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4697 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4698 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4699 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4700 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4701 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4702 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4703 else if (GET_RTX_CLASS (code) == '1')
4704 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4705
4706 /* Compare the elements. If any pair of corresponding elements
4707 fails to match, return 0 for the whole thing.
4708
4709 fmt = GET_RTX_FORMAT (code);
4710 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4711 {
4712 register int j;
4713 switch (fmt[i])
4714 {
4715 case 'w':
4716 if (XWINT (x, i) != XWINT (y, i))
4717 return 0;
4718 break;
4719
4720 case 'i':
4721 if (XINT (x, i) != XINT (y, i))
4722 return 0;
4723 break;
4724
4725 case 's':
4726 if (strcmp (XSTR (x, i), XSTR (y, i)))
4727 return 0;
4728 break;
4729
4730 case 'e':
4731 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4732 return 0;
4733 break;
4734
4735 case 'u':
4736 if (XEXP (x, i) != XEXP (y, i))
4737 return 0;
4738 /* fall through. */
4739 case '0':
4740 break;
4741
4742 case 'E':
4743 if (XVECLEN (x, i) != XVECLEN (y, i))
4744 return 0;
4745 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4746 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4747 return 0;
4748 break;
4749
4750 default:
4751 abort ();
4752 }
4753 }
4754 return 1;
4755 }
4756 \f
4757 /* If X is a hard register or equivalent to one or a subregister of one,
4758 return the hard register number. If X is a pseudo register that was not
4759 assigned a hard register, return the pseudo register number. Otherwise,
4760 return -1. Any rtx is valid for X. */
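
/* E.g. (hypothetical numbers): if pseudo 100 was assigned hard reg 3,
   this returns 3 for (reg:SI 100), 4 for (subreg:SI (reg:DI 100) 1),
   100 for an unallocated pseudo 100, and -1 for anything that is not
   a register.  */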
4761
4762 int
4763 true_regnum (x)
4764 rtx x;
4765 {
4766 if (GET_CODE (x) == REG)
4767 {
4768 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4769 return reg_renumber[REGNO (x)];
4770 return REGNO (x);
4771 }
4772 if (GET_CODE (x) == SUBREG)
4773 {
4774 int base = true_regnum (SUBREG_REG (x));
4775 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
4776 return SUBREG_WORD (x) + base;
4777 }
4778 return -1;
4779 }
4780 \f
4781 /* Optimize code of the form:
4782
4783 for (x = a[i]; x; ...)
4784 ...
4785 for (x = a[i]; x; ...)
4786 ...
4787 foo:
4788
4789 Loop optimize will change the above code into
4790
4791 if (x = a[i])
4792 for (;;)
4793 { ...; if (! (x = ...)) break; }
4794 if (x = a[i])
4795 for (;;)
4796 { ...; if (! (x = ...)) break; }
4797 foo:
4798
4799 In general, if the first test fails, the program can branch
4800 directly to `foo' and skip the second try which is doomed to fail.
4801 We run this after loop optimization and before flow analysis. */
4802
4803 /* When comparing the insn patterns, we track the fact that different
4804 pseudo-register numbers may have been used in each computation.
4805 The following array stores an equivalence -- same_regs[I] == J means
4806 that pseudo register I was used in the first set of tests in a context
4807 where J was used in the second set. We also count the number of such
4808 pending equivalences. If nonzero, the expressions really aren't the
4809 same. */
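
/* An illustrative sketch: when the walk compares "x = a[i]" from the
   first sequence against "y = a[i]" from the second, it records
   same_regs[x] = y and bumps num_same_regs; the SET case of
   rtx_equal_for_thread_p must cancel that entry again before the walk
   reaches the first branch target, or the match is abandoned.  */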
4810
4811 static int *same_regs;
4812
4813 static int num_same_regs;
4814
4815 /* Track any registers modified between the target of the first jump and
4816 the second jump. They never compare equal. */
4817
4818 static char *modified_regs;
4819
4820 /* Record if memory was modified. */
4821
4822 static int modified_mem;
4823
4824 /* Called via note_stores on each insn between the target of the first
4825 branch and the second branch. It marks any changed registers. */
4826
4827 static void
4828 mark_modified_reg (dest, x)
4829 rtx dest;
4830 rtx x ATTRIBUTE_UNUSED;
4831 {
4832 int regno, i;
4833
4834 if (GET_CODE (dest) == SUBREG)
4835 dest = SUBREG_REG (dest);
4836
4837 if (GET_CODE (dest) == MEM)
4838 modified_mem = 1;
4839
4840 if (GET_CODE (dest) != REG)
4841 return;
4842
4843 regno = REGNO (dest);
4844 if (regno >= FIRST_PSEUDO_REGISTER)
4845 modified_regs[regno] = 1;
4846 else
4847 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
4848 modified_regs[regno + i] = 1;
4849 }
4850
4851 /* F is the first insn in the chain of insns. */
4852
4853 void
4854 thread_jumps (f, max_reg, flag_before_loop)
4855 rtx f;
4856 int max_reg;
4857 int flag_before_loop;
4858 {
4859 /* Basic algorithm is to find a conditional branch,
4860 the label it may branch to, and the branch after
4861 that label. If the two branches test the same condition,
4862 walk back from both branch paths until the insn patterns
4863 differ, or code labels are hit. If we make it back to
4864 the target of the first branch, then we know that the first branch
4865 will either always succeed or always fail depending on the relative
4866 senses of the two branches. So adjust the first branch accordingly
4867 in this case. */
4868
4869 rtx label, b1, b2, t1, t2;
4870 enum rtx_code code1, code2;
4871 rtx b1op0, b1op1, b2op0, b2op1;
4872 int changed = 1;
4873 int i;
4874 int *all_reset;
4875
4876 /* Allocate register tables and quick-reset table. */
4877 modified_regs = (char *) alloca (max_reg * sizeof (char));
4878 same_regs = (int *) alloca (max_reg * sizeof (int));
4879 all_reset = (int *) alloca (max_reg * sizeof (int));
4880 for (i = 0; i < max_reg; i++)
4881 all_reset[i] = -1;
4882
4883 while (changed)
4884 {
4885 changed = 0;
4886
4887 for (b1 = f; b1; b1 = NEXT_INSN (b1))
4888 {
4889 /* Get to a candidate branch insn. */
4890 if (GET_CODE (b1) != JUMP_INSN
4891 || ! condjump_p (b1) || simplejump_p (b1)
4892 || JUMP_LABEL (b1) == 0)
4893 continue;
4894
4895 bzero (modified_regs, max_reg * sizeof (char));
4896 modified_mem = 0;
4897
4898 bcopy ((char *) all_reset, (char *) same_regs,
4899 max_reg * sizeof (int));
4900 num_same_regs = 0;
4901
4902 label = JUMP_LABEL (b1);
4903
4904 /* Look for a branch after the target. Record any registers and
4905 memory modified between the target and the branch. Stop when we
4906 get to a label since we can't know what was changed there. */
4907 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
4908 {
4909 if (GET_CODE (b2) == CODE_LABEL)
4910 break;
4911
4912 else if (GET_CODE (b2) == JUMP_INSN)
4913 {
4914 /* If this is an unconditional jump and is the only use of
4915 its target label, we can follow it. */
4916 if (simplejump_p (b2)
4917 && JUMP_LABEL (b2) != 0
4918 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
4919 {
4920 b2 = JUMP_LABEL (b2);
4921 continue;
4922 }
4923 else
4924 break;
4925 }
4926
4927 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
4928 continue;
4929
4930 if (GET_CODE (b2) == CALL_INSN)
4931 {
4932 modified_mem = 1;
4933 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4934 if (call_used_regs[i] && ! fixed_regs[i]
4935 && i != STACK_POINTER_REGNUM
4936 && i != FRAME_POINTER_REGNUM
4937 && i != HARD_FRAME_POINTER_REGNUM
4938 && i != ARG_POINTER_REGNUM)
4939 modified_regs[i] = 1;
4940 }
4941
4942 note_stores (PATTERN (b2), mark_modified_reg);
4943 }
4944
4945 /* Check the next candidate branch insn from the label
4946 of the first. */
4947 if (b2 == 0
4948 || GET_CODE (b2) != JUMP_INSN
4949 || b2 == b1
4950 || ! condjump_p (b2)
4951 || simplejump_p (b2))
4952 continue;
4953
4954 /* Get the comparison codes and operands, reversing the
4955 codes if appropriate. If we don't have comparison codes,
4956 we can't do anything. */
4957 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
4958 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
4959 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
4960 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
4961 code1 = reverse_condition (code1);
4962
4963 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
4964 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
4965 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
4966 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
4967 code2 = reverse_condition (code2);
4968
4969 /* If they test the same things and knowing that B1 branches
4970 tells us whether or not B2 branches, check if we
4971 can thread the branch. */
4972 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
4973 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
4974 && (comparison_dominates_p (code1, code2)
4975 || (comparison_dominates_p (code1, reverse_condition (code2))
4976 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
4977 0),
4978 b1))))
4979 {
4980 t1 = prev_nonnote_insn (b1);
4981 t2 = prev_nonnote_insn (b2);
4982
4983 while (t1 != 0 && t2 != 0)
4984 {
4985 if (t2 == label)
4986 {
4987 /* We have reached the target of the first branch.
4988 If there are no pending register equivalents,
4989 we know that this branch will either always
4990 succeed (if the senses of the two branches are
4991 the same) or always fail (if not). */
4992 rtx new_label;
4993
4994 if (num_same_regs != 0)
4995 break;
4996
4997 if (comparison_dominates_p (code1, code2))
4998 new_label = JUMP_LABEL (b2);
4999 else
5000 new_label = get_label_after (b2);
5001
5002 if (JUMP_LABEL (b1) != new_label)
5003 {
5004 rtx prev = PREV_INSN (new_label);
5005
5006 if (flag_before_loop
5007 && GET_CODE (prev) == NOTE
5008 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
5009 {
5010 /* Don't thread to the loop label. If a loop
5011 label is reused, loop optimization will
5012 be disabled for that loop. */
5013 new_label = gen_label_rtx ();
5014 emit_label_after (new_label, PREV_INSN (prev));
5015 }
5016 changed |= redirect_jump (b1, new_label);
5017 }
5018 break;
5019 }
5020
5021 /* If either of these is not a normal insn (it might be
5022 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
5023 have already been skipped above.) Similarly, fail
5024 if the insns are different. */
5025 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
5026 || recog_memoized (t1) != recog_memoized (t2)
5027 || ! rtx_equal_for_thread_p (PATTERN (t1),
5028 PATTERN (t2), t2))
5029 break;
5030
5031 t1 = prev_nonnote_insn (t1);
5032 t2 = prev_nonnote_insn (t2);
5033 }
5034 }
5035 }
5036 }
5037 }
5038 \f
5039 /* This is like RTX_EQUAL_P except that it knows about our handling of
5040 possibly equivalent registers and knows to consider volatile and
5041 modified objects as not equal.
5042
5043 YINSN is the insn containing Y. */
5044
5045 int
5046 rtx_equal_for_thread_p (x, y, yinsn)
5047 rtx x, y;
5048 rtx yinsn;
5049 {
5050 register int i;
5051 register int j;
5052 register enum rtx_code code;
5053 register char *fmt;
5054
5055 code = GET_CODE (x);
5056 /* Rtx's of different codes cannot be equal. */
5057 if (code != GET_CODE (y))
5058 return 0;
5059
5060 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
5061 (REG:SI x) and (REG:HI x) are NOT equivalent. */
5062
5063 if (GET_MODE (x) != GET_MODE (y))
5064 return 0;
5065
5066 /* For IEEE floating point (when -ffast-math is not in effect), consider
5067 everything unequal. This is a bit pessimistic, but this pass would
5068 only rarely do anything for FP anyway. */
5069 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
5070 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
5071 return 0;
5072
5073 /* For commutative operations, the RTXs match if the operands match in
5074 either order. Also handle the simple binary and unary cases without a loop. */
5075 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5076 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5077 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
5078 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
5079 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
5080 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
5081 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5082 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
5083 else if (GET_RTX_CLASS (code) == '1')
5084 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5085
5086 /* Handle special-cases first. */
5087 switch (code)
5088 {
5089 case REG:
5090 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
5091 return 1;
5092
5093 /* If neither is a user variable nor a hard register, check for possible
5094 equivalence. */
5095 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
5096 || REGNO (x) < FIRST_PSEUDO_REGISTER
5097 || REGNO (y) < FIRST_PSEUDO_REGISTER)
5098 return 0;
5099
5100 if (same_regs[REGNO (x)] == -1)
5101 {
5102 same_regs[REGNO (x)] = REGNO (y);
5103 num_same_regs++;
5104
5105 /* If this is the first time we are seeing a register on the `Y'
5106 side, see if it is the last use. If not, we can't thread the
5107 jump, so mark it as not equivalent. */
5108 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
5109 return 0;
5110
5111 return 1;
5112 }
5113 else
5114 return (same_regs[REGNO (x)] == REGNO (y));
5115
5116 break;
5117
5118 case MEM:
5119 /* If memory modified or either volatile, not equivalent.
5120 Else, check address. */
5121 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5122 return 0;
5123
5124 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5125
5126 case ASM_INPUT:
5127 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5128 return 0;
5129
5130 break;
5131
5132 case SET:
5133 /* Cancel a pending `same_regs' if setting equivalenced registers.
5134 Then process source. */
5135 if (GET_CODE (SET_DEST (x)) == REG
5136 && GET_CODE (SET_DEST (y)) == REG)
5137 {
5138 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
5139 {
5140 same_regs[REGNO (SET_DEST (x))] = -1;
5141 num_same_regs--;
5142 }
5143 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
5144 return 0;
5145 }
5146 else
5147 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
5148 return 0;
5149
5150 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
5151
5152 case LABEL_REF:
5153 return XEXP (x, 0) == XEXP (y, 0);
5154
5155 case SYMBOL_REF:
5156 return XSTR (x, 0) == XSTR (y, 0);
5157
5158 default:
5159 break;
5160 }
5161
5162 if (x == y)
5163 return 1;
5164
5165 fmt = GET_RTX_FORMAT (code);
5166 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5167 {
5168 switch (fmt[i])
5169 {
5170 case 'w':
5171 if (XWINT (x, i) != XWINT (y, i))
5172 return 0;
5173 break;
5174
5175 case 'n':
5176 case 'i':
5177 if (XINT (x, i) != XINT (y, i))
5178 return 0;
5179 break;
5180
5181 case 'V':
5182 case 'E':
5183 /* Two vectors must have the same length. */
5184 if (XVECLEN (x, i) != XVECLEN (y, i))
5185 return 0;
5186
5187 /* And the corresponding elements must match. */
5188 for (j = 0; j < XVECLEN (x, i); j++)
5189 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
5190 XVECEXP (y, i, j), yinsn) == 0)
5191 return 0;
5192 break;
5193
5194 case 'e':
5195 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
5196 return 0;
5197 break;
5198
5199 case 'S':
5200 case 's':
5201 if (strcmp (XSTR (x, i), XSTR (y, i)))
5202 return 0;
5203 break;
5204
5205 case 'u':
5206 /* These are just backpointers, so they don't matter. */
5207 break;
5208
5209 case '0':
5210 break;
5211
5212 /* It is believed that rtx's at this level will never
5213 contain anything but integers and other rtx's,
5214 except for within LABEL_REFs and SYMBOL_REFs. */
5215 default:
5216 abort ();
5217 }
5218 }
5219 return 1;
5220 }
5221 \f
5222
5223 #ifndef HAVE_cc0
5224 /* Starting at the jump insn INSN, return the insn that NEW can safely
5225 be inserted in front of. Return 0 if it is not safe to do this jump
5226 optimization. Note that NEW must contain a single set. */
5227
5228 static rtx
5229 find_insert_position (insn, new)
5230 rtx insn;
5231 rtx new;
5232 {
5233 int i;
5234 rtx prev;
5235
5236 /* If NEW does not clobber, it is safe to insert NEW before INSN. */
5237 if (GET_CODE (PATTERN (new)) != PARALLEL)
5238 return insn;
5239
5240 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5241 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5242 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5243 insn))
5244 break;
5245
5246 if (i < 0)
5247 return insn;
5248
5249 /* There is a good chance that the previous insn PREV sets the thing
5250 being clobbered (often the CC in a hard reg). If PREV does not
5251 use what NEW sets, we can insert NEW before PREV. */
5252
5253 prev = prev_active_insn (insn);
5254 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5255 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5256 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5257 insn)
5258 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5259 prev))
5260 return 0;
5261
5262 return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
5263 }
5264 #endif /* !HAVE_cc0 */