jump.c (duplicate_loop_exit_test): Call reg_scan_update after creating new registers.
/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This is the jump-optimization pass of the compiler.
   It is run two or three times: once before cse, sometimes once after cse,
   and once after reload (before final).

   jump_optimize deletes unreachable code and labels that are not used.
   It also deletes jumps that jump to the following insn,
   and simplifies jumps around unconditional jumps and jumps
   to unconditional jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   Optionally, cross-jumping can be done.  Currently it is done
   only the last time (after reload and before final).
   In fact, the code for cross-jumping now assumes that register
   allocation has been done, since it uses `rtx_renumbered_equal_p'.

   Jump optimization is done after cse when cse's constant-propagation
   causes jumps to become unconditional or to be deleted.

   Unreachable loops are not detected here, because the labels
   have references and the insns appear reachable from the labels.
   find_basic_blocks in flow.c finds and deletes such loops.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */
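
/* As a quick illustration (an example added for exposition, not part
   of the original sources): a jump whose target is itself an
   unconditional jump, as in

	jmp L1   ...   L1: jmp L2

   is redirected straight to L2, and L1 can be deleted once its
   LABEL_NUSES count drops to zero.  Likewise, a jump around an
   unconditional jump,

	if (c) goto L1;  goto L2;  L1: ...

   simplifies to the single inverted conditional jump

	if (! c) goto L2;  */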

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"

/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */

static rtx *jump_chain;
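
/* For illustration, every unconditional jump reaching LABEL can be
   visited by chasing this chain (a hypothetical sketch of the idiom
   used throughout this file):

	for (x = jump_chain[INSN_UID (label)]; x;
	     x = jump_chain[INSN_UID (x)])
	  ...  */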

/* Maximum index in jump_chain.  */

static int max_jump_chain;

/* Set nonzero by jump_optimize if control can fall through
   to the end of the function.  */
int can_reach_end;

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C and
   R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;

static int init_label_info PROTO((rtx));
static void delete_barrier_successors PROTO((rtx));
static void mark_all_labels PROTO((rtx, int));
static rtx delete_unreferenced_labels PROTO((rtx));
static void delete_noop_moves PROTO((rtx));
static int calculate_can_reach_end PROTO((rtx, int, int));
static int duplicate_loop_exit_test PROTO((rtx));
static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump PROTO((rtx, rtx, rtx));
static int jump_back_p PROTO((rtx, rtx));
static int tension_vector_labels PROTO((rtx, int));
static void mark_jump_label PROTO((rtx, rtx, int));
static void delete_computation PROTO((rtx));
static void delete_from_jump_chain PROTO((rtx));
static int delete_labelref_insn PROTO((rtx, rtx, int));
static void mark_modified_reg PROTO((rtx, rtx));
static void redirect_tablejump PROTO((rtx, rtx));
static void jump_optimize_1 PROTO ((rtx, int, int, int, int));
#ifndef HAVE_cc0
static rtx find_insert_position PROTO((rtx, rtx));
#endif

/* Main external entry point into the jump optimizer.  See comments before
   jump_optimize_1 for descriptions of the arguments.  */
void
jump_optimize (f, cross_jump, noop_moves, after_regscan)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
{
  jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0);
}

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 1);
}

/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is 1, detect matching code
   before a jump and its destination and unify them.
   If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

   If NOOP_MOVES is nonzero, delete no-op move insns.

   If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
   after regscan, and it is safe to use regno_first_uid and regno_last_uid.

   If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
   and JUMP_LABEL field for jumping insns.

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.  */

static void
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
     int mark_labels_only;
{
  register rtx insn, next;
  int changed;
  int old_max_reg;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;

  cross_jump_death_matters = (cross_jump == 2);
  max_uid = init_label_info (f) + 1;

  /* If we are performing cross jump optimizations, then initialize
     tables mapping UIDs to EH regions to avoid incorrect movement
     of insns from one EH region to another.  */
  if (flag_exceptions && cross_jump)
    init_insn_eh_region (f, max_uid);

  delete_barrier_successors (f);

  /* Leave some extra room for labels and duplicate exit test insns
     we make.  */
  max_jump_chain = max_uid * 14 / 10;
  jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
  bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));

  mark_all_labels (f, cross_jump);

  /* Keep track of labels used from static data;
     they cannot ever be deleted.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  check_exception_handler_labels ();

  /* Keep track of labels used for marking handlers for exception
     regions; they cannot usually be deleted.  */

  for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
     notes and recompute LABEL_NUSES.  */
  if (mark_labels_only)
    return;

  exception_optimize ();

  last_insn = delete_unreferenced_labels (f);

  if (!optimize)
    {
      /* CAN_REACH_END is persistent for each function.  Once set it should
	 not be cleared.  This is especially true for the case where we
	 delete the NOTE_FUNCTION_END note.  CAN_REACH_END is cleared by
	 the front-end before compiling each function.  */
      if (calculate_can_reach_end (last_insn, 1, 0))
	can_reach_end = 1;

      /* Zero the "deleted" flag of all the "deleted" insns.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
	INSN_DELETED_P (insn) = 0;

      /* Show that the jump chain is not valid.  */
      jump_chain = 0;
      return;
    }

#ifdef HAVE_return
  if (HAVE_return)
    {
      /* If we fall through to the epilogue, see if we can insert a RETURN insn
	 in front of it.  If the machine allows it at this point (we might be
	 after reload for a leaf routine), it will improve optimization for it
	 to be there.  */
      insn = get_last_insn ();
      while (insn && GET_CODE (insn) == NOTE)
	insn = PREV_INSN (insn);

      if (insn && GET_CODE (insn) != BARRIER)
	{
	  emit_jump_insn (gen_return ());
	  emit_barrier ();
	}
    }
#endif

  if (noop_moves)
    delete_noop_moves (f);

  /* If we haven't yet gotten to reload and we have just run regscan,
     delete any insn that sets a register that isn't used elsewhere.
     This helps some of the optimizations below by having fewer insns
     being jumped around.  */

  if (! reload_completed && after_regscan)
    for (insn = f; insn; insn = next)
      {
	rtx set = single_set (insn);

	next = NEXT_INSN (insn);

	if (set && GET_CODE (SET_DEST (set)) == REG
	    && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
	    && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    /* We use regno_last_note_uid so as not to delete the setting
	       of a reg that's used in notes.  A subsequent optimization
	       might arrange to use that reg for real.  */
	    && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    && ! side_effects_p (SET_SRC (set))
	    && ! find_reg_note (insn, REG_RETVAL, 0))
	  delete_insn (insn);
      }

  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  changed = 1;
  old_max_reg = max_reg_num ();
  while (changed)
    {
      changed = 0;

      for (insn = f; insn; insn = next)
	{
	  rtx reallabelprev;
	  rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
	  rtx nlabel;
	  int this_is_simplejump, this_is_condjump, reversep = 0;
	  int this_is_condjump_in_parallel;

#if 0
	  /* If NOT the first iteration, if this is the last jump pass
	     (just before final), do the special peephole optimizations.
	     Avoiding the first iteration gives ordinary jump opts
	     a chance to work before peephole opts.  */

	  if (reload_completed && !first && !flag_no_peephole)
	    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
	      peephole (insn);
#endif

	  /* That could have deleted some insns after INSN, so check now
	     what the following insn is.  */

	  next = NEXT_INSN (insn);

	  /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
	     jump.  Try to optimize by duplicating the loop exit test if so.
	     This is only safe immediately after regscan, because it uses
	     the values of regno_first_uid and regno_last_uid.  */
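	  /* (An illustration, not from the original sources: a loop
	     emitted as

		NOTE_INSN_LOOP_BEG
		jmp L_test
	     L_body: ...
	     L_test: if (cond) goto L_body

	     gets a copy of the exit test in front, becoming

		if (! cond) goto L_end
	     L_body: ...
	     L_test: if (cond) goto L_body
	     L_end:

	     so that entering the loop no longer requires the initial
	     unconditional jump.  The labels here are hypothetical.)  */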
	  if (after_regscan && GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      && (temp1 = next_nonnote_insn (insn)) != 0
	      && simplejump_p (temp1))
	    {
	      temp = PREV_INSN (insn);
	      if (duplicate_loop_exit_test (insn))
		{
		  changed = 1;
		  next = NEXT_INSN (temp);
		  continue;
		}
	    }

	  if (GET_CODE (insn) != JUMP_INSN)
	    continue;

	  this_is_simplejump = simplejump_p (insn);
	  this_is_condjump = condjump_p (insn);
	  this_is_condjump_in_parallel = condjump_in_parallel_p (insn);

	  /* Tension the labels in dispatch tables.  */

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 0);
	  if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 1);

	  /* If a dispatch table always goes to the same place,
	     get rid of it and replace the insn that uses it.  */

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      int i;
	      rtx pat = PATTERN (insn);
	      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
	      int len = XVECLEN (pat, diff_vec_p);
	      rtx dispatch = prev_real_insn (insn);

	      for (i = 0; i < len; i++)
		if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
		    != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
		  break;
	      if (i == len
		  && dispatch != 0
		  && GET_CODE (dispatch) == JUMP_INSN
		  && JUMP_LABEL (dispatch) != 0
		  /* Don't mess with a casesi insn.  */
		  && !(GET_CODE (PATTERN (dispatch)) == SET
		       && (GET_CODE (SET_SRC (PATTERN (dispatch)))
			   == IF_THEN_ELSE))
		  && next_real_insn (JUMP_LABEL (dispatch)) == insn)
		{
		  redirect_tablejump (dispatch,
				      XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
		  changed = 1;
		}
	    }

	  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

	  /* If a jump references the end of the function, try to turn
	     it into a RETURN insn, possibly a conditional one.  */
	  if (JUMP_LABEL (insn)
	      && (next_active_insn (JUMP_LABEL (insn)) == 0
		  || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
		     == RETURN))
	    changed |= redirect_jump (insn, NULL_RTX);

	  /* Detect jump to following insn.  */
	  if (reallabelprev == insn && condjump_p (insn))
	    {
	      next = next_real_insn (JUMP_LABEL (insn));
	      delete_jump (insn);
	      changed = 1;
	      continue;
	    }

	  /* If we have an unconditional jump preceded by a USE, try to put
	     the USE before the target and jump there.  This simplifies many
	     of the optimizations below since we don't have to worry about
	     dealing with these USE insns.  We only do this if the label
	     being branched to already has the identical USE or if code
	     never falls through to that label.  */

	  if (this_is_simplejump
	      && (temp = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
	      && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
	      && (GET_CODE (temp1) == BARRIER
		  || (GET_CODE (temp1) == INSN
		      && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
	      /* Don't do this optimization if we have a loop containing only
		 the USE instruction, and the loop start label has a usage
		 count of 1.  This is because we will redo this optimization
		 every time through the outer loop, and jump opt will never
		 exit.  */
	      && ! ((temp2 = prev_nonnote_insn (temp)) != 0
		    && temp2 == JUMP_LABEL (insn)
		    && LABEL_NUSES (temp2) == 1))
	    {
	      if (GET_CODE (temp1) == BARRIER)
		{
		  emit_insn_after (PATTERN (temp), temp1);
		  temp1 = NEXT_INSN (temp1);
		}

	      delete_insn (temp);
	      redirect_jump (insn, get_label_before (temp1));
	      reallabelprev = prev_real_insn (temp1);
	      changed = 1;
	    }

	  /* Simplify if (...) x = a; else x = b; by converting it
	     to x = b; if (...) x = a;
	     if B is sufficiently simple, the test doesn't involve X,
	     and nothing in the test modifies B or X.

	     If we have small register classes, we also can't do this if X
	     is a hard register.

	     If the "x = b;" insn has any REG_NOTES, we don't do this because
	     of the possibility that we are running after CSE and there is a
	     REG_EQUAL note that is only valid if the branch has already been
	     taken.  If we move the insn with the REG_EQUAL note, we may
	     fold the comparison to always be false in a later CSE pass.
	     (We could also delete the REG_NOTES when moving the insn, but it
	     seems simpler to not move it.)  An exception is that we can move
	     the insn if the only note is a REG_EQUAL or REG_EQUIV whose
	     value is the same as "b".

	     INSN is the branch over the `else' part.

	     We set:

	     TEMP to the jump insn preceding "x = a;"
	     TEMP1 to X
	     TEMP2 to the insn that sets "x = b;"
	     TEMP3 to the insn that sets "x = a;"
	     TEMP4 to the set of "x = b";  */

	  if (this_is_simplejump
	      && (temp3 = prev_active_insn (insn)) != 0
	      && GET_CODE (temp3) == INSN
	      && (temp4 = single_set (temp3)) != 0
	      && GET_CODE (temp1 = SET_DEST (temp4)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && (temp2 = next_active_insn (insn)) != 0
	      && GET_CODE (temp2) == INSN
	      && (temp4 = single_set (temp2)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp1)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && (REG_NOTES (temp2) == 0
		  || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
		       || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
		      && XEXP (REG_NOTES (temp2), 1) == 0
		      && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
				      SET_SRC (temp4))))
	      && (temp = prev_active_insn (temp3)) != 0
	      && condjump_p (temp) && ! simplejump_p (temp)
	      /* TEMP must skip over the "x = a;" insn */
	      && prev_real_insn (JUMP_LABEL (temp)) == insn
	      && no_labels_between_p (insn, JUMP_LABEL (temp))
	      /* There must be no other entries to the "x = b;" insn.  */
	      && no_labels_between_p (JUMP_LABEL (temp), temp2)
	      /* INSN must either branch to the insn after TEMP2 or the insn
		 after TEMP2 must branch to the same place as INSN.  */
	      && (reallabelprev == temp2
		  || ((temp5 = next_active_insn (temp2)) != 0
		      && simplejump_p (temp5)
		      && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
	    {
	      /* The test expression, X, may be a complicated test with
		 multiple branches.  See if we can find all the uses of
		 the label that TEMP branches to without hitting a CALL_INSN
		 or a jump to somewhere else.  */
	      rtx target = JUMP_LABEL (temp);
	      int nuses = LABEL_NUSES (target);
	      rtx p;
#ifdef HAVE_cc0
	      rtx q;
#endif

	      /* Set P to the first jump insn that goes around "x = a;".  */
	      for (p = temp; nuses && p; p = prev_nonnote_insn (p))
		{
		  if (GET_CODE (p) == JUMP_INSN)
		    {
		      if (condjump_p (p) && ! simplejump_p (p)
			  && JUMP_LABEL (p) == target)
			{
			  nuses--;
			  if (nuses == 0)
			    break;
			}
		      else
			break;
		    }
		  else if (GET_CODE (p) == CALL_INSN)
		    break;
		}

#ifdef HAVE_cc0
	      /* We cannot insert anything between a set of cc and its use
		 so if P uses cc0, we must back up to the previous insn.  */
	      q = prev_nonnote_insn (p);
	      if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
		  && sets_cc0_p (PATTERN (q)))
		p = q;
#endif

	      if (p)
		p = PREV_INSN (p);

	      /* If we found all the uses and there was no data conflict, we
		 can move the assignment unless we can branch into the middle
		 from somewhere.  */
	      if (nuses == 0 && p
		  && no_labels_between_p (p, insn)
		  && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
		  && ! reg_set_between_p (temp1, p, temp3)
		  && (GET_CODE (SET_SRC (temp4)) == CONST_INT
		      || ! modified_between_p (SET_SRC (temp4), p, temp2))
		  /* Verify that registers used by the jump are not clobbered
		     by the instruction being moved.  */
		  && ! regs_set_between_p (PATTERN (temp),
					   PREV_INSN (temp2),
					   NEXT_INSN (temp2)))
		{
		  emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
		  delete_insn (temp2);

		  /* Set NEXT to an insn that we know won't go away.  */
		  next = next_active_insn (insn);

		  /* Delete the jump around the set.  Note that we must do
		     this before we redirect the test jumps so that it won't
		     delete the code immediately following the assignment
		     we moved (which might be a jump).  */

		  delete_insn (insn);

		  /* We either have two consecutive labels or a jump to
		     a jump, so adjust all the JUMP_INSNs to branch to where
		     INSN branches to.  */
		  for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
		    if (GET_CODE (p) == JUMP_INSN)
		      redirect_jump (p, target);

		  changed = 1;
		  continue;
		}
	    }

	  /* Simplify if (...) { x = a; goto l; } x = b; by converting it
	     to x = a; if (...) goto l; x = b;
	     if A is sufficiently simple, the test doesn't involve X,
	     and nothing in the test modifies A or X.

	     If we have small register classes, we also can't do this if X
	     is a hard register.

	     If the "x = a;" insn has any REG_NOTES, we don't do this because
	     of the possibility that we are running after CSE and there is a
	     REG_EQUAL note that is only valid if the branch has already been
	     taken.  If we move the insn with the REG_EQUAL note, we may
	     fold the comparison to always be false in a later CSE pass.
	     (We could also delete the REG_NOTES when moving the insn, but it
	     seems simpler to not move it.)  An exception is that we can move
	     the insn if the only note is a REG_EQUAL or REG_EQUIV whose
	     value is the same as "a".

	     INSN is the goto.

	     We set:

	     TEMP to the jump insn preceding "x = a;"
	     TEMP1 to X
	     TEMP2 to the insn that sets "x = b;"
	     TEMP3 to the insn that sets "x = a;"
	     TEMP4 to the set of "x = a";  */

	  if (this_is_simplejump
	      && (temp2 = next_active_insn (insn)) != 0
	      && GET_CODE (temp2) == INSN
	      && (temp4 = single_set (temp2)) != 0
	      && GET_CODE (temp1 = SET_DEST (temp4)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && (temp3 = prev_active_insn (insn)) != 0
	      && GET_CODE (temp3) == INSN
	      && (temp4 = single_set (temp3)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp1)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && (REG_NOTES (temp3) == 0
		  || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
		       || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
		      && XEXP (REG_NOTES (temp3), 1) == 0
		      && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
				      SET_SRC (temp4))))
	      && (temp = prev_active_insn (temp3)) != 0
	      && condjump_p (temp) && ! simplejump_p (temp)
	      /* TEMP must skip over the "x = a;" insn */
	      && prev_real_insn (JUMP_LABEL (temp)) == insn
	      && no_labels_between_p (temp, insn))
	    {
	      rtx prev_label = JUMP_LABEL (temp);
	      rtx insert_after = prev_nonnote_insn (temp);

#ifdef HAVE_cc0
	      /* We cannot insert anything between a set of cc and its use.  */
	      if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
		  && sets_cc0_p (PATTERN (insert_after)))
		insert_after = prev_nonnote_insn (insert_after);
#endif
	      ++LABEL_NUSES (prev_label);

	      if (insert_after
		  && no_labels_between_p (insert_after, temp)
		  && ! reg_referenced_between_p (temp1, insert_after, temp3)
		  && ! reg_referenced_between_p (temp1, temp3,
						 NEXT_INSN (temp2))
		  && ! reg_set_between_p (temp1, insert_after, temp)
		  && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
		  /* Verify that registers used by the jump are not clobbered
		     by the instruction being moved.  */
		  && ! regs_set_between_p (PATTERN (temp),
					   PREV_INSN (temp3),
					   NEXT_INSN (temp3))
		  && invert_jump (temp, JUMP_LABEL (insn)))
		{
		  emit_insn_after_with_line_notes (PATTERN (temp3),
						   insert_after, temp3);
		  delete_insn (temp3);
		  delete_insn (insn);
		  /* Set NEXT to an insn that we know won't go away.  */
		  next = temp2;
		  changed = 1;
		}
	      if (prev_label && --LABEL_NUSES (prev_label) == 0)
		delete_insn (prev_label);
	      if (changed)
		continue;
	    }

#ifndef HAVE_cc0
	  /* If we have if (...) x = exp; and branches are expensive,
	     EXP is a single insn, does not have any side effects, cannot
	     trap, and is not too costly, convert this to
	     t = exp; if (...) x = t;

	     Don't do this when we have CC0 because it is unlikely to help
	     and we'd need to worry about where to place the new insn and
	     the potential for conflicts.  We also can't do this when we have
	     notes on the insn for the same reason as above.

	     We set:

	     TEMP to the "x = exp;" insn.
	     TEMP1 to the single set in the "x = exp;" insn.
	     TEMP2 to "x".  */

	  if (! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 3
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (reallabelprev == temp
		  || ((temp2 = next_active_insn (temp)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && GET_CODE (SET_SRC (temp1)) != REG
	      && GET_CODE (SET_SRC (temp1)) != SUBREG
	      && GET_CODE (SET_SRC (temp1)) != CONST_INT
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp3 = find_insert_position (insn, temp))
		  && validate_change (temp, &SET_DEST (temp1), new, 0))
		{
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp3), temp);
		  delete_insn (temp);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }

	  /* Similarly, if it takes two insns to compute EXP but they
	     have the same destination.  Here TEMP3 will be the second
	     insn and TEMP4 the SET from that insn.  */

	  if (! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 4
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (temp3 = next_nonnote_insn (temp)) != 0
	      && GET_CODE (temp3) == INSN
	      && REG_NOTES (temp3) == 0
	      && (reallabelprev == temp3
		  || ((temp2 = next_active_insn (temp3)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
	      && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10
	      && (temp4 = single_set (temp3)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp2)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && rtx_cost (SET_SRC (temp4), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp5 = find_insert_position (insn, temp))
		  && (temp6 = find_insert_position (insn, temp3))
		  && validate_change (temp, &SET_DEST (temp1), new, 0))
		{
		  /* Use the earliest of temp5 and temp6.  */
		  if (temp5 != insn)
		    temp6 = temp5;
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp6), temp);
		  emit_insn_after_with_line_notes
		    (replace_rtx (PATTERN (temp3), temp2, new),
		     PREV_INSN (temp6), temp3);
		  delete_insn (temp);
		  delete_insn (temp3);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }

	  /* Finally, handle the case where two insns are used to
	     compute EXP but a temporary register is used.  Here we must
	     ensure that the temporary register is not used anywhere else.  */

	  if (! reload_completed
	      && after_regscan
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 4
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (temp3 = next_nonnote_insn (temp)) != 0
	      && GET_CODE (temp3) == INSN
	      && REG_NOTES (temp3) == 0
	      && (reallabelprev == temp3
		  || ((temp2 = next_active_insn (temp3)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp5 = SET_DEST (temp1),
		  (GET_CODE (temp5) == REG
		   || (GET_CODE (temp5) == SUBREG
		       && (temp5 = SUBREG_REG (temp5),
			   GET_CODE (temp5) == REG))))
	      && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
	      && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
	      && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10
	      && (temp4 = single_set (temp3)) != 0
	      && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
	      && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && rtx_equal_p (SET_DEST (temp4), temp2)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && rtx_cost (SET_SRC (temp4), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp5 = find_insert_position (insn, temp))
		  && (temp6 = find_insert_position (insn, temp3))
		  && validate_change (temp3, &SET_DEST (temp4), new, 0))
		{
		  /* Use the earliest of temp5 and temp6.  */
		  if (temp5 != insn)
		    temp6 = temp5;
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp6), temp);
		  emit_insn_after_with_line_notes (PATTERN (temp3),
						   PREV_INSN (temp6), temp3);
		  delete_insn (temp);
		  delete_insn (temp3);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }
#endif /* HAVE_cc0 */

	  /* Try to use a conditional move (if the target has them), or a
	     store-flag insn.  The general case is:

	     1) x = a; if (...) x = b; and
	     2) if (...) x = b;

	     If the jump would be faster, the machine should not have defined
	     the movcc or scc insns!  These cases are often made by the
	     previous optimization.

	     The second case is treated as  x = x; if (...) x = b;.

	     INSN here is the jump around the store.  We set:

	     TEMP to the "x op= b;" insn.
	     TEMP1 to X.
	     TEMP2 to B.
	     TEMP3 to A (X in the second case).
	     TEMP4 to the condition being tested.
	     TEMP5 to the earliest insn used to find the condition.
	     TEMP6 to the SET of TEMP.  */
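	  /* (Put another way, as an illustration: the net effect is
	     x = COND ? a : b, where COND is the condition under which
	     INSN's branch around the "x = b;" store is taken.)  */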

	  if (/* We can't do this after reload has completed.  */
	      ! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      /* Set TEMP to the "x = b;" insn.  */
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && (temp6 = single_set (temp)) != NULL_RTX
	      && GET_CODE (temp1 = SET_DEST (temp6)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && ! side_effects_p (temp2 = SET_SRC (temp6))
	      && ! may_trap_p (temp2)
	      /* Allow either form, but prefer the former if both apply.
		 There is no point in using the old value of TEMP1 if
		 it is a register, since cse will alias them.  It can
		 lose if the old value is a hard register since CSE
		 won't replace hard registers.  Avoid using TEMP3 if
		 we have small register classes and it is a hard register.  */
	      && (((temp3 = reg_set_last (temp1, insn)) != 0
		   && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
			 && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
		  /* Make the latter case look like  x = x; if (...) x = b;  */
		  || (temp3 = temp1, 1))
	      /* INSN must either branch to the insn after TEMP or the insn
		 after TEMP must branch to the same place as INSN.  */
	      && (reallabelprev == temp
		  || ((temp4 = next_active_insn (temp)) != 0
		      && simplejump_p (temp4)
		      && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
	      && (temp4 = get_condition (insn, &temp5)) != 0
	      /* We must be comparing objects whose modes imply the size.
		 We could handle BLKmode if (1) emit_store_flag could
		 and (2) we could find the size reliably.  */
	      && GET_MODE (XEXP (temp4, 0)) != BLKmode
	      /* Even if branches are cheap, the store_flag optimization
		 can win when the operation to be performed can be
		 expressed directly.  */
#ifdef HAVE_cc0
	      /* If the previous insn sets CC0 and something else, we can't
		 do this since we are going to delete that insn.  */

	      && ! ((temp6 = prev_nonnote_insn (insn)) != 0
		    && GET_CODE (temp6) == INSN
		    && (sets_cc0_p (PATTERN (temp6)) == -1
			|| (sets_cc0_p (PATTERN (temp6)) == 1
			    && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
#endif
	      )
	    {
#ifdef HAVE_conditional_move
	      /* First try a conditional move.  */
	      {
		enum rtx_code code = GET_CODE (temp4);
		rtx var = temp1;
		rtx cond0, cond1, aval, bval;
		rtx target, new_insn;

		/* Copy the compared variables into cond0 and cond1, so that
		   any side effects performed in or after the old comparison
		   will not affect our compare, which will come later.  */
		/* ??? Is it possible to just use the comparison in the jump
		   insn?  After all, we're going to delete it.  We'd have
		   to modify emit_conditional_move to take a comparison rtx
		   instead or write a new function.  */
		cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
		/* We want the target to be able to simplify comparisons with
		   zero (and maybe other constants as well), so don't create
		   pseudos for them.  There's no need to, either.  */
		if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
		    || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
		  cond1 = XEXP (temp4, 1);
		else
		  cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));

		/* Careful about copying these values -- an IOR or the like
		   may need to do other things, like clobber flags.  */
		/* ??? Assume for the moment that AVAL is ok.  */
		aval = temp3;

		start_sequence ();

		/* If we're not dealing with a register or the insn is more
		   complex than a simple SET, duplicate the computation and
		   replace the destination with a new temporary.  */
		if (register_operand (temp2, GET_MODE (var))
		    && GET_CODE (PATTERN (temp)) == SET)
		  bval = temp2;
		else
		  {
		    bval = gen_reg_rtx (GET_MODE (var));
		    new_insn = copy_rtx (temp);
		    temp6 = single_set (new_insn);
		    SET_DEST (temp6) = bval;
		    emit_insn (PATTERN (new_insn));
		  }

		target = emit_conditional_move (var, code,
						cond0, cond1, VOIDmode,
						aval, bval, GET_MODE (var),
						(code == LTU || code == GEU
						 || code == LEU || code == GTU));

		if (target)
		  {
		    rtx seq1, seq2, last;
		    int copy_ok;

		    /* Save the conditional move sequence but don't emit it
		       yet.  On some machines, like the alpha, it is possible
		       that temp5 == insn, so next generate the sequence that
		       saves the compared values and then emit both
		       sequences, ensuring seq1 occurs before seq2.  */
		    seq2 = get_insns ();
		    end_sequence ();

		    /* "Now that we can't fail..."  Famous last words.
		       Generate the copy insns that preserve the compared
		       values.  */
		    start_sequence ();
		    emit_move_insn (cond0, XEXP (temp4, 0));
		    if (cond1 != XEXP (temp4, 1))
		      emit_move_insn (cond1, XEXP (temp4, 1));
		    seq1 = get_insns ();
		    end_sequence ();

		    /* Validate the sequence -- this may be some weird
		       bit-extract-and-test instruction for which there
		       exists no complementary bit-extract insn.  */
		    copy_ok = 1;
		    for (last = seq1; last; last = NEXT_INSN (last))
		      if (recog_memoized (last) < 0)
			{
			  copy_ok = 0;
			  break;
			}

		    if (copy_ok)
		      {
			emit_insns_before (seq1, temp5);

			/* Insert conditional move after insn, to be sure
			   that the jump and a possible compare won't be
			   separated.  */
			last = emit_insns_after (seq2, insn);

			/* ??? We can also delete the insn that sets X to A.
			   Flow will do it too though.  */
			delete_insn (temp);
			next = NEXT_INSN (insn);
			delete_jump (insn);

			if (after_regscan)
			  {
			    reg_scan_update (seq1, NEXT_INSN (last),
					     old_max_reg);
			    old_max_reg = max_reg_num ();
			  }

			changed = 1;
			continue;
		      }
		  }
		else
		  end_sequence ();
	      }
#endif

	      /* That didn't work, try a store-flag insn.

		 We further divide the cases into:

		 1) x = a; if (...) x = b; and either A or B is zero,
		 2) if (...) x = 0; and jumps are expensive,
		 3) x = a; if (...) x = b; and A and B are constants where all
		    the set bits in A are also set in B and jumps are expensive,
		 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
		    more expensive, and
		 5) if (...) x = b; if jumps are even more expensive.  */
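	      /* (Worked example for case 1, assuming STORE_FLAG_VALUE == 1:
		 `x = 0; if (c) x = 1;' needs no branch at all -- it is just
		 the single store-flag insn  x = (c != 0);.)  */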

	      if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
		  && ((GET_CODE (temp3) == CONST_INT)
		      /* Make the latter case look like
			 x = x; if (...) x = 0;  */
		      || (temp3 = temp1,
			  ((BRANCH_COST >= 2
			    && temp2 == const0_rtx)
			   || BRANCH_COST >= 3)))
		  /* If B is zero, OK; if A is zero, can only do (1) if we
		     can reverse the condition.  See if (3) applies possibly
		     by reversing the condition.  Prefer reversing to (4) when
		     branches are very expensive.  */
		  && (((BRANCH_COST >= 2
			|| STORE_FLAG_VALUE == -1
			|| (STORE_FLAG_VALUE == 1
			    /* Check that the mask is a power of two,
			       so that it can probably be generated
			       with a shift.  */
			    && GET_CODE (temp3) == CONST_INT
			    && exact_log2 (INTVAL (temp3)) >= 0))
		       && (reversep = 0, temp2 == const0_rtx))
		      || ((BRANCH_COST >= 2
			   || STORE_FLAG_VALUE == -1
			   || (STORE_FLAG_VALUE == 1
			       && GET_CODE (temp2) == CONST_INT
			       && exact_log2 (INTVAL (temp2)) >= 0))
			  && temp3 == const0_rtx
			  && (reversep = can_reverse_comparison_p (temp4, insn)))
		      || (BRANCH_COST >= 2
			  && GET_CODE (temp2) == CONST_INT
			  && GET_CODE (temp3) == CONST_INT
			  && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
			      || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
				  && (reversep = can_reverse_comparison_p (temp4,
									   insn)))))
		      || BRANCH_COST >= 3)
		  )
		{
		  enum rtx_code code = GET_CODE (temp4);
		  rtx uval, cval, var = temp1;
		  int normalizep;
		  rtx target;

		  /* If necessary, reverse the condition.  */
		  if (reversep)
		    code = reverse_condition (code), uval = temp2, cval = temp3;
		  else
		    uval = temp3, cval = temp2;

		  /* If CVAL is non-zero, normalize to -1.  Otherwise, if UVAL
		     is the constant 1, it is best to just compute the result
		     directly.  If UVAL is constant and STORE_FLAG_VALUE
		     includes all of its bits, it is best to compute the flag
		     value unnormalized and `and' it with UVAL.  Otherwise,
		     normalize to -1 and `and' with UVAL.  */
		  normalizep = (cval != const0_rtx ? -1
				: (uval == const1_rtx ? 1
				   : (GET_CODE (uval) == CONST_INT
				      && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
				   ? 0 : -1));
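		  /* (A worked illustration: with STORE_FLAG_VALUE == -1 and
		     `x = 0; if (c) x = 5;', the condition is reversed so
		     UVAL == 5 and CVAL == 0, giving normalizep == 0; the
		     unnormalized flag is all ones when C holds, and the
		     `and' with 5 computes  x = c ? 5 : 0  with no branch.)  */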

		  /* We will be putting the store-flag insn immediately in
		     front of the comparison that was originally being done,
		     so we know all the variables in TEMP4 will be valid.
		     However, this might be in front of the assignment of
		     A to VAR.  If it is, it would clobber the store-flag
		     we will be emitting.

		     Therefore, emit into a temporary which will be copied to
		     VAR immediately after TEMP.  */

		  start_sequence ();
		  target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
					    XEXP (temp4, 0), XEXP (temp4, 1),
					    VOIDmode,
					    (code == LTU || code == LEU
					     || code == GEU || code == GTU),
					    normalizep);
		  if (target)
		    {
		      rtx seq;
		      rtx before = insn;

		      seq = get_insns ();
		      end_sequence ();

		      /* Put the store-flag insns in front of the first insn
			 used to compute the condition to ensure that we
			 use the same values of them as the current
			 comparison.  However, the remainder of the insns we
			 generate will be placed directly in front of the
			 jump insn, in case any of the pseudos we use
			 are modified earlier.  */

		      emit_insns_before (seq, temp5);

		      start_sequence ();

		      /* Both CVAL and UVAL are non-zero.  */
		      if (cval != const0_rtx && uval != const0_rtx)
			{
			  rtx tem1, tem2;

			  tem1 = expand_and (uval, target, NULL_RTX);
			  if (GET_CODE (cval) == CONST_INT
			      && GET_CODE (uval) == CONST_INT
			      && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
			    tem2 = cval;
			  else
			    {
			      tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
						  target, NULL_RTX, 0);
			      tem2 = expand_and (cval, tem2,
						 (GET_CODE (tem2) == REG
						  ? tem2 : 0));
			    }

			  /* If we usually make new pseudos, do so here.  This
			     turns out to help machines that have conditional
			     move insns.  */
			  /* ??? Conditional moves have already been handled.
			     This may be obsolete.  */

			  if (flag_expensive_optimizations)
			    target = 0;

			  target = expand_binop (GET_MODE (var), ior_optab,
						 tem1, tem2, target,
						 1, OPTAB_WIDEN);
			}
		      else if (normalizep != 1)
			{
			  /* We know that either CVAL or UVAL is zero.  If
			     UVAL is zero, negate TARGET and `and' with CVAL.
			     Otherwise, `and' with UVAL.  */
			  if (uval == const0_rtx)
			    {
			      target = expand_unop (GET_MODE (var), one_cmpl_optab,
						    target, NULL_RTX, 0);
			      uval = cval;
			    }

			  target = expand_and (uval, target,
					       (GET_CODE (target) == REG
						&& ! preserve_subexpressions_p ()
						? target : NULL_RTX));
			}

		      emit_move_insn (var, target);
		      seq = get_insns ();
		      end_sequence ();
#ifdef HAVE_cc0
		      /* If INSN uses CC0, we must not separate it from the
			 insn that sets cc0.  */
		      if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
			before = prev_nonnote_insn (before);
#endif
		      emit_insns_before (seq, before);

		      delete_insn (temp);
		      next = NEXT_INSN (insn);
		      delete_jump (insn);

		      if (after_regscan)
			{
			  reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
			  old_max_reg = max_reg_num ();
			}

		      changed = 1;
		      continue;
		    }
		  else
		    end_sequence ();
		}
	    }

	  /* If branches are expensive, convert
		if (foo) bar++;    to    bar += (foo != 0);
	     and similarly for "bar--;"

	     INSN is the conditional branch around the arithmetic.  We set:

	     TEMP to the arithmetic insn.
	     TEMP1 to the SET doing the arithmetic.
	     TEMP2 to the operand being incremented or decremented.
	     TEMP3 to the condition being tested.
	     TEMP4 to the earliest insn used to find the condition.  */

	  if ((BRANCH_COST >= 2
#ifdef HAVE_incscc
	       || HAVE_incscc
#endif
#ifdef HAVE_decscc
	       || HAVE_decscc
#endif
	       )
	      && ! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && (temp = next_nonnote_insn (insn)) != 0
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1),
		  GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
	      && GET_CODE (SET_SRC (temp1)) == PLUS
	      && (XEXP (SET_SRC (temp1), 1) == const1_rtx
		  || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
	      && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
	      && ! side_effects_p (temp2)
	      && ! may_trap_p (temp2)
	      /* INSN must either branch to the insn after TEMP or the insn
		 after TEMP must branch to the same place as INSN.  */
	      && (reallabelprev == temp
		  || ((temp3 = next_active_insn (temp)) != 0
		      && simplejump_p (temp3)
		      && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
	      && (temp3 = get_condition (insn, &temp4)) != 0
	      /* We must be comparing objects whose modes imply the size.
		 We could handle BLKmode if (1) emit_store_flag could
		 and (2) we could find the size reliably.  */
	      && GET_MODE (XEXP (temp3, 0)) != BLKmode
	      && can_reverse_comparison_p (temp3, insn))
	    {
	      rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
	      enum rtx_code code = reverse_condition (GET_CODE (temp3));

	      start_sequence ();

	      /* It must be the case that TEMP2 is not modified in the range
		 [TEMP4, INSN).  The one exception we make is if the insn
		 before INSN sets TEMP2 to something which is also unchanged
		 in that range.  In that case, we can move the initialization
		 into our sequence.  */

	      if ((temp5 = prev_active_insn (insn)) != 0
		  && no_labels_between_p (temp5, insn)
		  && GET_CODE (temp5) == INSN
		  && (temp6 = single_set (temp5)) != 0
		  && rtx_equal_p (temp2, SET_DEST (temp6))
		  && (CONSTANT_P (SET_SRC (temp6))
		      || GET_CODE (SET_SRC (temp6)) == REG
		      || GET_CODE (SET_SRC (temp6)) == SUBREG))
		{
		  emit_insn (PATTERN (temp5));
		  init_insn = temp5;
		  init = SET_SRC (temp6);
		}

	      if (CONSTANT_P (init)
		  || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
		target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
					  XEXP (temp3, 0), XEXP (temp3, 1),
					  VOIDmode,
					  (code == LTU || code == LEU
					   || code == GTU || code == GEU), 1);

	      /* If we can do the store-flag, do the addition or
		 subtraction.  */

	      if (target)
		target = expand_binop (GET_MODE (temp2),
				       (XEXP (SET_SRC (temp1), 1) == const1_rtx
					? add_optab : sub_optab),
				       temp2, target, temp2, 0, OPTAB_WIDEN);

	      if (target != 0)
		{
		  /* Put the result back in temp2 in case it isn't already.
		     Then replace the jump, possibly a CC0-setting insn in
		     front of the jump, and TEMP, with the sequence we have
		     made.  */

		  if (target != temp2)
		    emit_move_insn (temp2, target);

		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, temp4);
		  delete_insn (temp);

		  if (init_insn)
		    delete_insn (init_insn);

		  next = NEXT_INSN (insn);
#ifdef HAVE_cc0
		  delete_insn (prev_nonnote_insn (insn));
#endif
		  delete_insn (insn);

		  if (after_regscan)
		    {
		      reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }

		  changed = 1;
		  continue;
		}
	      else
		end_sequence ();
	    }

	  /* Simplify   if (...) x = 1; else {...}  if (x) ...
	     We recognize this case scanning backwards as well.

	     TEMP is the assignment to x;
	     TEMP1 is the label at the head of the second if.  */
	  /* ?? This should call get_condition to find the values being
	     compared, instead of looking for a COMPARE insn when HAVE_cc0
	     is not defined.  This would allow it to work on the m88k.  */
	  /* ?? This optimization is only safe before cse is run if HAVE_cc0
	     is not defined and the condition is tested by a separate compare
	     insn.  This is because the code below assumes that the result
	     of the compare dies in the following branch.

	     Not only that, but there might be other insns between the
	     compare and branch whose results are live.  Those insns need
	     to be executed.

	     A way to fix this is to move the insns at JUMP_LABEL (insn)
	     to before INSN.  If we are running before flow, they will
	     be deleted if they aren't needed.  But this doesn't work
	     well after flow.

	     This is really a special case of jump threading, anyway.  The
	     right thing to do is to replace this and jump threading with
	     much simpler code in cse.

	     This code has been turned off in the non-cc0 case in the
	     meantime.  */

#ifdef HAVE_cc0
	  else if (this_is_simplejump
		   /* Safe to skip USE and CLOBBER insns here
		      since they will not be deleted.  */
		   && (temp = prev_active_insn (insn))
		   && no_labels_between_p (temp, insn)
		   && GET_CODE (temp) == INSN
		   && GET_CODE (PATTERN (temp)) == SET
		   && GET_CODE (SET_DEST (PATTERN (temp))) == REG
		   && CONSTANT_P (SET_SRC (PATTERN (temp)))
		   && (temp1 = next_active_insn (JUMP_LABEL (insn)))
		   /* If we find that the next value tested is `x'
		      (TEMP1 is the insn where this happens), win.  */
		   && GET_CODE (temp1) == INSN
		   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
		   /* Does temp1 `tst' the value of x?  */
		   && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
		   && SET_DEST (PATTERN (temp1)) == cc0_rtx
		   && (temp1 = next_nonnote_insn (temp1))
#else
		   /* Does temp1 compare the value of x against zero?  */
		   && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
		   && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
		   && (XEXP (SET_SRC (PATTERN (temp1)), 0)
		       == SET_DEST (PATTERN (temp)))
		   && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
		   && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
#endif
		   && condjump_p (temp1))
	    {
	      /* Get the if_then_else from the condjump.  */
	      rtx choice = SET_SRC (PATTERN (temp1));
	      if (GET_CODE (choice) == IF_THEN_ELSE)
		{
		  enum rtx_code code = GET_CODE (XEXP (choice, 0));
		  rtx val = SET_SRC (PATTERN (temp));
		  rtx cond
		    = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
						     val, const0_rtx);
		  rtx ultimate;

		  if (cond == const_true_rtx)
		    ultimate = XEXP (choice, 1);
		  else if (cond == const0_rtx)
		    ultimate = XEXP (choice, 2);
		  else
		    ultimate = 0;

		  if (ultimate == pc_rtx)
		    ultimate = get_label_after (temp1);
		  else if (ultimate && GET_CODE (ultimate) != RETURN)
		    ultimate = XEXP (ultimate, 0);

		  if (ultimate && JUMP_LABEL (insn) != ultimate)
		    changed |= redirect_jump (insn, ultimate);
		}
	    }
#endif

#if 0
	  /* @@ This needs a bit of work before it will be right.

	     Any type of comparison can be accepted for the first and
	     second compare.  When rewriting the first jump, we must
	     compute what conditions can reach label3, and use the
	     appropriate code.  We cannot simply reverse/swap the code
	     of the first jump.  In some cases, the second jump must be
	     rewritten also.

	     For example,
	     < == converts to > ==
	     < != converts to == >
	     etc.

	     If the code is written to only accept an '==' test for the second
	     compare, then all that needs to be done is to swap the condition
	     of the first branch.

	     It is questionable whether we want this optimization anyway,
	     since if the user wrote code like this because they knew that
	     the jump to label1 is taken most of the time, then rewriting
	     this gives slower code.  */
	  /* @@ This should call get_condition to find the values being
	     compared, instead of looking for a COMPARE insn when HAVE_cc0
	     is not defined.  This would allow it to work on the m88k.  */
	  /* @@ This optimization is only safe before cse is run if HAVE_cc0
	     is not defined and the condition is tested by a separate compare
	     insn.  This is because the code below assumes that the result
	     of the compare dies in the following branch.  */

	  /* Simplify  test a ~= b
		       condjump label1;
		       test a == b
		       condjump label2;
		       jump label3;
		       label1:

	     rewriting as
		       test a ~~= b
		       condjump label3
		       test a == b
		       condjump label2
		       label1:

	     where ~= is an inequality, e.g. >, and ~~= is the swapped
	     inequality, e.g. <.

	     We recognize this case scanning backwards.

	     TEMP is the conditional jump to `label2';
	     TEMP1 is the test for `a == b';
	     TEMP2 is the conditional jump to `label1';
	     TEMP3 is the test for `a ~= b'.  */
	  else if (this_is_simplejump
		   && (temp = prev_active_insn (insn))
		   && no_labels_between_p (temp, insn)
		   && condjump_p (temp)
		   && (temp1 = prev_active_insn (temp))
		   && no_labels_between_p (temp1, temp)
		   && GET_CODE (temp1) == INSN
		   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
		   && sets_cc0_p (PATTERN (temp1)) == 1
#else
		   && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
		   && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
		   && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
#endif
		   && (temp2 = prev_active_insn (temp1))
		   && no_labels_between_p (temp2, temp1)
		   && condjump_p (temp2)
		   && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
		   && (temp3 = prev_active_insn (temp2))
		   && no_labels_between_p (temp3, temp2)
		   && GET_CODE (PATTERN (temp3)) == SET
		   && rtx_equal_p (SET_DEST (PATTERN (temp3)),
				   SET_DEST (PATTERN (temp1)))
		   && rtx_equal_p (SET_SRC (PATTERN (temp1)),
				   SET_SRC (PATTERN (temp3)))
		   && ! inequality_comparisons_p (PATTERN (temp))
		   && inequality_comparisons_p (PATTERN (temp2)))
	    {
	      rtx fallthrough_label = JUMP_LABEL (temp2);

	      ++LABEL_NUSES (fallthrough_label);
	      if (swap_jump (temp2, JUMP_LABEL (insn)))
		{
		  delete_insn (insn);
		  changed = 1;
		}

	      if (--LABEL_NUSES (fallthrough_label) == 0)
		delete_insn (fallthrough_label);
	    }
#endif
1551 /* Simplify if (...) {... x = 1;} if (x) ...
1552
1553 We recognize this case backwards.
1554
1555 TEMP is the test of `x';
1556 TEMP1 is the assignment to `x' at the end of the
1557 previous statement. */
1558 /* @@ This should call get_condition to find the values being
1559 compared, instead of looking for a COMPARE insn when HAVE_cc0
1560 is not defined. This would allow it to work on the m88k. */
1561 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1562 is not defined and the condition is tested by a separate compare
1563 insn. This is because the code below assumes that the result
1564 of the compare dies in the following branch. */
1565
1566 /* ??? This has to be turned off. The problem is that the
1567 unconditional jump might indirectly end up branching to the
1568 label between TEMP1 and TEMP. We can't detect this, in general,
1569 since it may become a jump to there after further optimizations.
1570 If that jump is done, it will be deleted, so we will retry
1571 this optimization in the next pass, thus an infinite loop.
1572
1573 The present code prevents this by putting the jump after the
1574 label, but this is not logically correct. */
1575 #if 0
1576 else if (this_is_condjump
1577 /* Safe to skip USE and CLOBBER insns here
1578 since they will not be deleted. */
1579 && (temp = prev_active_insn (insn))
1580 && no_labels_between_p (temp, insn)
1581 && GET_CODE (temp) == INSN
1582 && GET_CODE (PATTERN (temp)) == SET
1583 #ifdef HAVE_cc0
1584 && sets_cc0_p (PATTERN (temp)) == 1
1585 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1586 #else
1587 /* Temp must be a compare insn, we can not accept a register
1588 to register move here, since it may not be simply a
1589 tst insn. */
1590 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1591 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1592 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1593 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1594 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1595 #endif
1596 /* May skip USE or CLOBBER insns here
1597 for checking for opportunity, since we
1598 take care of them later. */
1599 && (temp1 = prev_active_insn (temp))
1600 && GET_CODE (temp1) == INSN
1601 && GET_CODE (PATTERN (temp1)) == SET
1602 #ifdef HAVE_cc0
1603 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1604 #else
1605 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1606 == SET_DEST (PATTERN (temp1)))
1607 #endif
1608 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1609 /* If this isn't true, cse will do the job. */
1610 && ! no_labels_between_p (temp1, temp))
1611 {
1612 /* Get the if_then_else from the condjump. */
1613 rtx choice = SET_SRC (PATTERN (insn));
1614 if (GET_CODE (choice) == IF_THEN_ELSE
1615 && (GET_CODE (XEXP (choice, 0)) == EQ
1616 || GET_CODE (XEXP (choice, 0)) == NE))
1617 {
1618 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1619 rtx last_insn;
1620 rtx ultimate;
1621 rtx p;
1622
1623 /* Get the place that condjump will jump to
1624 if it is reached from here. */
1625 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1626 == want_nonzero)
1627 ultimate = XEXP (choice, 1);
1628 else
1629 ultimate = XEXP (choice, 2);
1630 /* Get it as a CODE_LABEL. */
1631 if (ultimate == pc_rtx)
1632 ultimate = get_label_after (insn);
1633 else
1634 /* Get the label out of the LABEL_REF. */
1635 ultimate = XEXP (ultimate, 0);
1636
1637 /* Insert the jump immediately before TEMP, specifically
1638 after the label that is between TEMP1 and TEMP. */
1639 last_insn = PREV_INSN (temp);
1640
1641 /* If we would be branching to the next insn, the jump
1642 would immediately be deleted and then re-inserted in
1643 a subsequent pass over the code. So don't do anything
1644 in that case. */
1645 if (next_active_insn (last_insn)
1646 != next_active_insn (ultimate))
1647 {
1648 emit_barrier_after (last_insn);
1649 p = emit_jump_insn_after (gen_jump (ultimate),
1650 last_insn);
1651 JUMP_LABEL (p) = ultimate;
1652 ++LABEL_NUSES (ultimate);
1653 if (INSN_UID (ultimate) < max_jump_chain
1654 && INSN_UID (p) < max_jump_chain)
1655 {
1656 jump_chain[INSN_UID (p)]
1657 = jump_chain[INSN_UID (ultimate)];
1658 jump_chain[INSN_UID (ultimate)] = p;
1659 }
1660 changed = 1;
1661 continue;
1662 }
1663 }
1664 }
1665 #endif
1666 /* Detect a conditional jump going to the same place
1667 as an immediately following unconditional jump. */
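/* For example (sketch):

       (jump_insn ... if cond goto L1 ...)   <- INSN
       (jump_insn ... goto L2 ...)           <- TEMP
       ... where L1 and L2 lead to the same active insn ...

   Both paths reach the same place, so the conditional jump does
   no useful work and can be deleted.  */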
1668 else if (this_is_condjump
1669 && (temp = next_active_insn (insn)) != 0
1670 && simplejump_p (temp)
1671 && (next_active_insn (JUMP_LABEL (insn))
1672 == next_active_insn (JUMP_LABEL (temp))))
1673 {
1674 rtx tem = temp;
1675
1676 /* ??? Optional. Disables some optimizations, but makes
1677 gcov output more accurate with -O. */
1678 if (flag_test_coverage && !reload_completed)
1679 for (tem = insn; tem != temp; tem = NEXT_INSN (tem))
1680 if (GET_CODE (tem) == NOTE && NOTE_LINE_NUMBER (tem) > 0)
1681 break;
1682
1683 if (tem == temp)
1684 {
1685 delete_jump (insn);
1686 changed = 1;
1687 continue;
1688 }
1689 }
1690 #ifdef HAVE_trap
1691 /* Detect a conditional jump jumping over an unconditional trap. */
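/* For example (sketch): the sequence

       (jump_insn ... if (eq x 0) goto L ...)   <- INSN
       (trap_if (const_int 1) n)                <- REALLABELPREV
       L:

   becomes the single conditional trap (trap_if (ne x 0) n),
   since the trap fires exactly when the jump is not taken.  */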
1692 else if (HAVE_trap
1693 && this_is_condjump && ! this_is_simplejump
1694 && reallabelprev != 0
1695 && GET_CODE (reallabelprev) == INSN
1696 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1697 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1698 && prev_active_insn (reallabelprev) == insn
1699 && no_labels_between_p (insn, reallabelprev)
1700 && (temp2 = get_condition (insn, &temp4))
1701 && can_reverse_comparison_p (temp2, insn))
1702 {
1703 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1704 XEXP (temp2, 0), XEXP (temp2, 1),
1705 TRAP_CODE (PATTERN (reallabelprev)));
1706
1707 if (new)
1708 {
1709 emit_insn_before (new, temp4);
1710 delete_insn (reallabelprev);
1711 delete_jump (insn);
1712 changed = 1;
1713 continue;
1714 }
1715 }
1716 /* Detect a jump jumping to an unconditional trap. */
1717 else if (HAVE_trap && this_is_condjump
1718 && (temp = next_active_insn (JUMP_LABEL (insn)))
1719 && GET_CODE (temp) == INSN
1720 && GET_CODE (PATTERN (temp)) == TRAP_IF
1721 && (this_is_simplejump
1722 || (temp2 = get_condition (insn, &temp4))))
1723 {
1724 rtx tc = TRAP_CONDITION (PATTERN (temp));
1725
1726 if (tc == const_true_rtx
1727 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1728 {
1729 rtx new;
1730 /* Replace an unconditional jump to a trap with a trap. */
1731 if (this_is_simplejump)
1732 {
1733 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1734 delete_jump (insn);
1735 changed = 1;
1736 continue;
1737 }
1738 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1739 XEXP (temp2, 1),
1740 TRAP_CODE (PATTERN (temp)));
1741 if (new)
1742 {
1743 emit_insn_before (new, temp4);
1744 delete_jump (insn);
1745 changed = 1;
1746 continue;
1747 }
1748 }
1749 /* If the trap condition and jump condition are mutually
1750 exclusive, redirect the jump to the following insn. */
1751 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1752 && ! this_is_simplejump
1753 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
1754 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
1755 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
1756 && redirect_jump (insn, get_label_after (temp)))
1757 {
1758 changed = 1;
1759 continue;
1760 }
1761 }
1762 #endif
1763
1764 /* Detect a conditional jump jumping over an unconditional jump. */
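/* For example (sketch):

       (jump_insn ... if cond goto L1 ...)   <- INSN
       (jump_insn ... goto L2 ...)           <- REALLABELPREV
       L1:

   INSN is inverted and redirected to L2, the unconditional jump
   is deleted, and control falls through to the code at L1.  */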
1765
1766 else if ((this_is_condjump || this_is_condjump_in_parallel)
1767 && ! this_is_simplejump
1768 && reallabelprev != 0
1769 && GET_CODE (reallabelprev) == JUMP_INSN
1770 && prev_active_insn (reallabelprev) == insn
1771 && no_labels_between_p (insn, reallabelprev)
1772 && simplejump_p (reallabelprev))
1773 {
1774 /* When we invert the unconditional jump, we will be
1775 decrementing the usage count of its old label.
1776 Make sure that we don't delete it now because that
1777 might cause the following code to be deleted. */
1778 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1779 rtx prev_label = JUMP_LABEL (insn);
1780
1781 if (prev_label)
1782 ++LABEL_NUSES (prev_label);
1783
1784 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1785 {
1786 /* It is very likely that if there are USE insns before
1787 this jump, they hold REG_DEAD notes. These REG_DEAD
1788 notes are no longer valid due to this optimization,
1789 and will cause later passes that do life analysis
1790 (notably delayed-branch scheduling) to think that
1791 these registers are dead when they are not.
1792
1793 To prevent this trouble, we just remove the USE insns
1794 from the insn chain. */
1795
1796 while (prev_uses && GET_CODE (prev_uses) == INSN
1797 && GET_CODE (PATTERN (prev_uses)) == USE)
1798 {
1799 rtx useless = prev_uses;
1800 prev_uses = prev_nonnote_insn (prev_uses);
1801 delete_insn (useless);
1802 }
1803
1804 delete_insn (reallabelprev);
1805 next = insn;
1806 changed = 1;
1807 }
1808
1809 /* We can now safely delete the label if it is unreferenced
1810 since the delete_insn above has deleted the BARRIER. */
1811 if (prev_label && --LABEL_NUSES (prev_label) == 0)
1812 delete_insn (prev_label);
1813 continue;
1814 }
1815 else
1816 {
1817 /* Detect a jump to a jump. */
1818
1819 nlabel = follow_jumps (JUMP_LABEL (insn));
1820 if (nlabel != JUMP_LABEL (insn)
1821 && redirect_jump (insn, nlabel))
1822 {
1823 changed = 1;
1824 next = insn;
1825 }
1826
1827 /* Look for if (foo) bar; else break; */
1828 /* The insns look like this:
1829 insn = condjump label1;
1830 ...range1 (some insns)...
1831 jump label2;
1832 label1:
1833 ...range2 (some insns)...
1834 jump somewhere unconditionally
1835 label2: */
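/* After the transformation the stream reads (sketch):
   insn = inverted condjump label1;
   ...range2 (ends in the unconditional jump)...
   label1:
   ...range1 (ends in jump label2)...
   label2:
   so the same insns execute in each case, without the jump
   around range2.  */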
1836 {
1837 rtx label1 = next_label (insn);
1838 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1839 /* Don't do this optimization on the first round, so that
1840 jump-around-a-jump gets simplified before we ask here
1841 whether a jump is unconditional.
1842
1843 Also don't do it when we are called after reload since
1844 it will confuse reorg. */
1845 if (! first
1846 && (reload_completed ? ! flag_delayed_branch : 1)
1847 /* Make sure INSN is something we can invert. */
1848 && condjump_p (insn)
1849 && label1 != 0
1850 && JUMP_LABEL (insn) == label1
1851 && LABEL_NUSES (label1) == 1
1852 && GET_CODE (range1end) == JUMP_INSN
1853 && simplejump_p (range1end))
1854 {
1855 rtx label2 = next_label (label1);
1856 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1857 if (range1end != range2end
1858 && JUMP_LABEL (range1end) == label2
1859 && GET_CODE (range2end) == JUMP_INSN
1860 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1861 /* Invert the jump condition, so we
1862 still execute the same insns in each case. */
1863 && invert_jump (insn, label1))
1864 {
1865 rtx range1beg = next_active_insn (insn);
1866 rtx range2beg = next_active_insn (label1);
1867 rtx range1after, range2after;
1868 rtx range1before, range2before;
1869 rtx rangenext;
1870
1871 /* Include in each range any notes before it, to be
1872 sure that we get the line number note if any, even
1873 if there are other notes here. */
1874 while (PREV_INSN (range1beg)
1875 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
1876 range1beg = PREV_INSN (range1beg);
1877
1878 while (PREV_INSN (range2beg)
1879 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
1880 range2beg = PREV_INSN (range2beg);
1881
1882 /* Don't move NOTEs for blocks or loops; shift them
1883 outside the ranges, where they'll stay put. */
1884 range1beg = squeeze_notes (range1beg, range1end);
1885 range2beg = squeeze_notes (range2beg, range2end);
1886
1887 /* Get current surrounds of the 2 ranges. */
1888 range1before = PREV_INSN (range1beg);
1889 range2before = PREV_INSN (range2beg);
1890 range1after = NEXT_INSN (range1end);
1891 range2after = NEXT_INSN (range2end);
1892
1893 /* Splice range2 where range1 was. */
1894 NEXT_INSN (range1before) = range2beg;
1895 PREV_INSN (range2beg) = range1before;
1896 NEXT_INSN (range2end) = range1after;
1897 PREV_INSN (range1after) = range2end;
1898 /* Splice range1 where range2 was. */
1899 NEXT_INSN (range2before) = range1beg;
1900 PREV_INSN (range1beg) = range2before;
1901 NEXT_INSN (range1end) = range2after;
1902 PREV_INSN (range2after) = range1end;
1903
1904 /* Check for a loop end note between the end of
1905 range2, and the next code label. If there is one,
1906 then what we have really seen is
1907 if (foo) break; end_of_loop;
1908 and moved the break sequence outside the loop.
1909 We must move the LOOP_END note to where the
1910 loop really ends now, or we will confuse loop
1911 optimization. Stop if we find a LOOP_BEG note
1912 first, since we don't want to move the LOOP_END
1913 note in that case. */
1914 for (;range2after != label2; range2after = rangenext)
1915 {
1916 rangenext = NEXT_INSN (range2after);
1917 if (GET_CODE (range2after) == NOTE)
1918 {
1919 if (NOTE_LINE_NUMBER (range2after)
1920 == NOTE_INSN_LOOP_END)
1921 {
1922 NEXT_INSN (PREV_INSN (range2after))
1923 = rangenext;
1924 PREV_INSN (rangenext)
1925 = PREV_INSN (range2after);
1926 PREV_INSN (range2after)
1927 = PREV_INSN (range1beg);
1928 NEXT_INSN (range2after) = range1beg;
1929 NEXT_INSN (PREV_INSN (range1beg))
1930 = range2after;
1931 PREV_INSN (range1beg) = range2after;
1932 }
1933 else if (NOTE_LINE_NUMBER (range2after)
1934 == NOTE_INSN_LOOP_BEG)
1935 break;
1936 }
1937 }
1938 changed = 1;
1939 continue;
1940 }
1941 }
1942 }
1943
1944 /* Now that the jump has been tensioned,
1945 try cross jumping: check for identical code
1946 before the jump and before its target label. */
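/* For example (sketch): if the insns just before INSN match the
   insns just before INSN's target label, the copy before INSN is
   deleted and INSN is redirected to a label placed before the
   surviving copy.  */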
1947
1948 /* First, cross jumping of conditional jumps: */
1949
1950 if (cross_jump && condjump_p (insn))
1951 {
1952 rtx newjpos, newlpos;
1953 rtx x = prev_real_insn (JUMP_LABEL (insn));
1954
1955 /* A conditional jump may be crossjumped
1956 only if the place it jumps to follows
1957 an opposing jump that comes back here. */
1958
1959 if (x != 0 && ! jump_back_p (x, insn))
1960 /* We have no opposing jump;
1961 cannot cross jump this insn. */
1962 x = 0;
1963
1964 newjpos = 0;
1965 /* X is nonzero if it is ok to cross jump to code
1966 before X. If so, see if the code matches. */
1967 if (x != 0)
1968 find_cross_jump (insn, x, 2,
1969 &newjpos, &newlpos);
1970
1971 if (newjpos != 0)
1972 {
1973 do_cross_jump (insn, newjpos, newlpos);
1974 /* Make the old conditional jump
1975 into an unconditional one. */
1976 SET_SRC (PATTERN (insn))
1977 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
1978 INSN_CODE (insn) = -1;
1979 emit_barrier_after (insn);
1980 /* Add to jump_chain unless this is a new label
1981 whose UID is too large. */
1982 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
1983 {
1984 jump_chain[INSN_UID (insn)]
1985 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1986 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
1987 }
1988 changed = 1;
1989 next = insn;
1990 }
1991 }
1992
1993 /* Cross jumping of unconditional jumps:
1994 a few differences. */
1995
1996 if (cross_jump && simplejump_p (insn))
1997 {
1998 rtx newjpos, newlpos;
1999 rtx target;
2000
2001 newjpos = 0;
2002
2003 /* See if the code before this jump matches the code
2004 before the label it jumps to. */
2005 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2006 &newjpos, &newlpos);
2007
2008 /* If we cannot cross jump to code before the label,
2009 see if we can cross jump to another jump to
2010 the same label. */
2011 /* Try each other jump to this label. */
2012 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2013 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2014 target != 0 && newjpos == 0;
2015 target = jump_chain[INSN_UID (target)])
2016 if (target != insn
2017 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2018 /* Ignore TARGET if it's deleted. */
2019 && ! INSN_DELETED_P (target))
2020 find_cross_jump (insn, target, 2,
2021 &newjpos, &newlpos);
2022
2023 if (newjpos != 0)
2024 {
2025 do_cross_jump (insn, newjpos, newlpos);
2026 changed = 1;
2027 next = insn;
2028 }
2029 }
2030
2031 /* This code was dead in the previous jump.c! */
2032 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2033 {
2034 /* Return insns all "jump to the same place"
2035 so we can cross-jump between any two of them. */
2036
2037 rtx newjpos, newlpos, target;
2038
2039 newjpos = 0;
2040
2041 /* RETURN insns have no label; instead, see if we can
2042 cross jump to another jump that is also a return,
2043 since all returns jump to the same place. */
2044 /* Try each other return in the jump chain. */
2045 for (target = jump_chain[0];
2046 target != 0 && newjpos == 0;
2047 target = jump_chain[INSN_UID (target)])
2048 if (target != insn
2049 && ! INSN_DELETED_P (target)
2050 && GET_CODE (PATTERN (target)) == RETURN)
2051 find_cross_jump (insn, target, 2,
2052 &newjpos, &newlpos);
2053
2054 if (newjpos != 0)
2055 {
2056 do_cross_jump (insn, newjpos, newlpos);
2057 changed = 1;
2058 next = insn;
2059 }
2060 }
2061 }
2062 }
2063
2064 first = 0;
2065 }
2066
2067 /* Delete extraneous line number notes.
2068 Note that two consecutive notes for different lines are not really
2069 extraneous. There should be some indication where that line belonged,
2070 even if it became empty. */
2071
2072 {
2073 rtx last_note = 0;
2074
2075 for (insn = f; insn; insn = NEXT_INSN (insn))
2076 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2077 {
2078 /* Delete this note if it is identical to previous note. */
2079 if (last_note
2080 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2081 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2082 {
2083 delete_insn (insn);
2084 continue;
2085 }
2086
2087 last_note = insn;
2088 }
2089 }
2090
2091 #ifdef HAVE_return
2092 if (HAVE_return)
2093 {
2094 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2095 in front of it. If the machine allows it at this point (we might be
2096 after reload for a leaf routine), it will improve optimization for it
2097 to be there. We do this both here and at the start of this pass since
2098 the RETURN might have been deleted by some of our optimizations. */
2099 insn = get_last_insn ();
2100 while (insn && GET_CODE (insn) == NOTE)
2101 insn = PREV_INSN (insn);
2102
2103 if (insn && GET_CODE (insn) != BARRIER)
2104 {
2105 emit_jump_insn (gen_return ());
2106 emit_barrier ();
2107 }
2108 }
2109 #endif
2110
2111 /* CAN_REACH_END is persistent for each function. Once set it should
2112 not be cleared. This is especially true for the case where we
2113 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
2114 the front-end before compiling each function. */
2115 if (calculate_can_reach_end (last_insn, 0, 1))
2116 can_reach_end = 1;
2117
2118 /* Show JUMP_CHAIN no longer valid. */
2119 jump_chain = 0;
2120 }
2121 \f
2122 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2123 notes whose labels don't occur in the insn any more. Returns the
2124 largest INSN_UID found. */
2125 static int
2126 init_label_info (f)
2127 rtx f;
2128 {
2129 int largest_uid = 0;
2130 rtx insn;
2131
2132 for (insn = f; insn; insn = NEXT_INSN (insn))
2133 {
2134 if (GET_CODE (insn) == CODE_LABEL)
2135 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2136 else if (GET_CODE (insn) == JUMP_INSN)
2137 JUMP_LABEL (insn) = 0;
2138 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2139 {
2140 rtx note, next;
2141
2142 for (note = REG_NOTES (insn); note; note = next)
2143 {
2144 next = XEXP (note, 1);
2145 if (REG_NOTE_KIND (note) == REG_LABEL
2146 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2147 remove_note (insn, note);
2148 }
2149 }
2150 if (INSN_UID (insn) > largest_uid)
2151 largest_uid = INSN_UID (insn);
2152 }
2153
2154 return largest_uid;
2155 }
2156
2157 /* Delete insns following barriers, up to next label.
2158
2159 Also delete no-op jumps created by gcse. */
2160 static void
2161 delete_barrier_successors (f)
2162 rtx f;
2163 {
2164 rtx insn;
2165
2166 for (insn = f; insn;)
2167 {
2168 if (GET_CODE (insn) == BARRIER)
2169 {
2170 insn = NEXT_INSN (insn);
2171
2172 never_reached_warning (insn);
2173
2174 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2175 {
2176 if (GET_CODE (insn) == NOTE
2177 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2178 insn = NEXT_INSN (insn);
2179 else
2180 insn = delete_insn (insn);
2181 }
2182 /* INSN is now the code_label. */
2183 }
2184 /* Also remove (set (pc) (pc)) insns which can be created by
2185 gcse. We eliminate such insns now to avoid having them
2186 cause problems later. */
2187 else if (GET_CODE (insn) == JUMP_INSN
2188 && GET_CODE (PATTERN (insn)) == SET
2189 && SET_SRC (PATTERN (insn)) == pc_rtx
2190 && SET_DEST (PATTERN (insn)) == pc_rtx)
2191 insn = delete_insn (insn);
2192
2193 else
2194 insn = NEXT_INSN (insn);
2195 }
2196 }
2197
2198 /* Mark the label each jump jumps to.
2199 Combine consecutive labels, and count uses of labels.
2200
2201 For each label, make a chain (using `jump_chain')
2202 of all the *unconditional* jumps that jump to it;
2203 also make a chain of all returns.
2204
2205 CROSS_JUMP indicates whether we are doing cross jumping
2206 and, if so, whether we will pay attention to death
2207 notes. */
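/* For example (sketch): after two simplejumps J1 and then J2 to
   the same label L have been marked, the chain reads

       jump_chain[INSN_UID (L)]  == J2
       jump_chain[INSN_UID (J2)] == J1
       jump_chain[INSN_UID (J1)] == 0

   and RETURN insns are threaded the same way through
   jump_chain[0].  */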
2208
2209 static void
2210 mark_all_labels (f, cross_jump)
2211 rtx f;
2212 int cross_jump;
2213 {
2214 rtx insn;
2215
2216 for (insn = f; insn; insn = NEXT_INSN (insn))
2217 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2218 {
2219 mark_jump_label (PATTERN (insn), insn, cross_jump);
2220 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2221 {
2222 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2223 {
2224 jump_chain[INSN_UID (insn)]
2225 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2226 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2227 }
2228 if (GET_CODE (PATTERN (insn)) == RETURN)
2229 {
2230 jump_chain[INSN_UID (insn)] = jump_chain[0];
2231 jump_chain[0] = insn;
2232 }
2233 }
2234 }
2235 }
2236
2237 /* Delete all labels already not referenced.
2238 Also find and return the last insn. */
2239
2240 static rtx
2241 delete_unreferenced_labels (f)
2242 rtx f;
2243 {
2244 rtx final = NULL_RTX;
2245 rtx insn;
2246
2247 for (insn = f; insn; )
2248 {
2249 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
2250 insn = delete_insn (insn);
2251 else
2252 {
2253 final = insn;
2254 insn = NEXT_INSN (insn);
2255 }
2256 }
2257
2258 return final;
2259 }
2260
2261 /* Delete various simple forms of moves which have no necessary
2262 side effect. */
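/* The cases handled below include, for instance (sketch):

       (set (reg 60) (reg 60))        ; register to itself
       (set (mem X) (mem X))          ; non-volatile mem to itself
       (set (reg 60) (reg 61))        ; when both pseudos ended up
                                      ; in the same hard register

   as well as PARALLELs of such sets and no-op bit-field stores.  */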
2263
2264 static void
2265 delete_noop_moves (f)
2266 rtx f;
2267 {
2268 rtx insn, next;
2269
2270 for (insn = f; insn; )
2271 {
2272 next = NEXT_INSN (insn);
2273
2274 if (GET_CODE (insn) == INSN)
2275 {
2276 register rtx body = PATTERN (insn);
2277
2278 /* Combine stack_adjusts with following push_insns. */
2279 #ifdef PUSH_ROUNDING
2280 if (GET_CODE (body) == SET
2281 && SET_DEST (body) == stack_pointer_rtx
2282 && GET_CODE (SET_SRC (body)) == PLUS
2283 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
2284 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
2285 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
2286 {
2287 rtx p;
2288 rtx stack_adjust_insn = insn;
2289 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
2290 int total_pushed = 0;
2291 int pushes = 0;
2292
2293 /* Find all successive push insns. */
2294 p = insn;
2295 /* Don't convert more than three pushes;
2296 that starts adding too many displaced addresses
2297 and the whole thing starts becoming a losing
2298 proposition. */
2299 while (pushes < 3)
2300 {
2301 rtx pbody, dest;
2302 p = next_nonnote_insn (p);
2303 if (p == 0 || GET_CODE (p) != INSN)
2304 break;
2305 pbody = PATTERN (p);
2306 if (GET_CODE (pbody) != SET)
2307 break;
2308 dest = SET_DEST (pbody);
2309 /* Allow a no-op move between the adjust and the push. */
2310 if (GET_CODE (dest) == REG
2311 && GET_CODE (SET_SRC (pbody)) == REG
2312 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2313 continue;
2314 if (! (GET_CODE (dest) == MEM
2315 && GET_CODE (XEXP (dest, 0)) == POST_INC
2316 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2317 break;
2318 pushes++;
2319 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
2320 > stack_adjust_amount)
2321 break;
2322 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2323 }
2324
2325 /* Discard the amount pushed from the stack adjust;
2326 maybe eliminate it entirely. */
2327 if (total_pushed >= stack_adjust_amount)
2328 {
2329 delete_computation (stack_adjust_insn);
2330 total_pushed = stack_adjust_amount;
2331 }
2332 else
2333 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
2334 = GEN_INT (stack_adjust_amount - total_pushed);
2335
2336 /* Change the appropriate push insns to ordinary stores. */
2337 p = insn;
2338 while (total_pushed > 0)
2339 {
2340 rtx pbody, dest;
2341 p = next_nonnote_insn (p);
2342 if (GET_CODE (p) != INSN)
2343 break;
2344 pbody = PATTERN (p);
2345 if (GET_CODE (pbody) != SET)
2346 break;
2347 dest = SET_DEST (pbody);
2348 /* Allow a no-op move between the adjust and the push. */
2349 if (GET_CODE (dest) == REG
2350 && GET_CODE (SET_SRC (pbody)) == REG
2351 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2352 continue;
2353 if (! (GET_CODE (dest) == MEM
2354 && GET_CODE (XEXP (dest, 0)) == POST_INC
2355 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2356 break;
2357 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2358 /* If this push doesn't fully fit in the space
2359 of the stack adjust that we deleted,
2360 make another stack adjust here for what we
2361 didn't use up. There should be peepholes
2362 to recognize the resulting sequence of insns. */
2363 if (total_pushed < 0)
2364 {
2365 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
2366 GEN_INT (- total_pushed)),
2367 p);
2368 break;
2369 }
2370 XEXP (dest, 0)
2371 = plus_constant (stack_pointer_rtx, total_pushed);
2372 }
2373 }
2374 #endif
2375
2376 /* Detect and delete no-op move instructions
2377 resulting from not allocating a parameter in a register. */
2378
2379 if (GET_CODE (body) == SET
2380 && (SET_DEST (body) == SET_SRC (body)
2381 || (GET_CODE (SET_DEST (body)) == MEM
2382 && GET_CODE (SET_SRC (body)) == MEM
2383 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2384 && ! (GET_CODE (SET_DEST (body)) == MEM
2385 && MEM_VOLATILE_P (SET_DEST (body)))
2386 && ! (GET_CODE (SET_SRC (body)) == MEM
2387 && MEM_VOLATILE_P (SET_SRC (body))))
2388 delete_computation (insn);
2389
2390 /* Detect and ignore no-op move instructions
2391 resulting from smart or fortuitous register allocation. */
2392
2393 else if (GET_CODE (body) == SET)
2394 {
2395 int sreg = true_regnum (SET_SRC (body));
2396 int dreg = true_regnum (SET_DEST (body));
2397
2398 if (sreg == dreg && sreg >= 0)
2399 delete_insn (insn);
2400 else if (sreg >= 0 && dreg >= 0)
2401 {
2402 rtx trial;
2403 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2404 sreg, NULL_PTR, dreg,
2405 GET_MODE (SET_SRC (body)));
2406
2407 if (tem != 0
2408 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2409 {
2410 /* DREG may have been the target of a REG_DEAD note in
2411 the insn which makes INSN redundant. If so, reorg
2412 would still think it is dead. So search for such a
2413 note and delete it if we find it. */
2414 if (! find_regno_note (insn, REG_UNUSED, dreg))
2415 for (trial = prev_nonnote_insn (insn);
2416 trial && GET_CODE (trial) != CODE_LABEL;
2417 trial = prev_nonnote_insn (trial))
2418 if (find_regno_note (trial, REG_DEAD, dreg))
2419 {
2420 remove_death (dreg, trial);
2421 break;
2422 }
2423
2424 /* Deleting insn could lose a death-note for SREG. */
2425 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2426 {
2427 /* Change this into a USE so that we won't emit
2428 code for it, but still can keep the note. */
2429 PATTERN (insn)
2430 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2431 INSN_CODE (insn) = -1;
2432 /* Remove all reg notes but the REG_DEAD one. */
2433 REG_NOTES (insn) = trial;
2434 XEXP (trial, 1) = NULL_RTX;
2435 }
2436 else
2437 delete_insn (insn);
2438 }
2439 }
2440 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2441 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2442 NULL_PTR, 0,
2443 GET_MODE (SET_DEST (body))))
2444 {
2445 /* This handles the case where we have two consecutive
2446 assignments of the same constant to pseudos that didn't
2447 get a hard reg. Each SET from the constant will be
2448 converted into a SET of the spill register and an
2449 output reload will be made following it. This produces
2450 two loads of the same constant into the same spill
2451 register. */
2452
2453 rtx in_insn = insn;
2454
2455 /* Look back for a death note for the first reg.
2456 If there is one, it is no longer accurate. */
2457 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2458 {
2459 if ((GET_CODE (in_insn) == INSN
2460 || GET_CODE (in_insn) == JUMP_INSN)
2461 && find_regno_note (in_insn, REG_DEAD, dreg))
2462 {
2463 remove_death (dreg, in_insn);
2464 break;
2465 }
2466 in_insn = PREV_INSN (in_insn);
2467 }
2468
2469 /* Delete the second load of the value. */
2470 delete_insn (insn);
2471 }
2472 }
2473 else if (GET_CODE (body) == PARALLEL)
2474 {
2475 /* If each part is a set between two identical registers or
2476 a USE or CLOBBER, delete the insn. */
2477 int i, sreg, dreg;
2478 rtx tem;
2479
2480 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2481 {
2482 tem = XVECEXP (body, 0, i);
2483 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2484 continue;
2485
2486 if (GET_CODE (tem) != SET
2487 || (sreg = true_regnum (SET_SRC (tem))) < 0
2488 || (dreg = true_regnum (SET_DEST (tem))) < 0
2489 || dreg != sreg)
2490 break;
2491 }
2492
2493 if (i < 0)
2494 delete_insn (insn);
2495 }
2496 /* Also delete insns to store bit fields if they are no-ops. */
2497 /* Not worth the hair to detect this in the big-endian case. */
2498 else if (! BYTES_BIG_ENDIAN
2499 && GET_CODE (body) == SET
2500 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2501 && XEXP (SET_DEST (body), 2) == const0_rtx
2502 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2503 && ! (GET_CODE (SET_SRC (body)) == MEM
2504 && MEM_VOLATILE_P (SET_SRC (body))))
2505 delete_insn (insn);
2506 }
2507 insn = next;
2508 }
2509 }
2510
2511 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2512 If so indicate that this function can drop off the end by returning
2513 1, else return 0.
2514
2515 CHECK_DELETED indicates whether we must check if the note being
2516 searched for has the deleted flag set.
2517
2518 DELETE_FINAL_NOTE indicates whether we should delete the note
2519 if we find it. */
2520
2521 static int
2522 calculate_can_reach_end (last, check_deleted, delete_final_note)
2523 rtx last;
2524 int check_deleted;
2525 int delete_final_note;
2526 {
2527 rtx insn = last;
2528 int n_labels = 1;
2529
2530 while (insn != NULL_RTX)
2531 {
2532 int ok = 0;
2533
2534 /* One label can follow the end-note: the return label. */
2535 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2536 ok = 1;
2537 /* Ordinary insns can follow it if returning a structure. */
2538 else if (GET_CODE (insn) == INSN)
2539 ok = 1;
2540 /* If the machine uses explicit RETURN insns and has no
2541 epilogue, then one of them follows the note. */
2542 else if (GET_CODE (insn) == JUMP_INSN
2543 && GET_CODE (PATTERN (insn)) == RETURN)
2544 ok = 1;
2545 /* A barrier can follow the return insn. */
2546 else if (GET_CODE (insn) == BARRIER)
2547 ok = 1;
2548 /* Other kinds of notes can follow also. */
2549 else if (GET_CODE (insn) == NOTE
2550 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2551 ok = 1;
2552
2553 if (ok != 1)
2554 break;
2555
2556 insn = PREV_INSN (insn);
2557 }
2558
2559 /* See if we backed up to the appropriate type of note. */
2560 if (insn != NULL_RTX
2561 && GET_CODE (insn) == NOTE
2562 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
2563 && (check_deleted == 0
2564 || ! INSN_DELETED_P (insn)))
2565 {
2566 if (delete_final_note)
2567 delete_insn (insn);
2568 return 1;
2569 }
2570
2571 return 0;
2572 }
2573
2574 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2575 jump. Assume that this unconditional jump is to the exit test code. If
2576 the code is sufficiently simple, make a copy of it before LOOP_START,
2577 followed by a jump to the exit of the loop. Then delete the unconditional
2578 jump after LOOP_START.
2579
2580 Return 1 if we made the change, else 0.
2581
2582 This is only safe immediately after a regscan pass because it uses the
2583 values of regno_first_uid and regno_last_uid. */
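/* For example (sketch), a loop laid out as

       NOTE_INSN_LOOP_BEG
       jump exit_test
       top: ...body...
       exit_test: if (cond) jump top
       NOTE_INSN_LOOP_END

   gets a copy of the exit test emitted before the loop, followed
   by a jump past the loop for the case where the body would never
   run; the original jump to exit_test is then deleted.  */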
2584
2585 static int
2586 duplicate_loop_exit_test (loop_start)
2587 rtx loop_start;
2588 {
2589 rtx insn, set, reg, p, link;
2590 rtx copy = 0, first_copy = 0;
2591 int num_insns = 0;
2592 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2593 rtx lastexit;
2594 int max_reg = max_reg_num ();
2595 rtx *reg_map = 0;
2596
2597 /* Scan the exit code. We do not perform this optimization if any insn:
2598
2599 is a CALL_INSN
2600 is a CODE_LABEL
2601 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2602 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2603 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2604 is not valid.
2605
2606 We also do not do this if we find an insn with ASM_OPERANDS. While
2607 this restriction should not be necessary, copying an insn with
2608 ASM_OPERANDS can confuse asm_noperands in some cases.
2609
2610 Also, don't do this if the exit code is more than 20 insns. */
2611
2612 for (insn = exitcode;
2613 insn
2614 && ! (GET_CODE (insn) == NOTE
2615 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2616 insn = NEXT_INSN (insn))
2617 {
2618 switch (GET_CODE (insn))
2619 {
2620 case CODE_LABEL:
2621 case CALL_INSN:
2622 return 0;
2623 case NOTE:
2624 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2625 a jump immediately after the loop start that branches outside
2626 the loop but within an outer loop, near the exit test.
2627 If we copied this exit test and created a phony
2628 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2629 before the exit test look as if they could be safely moved
2630 out of the loop even though they may in fact never be executed.
2631 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2632
2633 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2634 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2635 return 0;
2636
2637 if (optimize < 2
2638 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2639 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2640 /* If we were to duplicate this code, we would not move
2641 the BLOCK notes, and so debugging the moved code would
2642 be difficult. Thus, we only move the code with -O2 or
2643 higher. */
2644 return 0;
2645
2646 break;
2647 case JUMP_INSN:
2648 case INSN:
2649 /* The code below would grossly mishandle REG_WAS_0 notes,
2650 so get rid of them here. */
2651 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2652 remove_note (insn, p);
2653 if (++num_insns > 20
2654 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2655 || find_reg_note (insn, REG_LIBCALL, NULL_RTX)
2656 || asm_noperands (PATTERN (insn)) > 0)
2657 return 0;
2658 break;
2659 default:
2660 break;
2661 }
2662 }
2663
2664 /* If INSN is zero, we fell off the end without finding the loop end, so we cannot do the optimization. */
2665 if (insn == 0)
2666 return 0;
2667
2668 lastexit = insn;
2669
2670 /* See if any insn sets a register only used in the loop exit code and
2671 not a user variable. If so, replace it with a new register. */
2672 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2673 if (GET_CODE (insn) == INSN
2674 && (set = single_set (insn)) != 0
2675 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2676 || (GET_CODE (reg) == SUBREG
2677 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2678 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2679 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
2680 {
2681 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2682 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2683 break;
2684
2685 if (p != lastexit)
2686 {
2687 /* We can do the replacement. Allocate reg_map if this is the
2688 first replacement we found. */
2689 if (reg_map == 0)
2690 {
2691 reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
2692 bzero ((char *) reg_map, max_reg * sizeof (rtx));
2693 }
2694
2695 REG_LOOP_TEST_P (reg) = 1;
2696
2697 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2698 }
2699 }
2700
2701 /* Now copy each insn. */
2702 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2703 {
2704 switch (GET_CODE (insn))
2705 {
2706 case BARRIER:
2707 copy = emit_barrier_before (loop_start);
2708 break;
2709 case NOTE:
2710 /* Only copy line-number notes. */
2711 if (NOTE_LINE_NUMBER (insn) >= 0)
2712 {
2713 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2714 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2715 }
2716 break;
2717
2718 case INSN:
2719 copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2720 if (reg_map)
2721 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2722
2723 mark_jump_label (PATTERN (copy), copy, 0);
2724
2725 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2726 make them. */
2727 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2728 if (REG_NOTE_KIND (link) != REG_LABEL)
2729 REG_NOTES (copy)
2730 = copy_rtx (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
2731 XEXP (link, 0),
2732 REG_NOTES (copy)));
2733 if (reg_map && REG_NOTES (copy))
2734 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2735 break;
2736
2737 case JUMP_INSN:
2738 copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2739 if (reg_map)
2740 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2741 mark_jump_label (PATTERN (copy), copy, 0);
2742 if (REG_NOTES (insn))
2743 {
2744 REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
2745 if (reg_map)
2746 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2747 }
2748
2749 /* If this is a simple jump, add it to the jump chain. */
2750
2751 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2752 && simplejump_p (copy))
2753 {
2754 jump_chain[INSN_UID (copy)]
2755 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2756 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2757 }
2758 break;
2759
2760 default:
2761 abort ();
2762 }
2763
2764 /* Record the first insn we copied. We need it so that we can
2765 scan the copied insns for new pseudo registers. */
2766 if (! first_copy)
2767 first_copy = copy;
2768 }
2769
2770 /* Now clean up by emitting a jump to the end label and deleting the jump
2771 at the start of the loop. */
2772 if (! copy || GET_CODE (copy) != BARRIER)
2773 {
2774 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2775 loop_start);
2776
2777 /* Record the first insn we copied. We need it so that we can
2778 scan the copied insns for new pseudo registers. This may not
2779 be strictly necessary since we should have copied at least one
2780 insn above. But I am going to be safe. */
2781 if (! first_copy)
2782 first_copy = copy;
2783
2784 mark_jump_label (PATTERN (copy), copy, 0);
2785 if (INSN_UID (copy) < max_jump_chain
2786 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2787 {
2788 jump_chain[INSN_UID (copy)]
2789 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2790 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2791 }
2792 emit_barrier_before (loop_start);
2793 }
2794
2795 /* Now scan from the first insn we copied to the last insn we copied
2796 (copy) for new pseudo registers. Do this after the code to jump to
2797 the end label since that might create a new pseudo too. */
2798 reg_scan_update (first_copy, copy, max_reg);
2799
2800 /* Mark the exit code as the virtual top of the converted loop. */
2801 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2802
2803 delete_insn (next_nonnote_insn (loop_start));
2804
2805 return 1;
2806 }
2807 \f
2808 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2809 loop-end notes between START and END out before START. Assume that
2810 END is not such a note. START may be such a note. Returns the value
2811 of the new starting insn, which may be different if the original start
2812 was such a note. */
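/* For example (sketch): if the range START..END is

       NOTE_INSN_LOOP_BEG, insn A, NOTE_INSN_BLOCK_END, insn B

   both notes are moved out in front and the value returned is the
   new first insn of the range, here A.  */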
2813
2814 rtx
2815 squeeze_notes (start, end)
2816 rtx start, end;
2817 {
2818 rtx insn;
2819 rtx next;
2820
2821 for (insn = start; insn != end; insn = next)
2822 {
2823 next = NEXT_INSN (insn);
2824 if (GET_CODE (insn) == NOTE
2825 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2826 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2827 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2828 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2829 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2830 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2831 {
2832 if (insn == start)
2833 start = next;
2834 else
2835 {
2836 rtx prev = PREV_INSN (insn);
2837 PREV_INSN (insn) = PREV_INSN (start);
2838 NEXT_INSN (insn) = start;
2839 NEXT_INSN (PREV_INSN (insn)) = insn;
2840 PREV_INSN (NEXT_INSN (insn)) = insn;
2841 NEXT_INSN (prev) = next;
2842 PREV_INSN (next) = prev;
2843 }
2844 }
2845 }
2846
2847 return start;
2848 }
2849 \f
2850 /* Compare the instructions before insn E1 with those before E2
2851 to find an opportunity for cross jumping.
2852 (This means detecting identical sequences of insns followed by
2853 jumps to the same place, or followed by a label and a jump
2854 to that label, and replacing one with a jump to the other.)
2855
2856 Assume E1 is a jump that jumps to label E2
2857 (that is not always true but it might as well be).
2858 Find the longest possible equivalent sequences
2859 and store the first insns of those sequences into *F1 and *F2.
2860 Store zero there if no equivalent preceding instructions are found.
2861
2862 We give up if we find a label in stream 1.
2863 Actually we could transfer that label into stream 2. */
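/* For example (sketch), with MINIMUM == 1 and the two streams

       ... insn A; insn B; jump L        <- E1 is the jump
       ... insn A; insn B; L:            <- E2 is the label

   the two copies of A are stored in *F1 and *F2, since the insn
   pairs preceding E1 and E2 match.  */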
2864
2865 static void
2866 find_cross_jump (e1, e2, minimum, f1, f2)
2867 rtx e1, e2;
2868 int minimum;
2869 rtx *f1, *f2;
2870 {
2871 register rtx i1 = e1, i2 = e2;
2872 register rtx p1, p2;
2873 int lose = 0;
2874
2875 rtx last1 = 0, last2 = 0;
2876 rtx afterlast1 = 0, afterlast2 = 0;
2877
2878 *f1 = 0;
2879 *f2 = 0;
2880
2881 while (1)
2882 {
2883 i1 = prev_nonnote_insn (i1);
2884
2885 i2 = PREV_INSN (i2);
2886 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2887 i2 = PREV_INSN (i2);
2888
2889 if (i1 == 0)
2890 break;
2891
2892 /* Don't allow the range of insns preceding E1 or E2
2893 to include the other (E2 or E1). */
2894 if (i2 == e1 || i1 == e2)
2895 break;
2896
2897 /* If we get to this code by jumping, those jumps will be
2898 tensioned to go directly to the new label (before I2),
2899 so this cross-jumping won't cost extra. So reduce the minimum. */
2900 if (GET_CODE (i1) == CODE_LABEL)
2901 {
2902 --minimum;
2903 break;
2904 }
2905
2906 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2907 break;
2908
2909 /* Avoid moving insns across EH regions if either of the insns
2910 can throw. */
2911 if (flag_exceptions
2912 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
2913 && !in_same_eh_region (i1, i2))
2914 break;
2915
2916 p1 = PATTERN (i1);
2917 p2 = PATTERN (i2);
2918
2919 /* If this is a CALL_INSN, compare register usage information.
2920 If we don't check this on stack register machines, the two
2921 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2922 numbers of stack registers in the same basic block.
2923 If we don't check this on machines with delay slots, a delay slot may
2924 be filled that clobbers a parameter expected by the subroutine.
2925
2926 ??? We take the simple route for now and assume that if they're
2927 equal, they were constructed identically. */
2928
2929 if (GET_CODE (i1) == CALL_INSN
2930 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2931 CALL_INSN_FUNCTION_USAGE (i2)))
2932 lose = 1;
2933
2934 #ifdef STACK_REGS
2935 /* If cross_jump_death_matters is not 0, the insn's mode
2936 indicates whether or not the insn contains any stack-like
2937 regs. */
2938
2939 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
2940 {
2941 /* If register stack conversion has already been done, then
2942 death notes must also be compared before it is certain that
2943 the two instruction streams match. */
2944
2945 rtx note;
2946 HARD_REG_SET i1_regset, i2_regset;
2947
2948 CLEAR_HARD_REG_SET (i1_regset);
2949 CLEAR_HARD_REG_SET (i2_regset);
2950
2951 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2952 if (REG_NOTE_KIND (note) == REG_DEAD
2953 && STACK_REG_P (XEXP (note, 0)))
2954 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2955
2956 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2957 if (REG_NOTE_KIND (note) == REG_DEAD
2958 && STACK_REG_P (XEXP (note, 0)))
2959 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2960
2961 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2962
2963 lose = 1;
2964
2965 done:
2966 ;
2967 }
2968 #endif
2969
2970 /* Don't allow old-style asm or volatile extended asms to be accepted
2971 for cross jumping purposes. It is conceptually correct to allow
2972 them, since cross-jumping preserves the dynamic instruction order
2973 even though it is changing the static instruction order. However,
2974 if an asm is being used to emit an assembler pseudo-op, such as
2975 the MIPS `.set reorder' pseudo-op, then the static instruction order
2976 matters and it must be preserved. */
2977 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
2978 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
2979 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
2980 lose = 1;
2981
2982 if (lose || GET_CODE (p1) != GET_CODE (p2)
2983 || ! rtx_renumbered_equal_p (p1, p2))
2984 {
2985 /* The following code helps take care of G++ cleanups. */
2986 rtx equiv1;
2987 rtx equiv2;
2988
2989 if (!lose && GET_CODE (p1) == GET_CODE (p2)
2990 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2991 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2992 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2993 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
2994 /* If the equivalences are not to a constant, they may
2995 reference pseudos that no longer exist, so we can't
2996 use them. */
2997 && CONSTANT_P (XEXP (equiv1, 0))
2998 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2999 {
3000 rtx s1 = single_set (i1);
3001 rtx s2 = single_set (i2);
3002 if (s1 != 0 && s2 != 0
3003 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3004 {
3005 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3006 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3007 if (! rtx_renumbered_equal_p (p1, p2))
3008 cancel_changes (0);
3009 else if (apply_change_group ())
3010 goto win;
3011 }
3012 }
3013
3014 /* These insns fail to match; cross jumping is limited to
3015 the insns that follow them. */
3016
3017 #ifdef HAVE_cc0
3018 /* Don't allow the insn after a compare to be shared by
3019 cross-jumping unless the compare is also shared.
3020 Here, if either of these non-matching insns is a compare,
3021 exclude the following insn from possible cross-jumping. */
3022 if (sets_cc0_p (p1) || sets_cc0_p (p2))
3023 last1 = afterlast1, last2 = afterlast2, ++minimum;
3024 #endif
3025
3026 /* If cross-jumping here will feed a jump-around-jump
3027 optimization, this jump won't cost extra, so reduce
3028 the minimum. */
3029 if (GET_CODE (i1) == JUMP_INSN
3030 && JUMP_LABEL (i1)
3031 && prev_real_insn (JUMP_LABEL (i1)) == e1)
3032 --minimum;
3033 break;
3034 }
3035
3036 win:
3037 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3038 {
3039 /* Ok, this insn is potentially includable in a cross-jump here. */
3040 afterlast1 = last1, afterlast2 = last2;
3041 last1 = i1, last2 = i2, --minimum;
3042 }
3043 }
3044
3045 if (minimum <= 0 && last1 != 0 && last1 != e1)
3046 *f1 = last1, *f2 = last2;
3047 }
3048
3049 static void
3050 do_cross_jump (insn, newjpos, newlpos)
3051 rtx insn, newjpos, newlpos;
3052 {
3053 /* Find an existing label at this point
3054 or make a new one if there is none. */
3055 register rtx label = get_label_before (newlpos);
3056
3057 /* Make the same jump insn jump to the new point. */
3058 if (GET_CODE (PATTERN (insn)) == RETURN)
3059 {
3060 /* Remove from jump chain of returns. */
3061 delete_from_jump_chain (insn);
3062 /* Change the insn. */
3063 PATTERN (insn) = gen_jump (label);
3064 INSN_CODE (insn) = -1;
3065 JUMP_LABEL (insn) = label;
3066 LABEL_NUSES (label)++;
3067 /* Add to the jump chain of the new label. */
3068 if (INSN_UID (label) < max_jump_chain
3069 && INSN_UID (insn) < max_jump_chain)
3070 {
3071 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
3072 jump_chain[INSN_UID (label)] = insn;
3073 }
3074 }
3075 else
3076 redirect_jump (insn, label);
3077
3078 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3079 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3080 the NEWJPOS stream. */
3081
3082 while (newjpos != insn)
3083 {
3084 rtx lnote;
3085
3086 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
3087 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
3088 || REG_NOTE_KIND (lnote) == REG_EQUIV)
3089 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
3090 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
3091 remove_note (newlpos, lnote);
3092
3093 delete_insn (newjpos);
3094 newjpos = next_real_insn (newjpos);
3095 newlpos = next_real_insn (newlpos);
3096 }
3097 }
3098 \f
3099 /* Return the label before INSN, or put a new label there. */
3100
3101 rtx
3102 get_label_before (insn)
3103 rtx insn;
3104 {
3105 rtx label;
3106
3107 /* Find an existing label at this point
3108 or make a new one if there is none. */
3109 label = prev_nonnote_insn (insn);
3110
3111 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3112 {
3113 rtx prev = PREV_INSN (insn);
3114
3115 label = gen_label_rtx ();
3116 emit_label_after (label, prev);
3117 LABEL_NUSES (label) = 0;
3118 }
3119 return label;
3120 }
3121
3122 /* Return the label after INSN, or put a new label there. */
3123
3124 rtx
3125 get_label_after (insn)
3126 rtx insn;
3127 {
3128 rtx label;
3129
3130 /* Find an existing label at this point
3131 or make a new one if there is none. */
3132 label = next_nonnote_insn (insn);
3133
3134 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3135 {
3136 label = gen_label_rtx ();
3137 emit_label_after (label, insn);
3138 LABEL_NUSES (label) = 0;
3139 }
3140 return label;
3141 }
3142 \f
3143 /* Return 1 if INSN is a jump that jumps to right after TARGET
3144 only on the condition that TARGET itself would drop through.
3145 Assumes that TARGET is a conditional jump. */
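/* For example (sketch):

       TARGET:  if (x >= y) goto L2
       L1:                               <- just after TARGET
       ...
       INSN:    if (x < y) goto L1

   INSN jumps back to L1 exactly when TARGET would have fallen
   through to it.  */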
3146
3147 static int
3148 jump_back_p (insn, target)
3149 rtx insn, target;
3150 {
3151 rtx cinsn, ctarget;
3152 enum rtx_code codei, codet;
3153
3154 if (simplejump_p (insn) || ! condjump_p (insn)
3155 || simplejump_p (target)
3156 || target != prev_real_insn (JUMP_LABEL (insn)))
3157 return 0;
3158
3159 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3160 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3161
3162 codei = GET_CODE (cinsn);
3163 codet = GET_CODE (ctarget);
3164
3165 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3166 {
3167 if (! can_reverse_comparison_p (cinsn, insn))
3168 return 0;
3169 codei = reverse_condition (codei);
3170 }
3171
3172 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3173 {
3174 if (! can_reverse_comparison_p (ctarget, target))
3175 return 0;
3176 codet = reverse_condition (codet);
3177 }
3178
3179 return (codei == codet
3180 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3181 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3182 }
3183 \f
3184 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3185 return non-zero if it is safe to reverse this comparison. It is if our
3186 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3187 this is known to be an integer comparison. */
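/* For example, with IEEE floats both (gt x y) and (le x y) are
   false when either operand is a NaN, so GT cannot blindly be
   reversed to LE; but (eq x y) and (ne x y) are exact opposites
   even in the presence of NaNs, so they are always reversible.  */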
3188
3189 int
3190 can_reverse_comparison_p (comparison, insn)
3191 rtx comparison;
3192 rtx insn;
3193 {
3194 rtx arg0;
3195
3196 /* If this is not actually a comparison, we can't reverse it. */
3197 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3198 return 0;
3199
3200 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3201 /* If this is an NE comparison, it is safe to reverse it to an EQ
3202 comparison and vice versa, even for floating point. If no operands
3203 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3204 always false and NE is always true, so the reversal is also valid. */
3205 || flag_fast_math
3206 || GET_CODE (comparison) == NE
3207 || GET_CODE (comparison) == EQ)
3208 return 1;
3209
3210 arg0 = XEXP (comparison, 0);
3211
3212 /* Make sure ARG0 is one of the actual objects being compared. If we
3213 can't do this, we can't be sure the comparison can be reversed.
3214
3215 Handle cc0 and a MODE_CC register. */
3216 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3217 #ifdef HAVE_cc0
3218 || arg0 == cc0_rtx
3219 #endif
3220 )
3221 {
3222 rtx prev = prev_nonnote_insn (insn);
3223 rtx set;
3224
3225 /* If the comparison itself was a loop invariant, it could have been
3226 hoisted out of the loop. If we proceed to unroll such a loop, then
3227 we may not be able to find the comparison when copying the loop.
3228
3229 Returning zero in that case is the safe thing to do. */
3230 if (prev == 0)
3231 return 0;
3232
3233 set = single_set (prev);
3234 if (set == 0 || SET_DEST (set) != arg0)
3235 return 0;
3236
3237 arg0 = SET_SRC (set);
3238
3239 if (GET_CODE (arg0) == COMPARE)
3240 arg0 = XEXP (arg0, 0);
3241 }
3242
3243 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3244 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3245 return (GET_CODE (arg0) == CONST_INT
3246 || (GET_MODE (arg0) != VOIDmode
3247 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3248 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3249 }
3250
3251 /* Given an rtx-code for a comparison, return the code
3252 for the negated comparison.
3253 WATCH OUT! reverse_condition is not safe to use on a jump
3254 that might be acting on the results of an IEEE floating point comparison,
3255 because of the special treatment of non-signaling nans in comparisons.
3256 Use can_reverse_comparison_p to be sure. */
3257
3258 enum rtx_code
3259 reverse_condition (code)
3260 enum rtx_code code;
3261 {
3262 switch (code)
3263 {
3264 case EQ:
3265 return NE;
3266
3267 case NE:
3268 return EQ;
3269
3270 case GT:
3271 return LE;
3272
3273 case GE:
3274 return LT;
3275
3276 case LT:
3277 return GE;
3278
3279 case LE:
3280 return GT;
3281
3282 case GTU:
3283 return LEU;
3284
3285 case GEU:
3286 return LTU;
3287
3288 case LTU:
3289 return GEU;
3290
3291 case LEU:
3292 return GTU;
3293
3294 default:
3295 abort ();
3296 return UNKNOWN;
3297 }
3298 }
3299
3300 /* Similar, but return the code when two operands of a comparison are swapped.
3301 This IS safe for IEEE floating-point. */
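/* For example, (lt x y) and (gt y x) are the same test even when
   an operand is a NaN (both are false), which is why swapping is
   IEEE-safe while reversing is not.  */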
3302
3303 enum rtx_code
3304 swap_condition (code)
3305 enum rtx_code code;
3306 {
3307 switch (code)
3308 {
3309 case EQ:
3310 case NE:
3311 return code;
3312
3313 case GT:
3314 return LT;
3315
3316 case GE:
3317 return LE;
3318
3319 case LT:
3320 return GT;
3321
3322 case LE:
3323 return GE;
3324
3325 case GTU:
3326 return LTU;
3327
3328 case GEU:
3329 return LEU;
3330
3331 case LTU:
3332 return GTU;
3333
3334 case LEU:
3335 return GEU;
3336
3337 default:
3338 abort ();
3339 return UNKNOWN;
3340 }
3341 }
3342
3343 /* Given a comparison CODE, return the corresponding unsigned comparison.
3344 If CODE is an equality comparison or already an unsigned comparison,
3345 CODE is returned. */
3346
3347 enum rtx_code
3348 unsigned_condition (code)
3349 enum rtx_code code;
3350 {
3351 switch (code)
3352 {
3353 case EQ:
3354 case NE:
3355 case GTU:
3356 case GEU:
3357 case LTU:
3358 case LEU:
3359 return code;
3360
3361 case GT:
3362 return GTU;
3363
3364 case GE:
3365 return GEU;
3366
3367 case LT:
3368 return LTU;
3369
3370 case LE:
3371 return LEU;
3372
3373 default:
3374 abort ();
3375 }
3376 }
3377
3378 /* Similarly, return the signed version of a comparison. */
3379
3380 enum rtx_code
3381 signed_condition (code)
3382 enum rtx_code code;
3383 {
3384 switch (code)
3385 {
3386 case EQ:
3387 case NE:
3388 case GT:
3389 case GE:
3390 case LT:
3391 case LE:
3392 return code;
3393
3394 case GTU:
3395 return GT;
3396
3397 case GEU:
3398 return GE;
3399
3400 case LTU:
3401 return LT;
3402
3403 case LEU:
3404 return LE;
3405
3406 default:
3407 abort ();
3408 }
3409 }
3410 \f
3411 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3412 truth of CODE1 implies the truth of CODE2. */
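/* For example, comparison_dominates_p (EQ, GE) is 1, since x == y
   implies x >= y; but comparison_dominates_p (GE, GT) is 0, since
   x >= y does not imply x > y.  */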
3413
3414 int
3415 comparison_dominates_p (code1, code2)
3416 enum rtx_code code1, code2;
3417 {
3418 if (code1 == code2)
3419 return 1;
3420
3421 switch (code1)
3422 {
3423 case EQ:
3424 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
3425 return 1;
3426 break;
3427
3428 case LT:
3429 if (code2 == LE || code2 == NE)
3430 return 1;
3431 break;
3432
3433 case GT:
3434 if (code2 == GE || code2 == NE)
3435 return 1;
3436 break;
3437
3438 case LTU:
3439 if (code2 == LEU || code2 == NE)
3440 return 1;
3441 break;
3442
3443 case GTU:
3444 if (code2 == GEU || code2 == NE)
3445 return 1;
3446 break;
3447
3448 default:
3449 break;
3450 }
3451
3452 return 0;
3453 }
3454 \f
3455 /* Return 1 if INSN is an unconditional jump and nothing else. */
3456
3457 int
3458 simplejump_p (insn)
3459 rtx insn;
3460 {
3461 return (GET_CODE (insn) == JUMP_INSN
3462 && GET_CODE (PATTERN (insn)) == SET
3463 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3464 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3465 }
3466
3467 /* Return nonzero if INSN is a (possibly) conditional jump
3468 and nothing more. */
3469
3470 int
3471 condjump_p (insn)
3472 rtx insn;
3473 {
3474 register rtx x = PATTERN (insn);
3475 if (GET_CODE (x) != SET)
3476 return 0;
3477 if (GET_CODE (SET_DEST (x)) != PC)
3478 return 0;
3479 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3480 return 1;
3481 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3482 return 0;
3483 if (XEXP (SET_SRC (x), 2) == pc_rtx
3484 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3485 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3486 return 1;
3487 if (XEXP (SET_SRC (x), 1) == pc_rtx
3488 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3489 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3490 return 1;
3491 return 0;
3492 }
3493
3494 /* Return nonzero if INSN is a (possibly) conditional jump inside
3495 a PARALLEL and nothing more. */
3496
3497 int
3498 condjump_in_parallel_p (insn)
3499 rtx insn;
3500 {
3501 register rtx x = PATTERN (insn);
3502
3503 if (GET_CODE (x) != PARALLEL)
3504 return 0;
3505 else
3506 x = XVECEXP (x, 0, 0);
3507
3508 if (GET_CODE (x) != SET)
3509 return 0;
3510 if (GET_CODE (SET_DEST (x)) != PC)
3511 return 0;
3512 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3513 return 1;
3514 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3515 return 0;
3516 if (XEXP (SET_SRC (x), 2) == pc_rtx
3517 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3518 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3519 return 1;
3520 if (XEXP (SET_SRC (x), 1) == pc_rtx
3521 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3522 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3523 return 1;
3524 return 0;
3525 }
3526
3527 /* Return the label of a conditional jump. */
3528
3529 rtx
3530 condjump_label (insn)
3531 rtx insn;
3532 {
3533 register rtx x = PATTERN (insn);
3534
3535 if (GET_CODE (x) == PARALLEL)
3536 x = XVECEXP (x, 0, 0);
3537 if (GET_CODE (x) != SET)
3538 return NULL_RTX;
3539 if (GET_CODE (SET_DEST (x)) != PC)
3540 return NULL_RTX;
3541 x = SET_SRC (x);
3542 if (GET_CODE (x) == LABEL_REF)
3543 return x;
3544 if (GET_CODE (x) != IF_THEN_ELSE)
3545 return NULL_RTX;
3546 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
3547 return XEXP (x, 1);
3548 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
3549 return XEXP (x, 2);
3550 return NULL_RTX;
3551 }
3552
3553 /* Return true if INSN is a (possibly conditional) return insn. */
3554
3555 static int
3556 returnjump_p_1 (loc, data)
3557 rtx *loc;
3558 void *data ATTRIBUTE_UNUSED;
3559 {
3560 rtx x = *loc;
3561 return GET_CODE (x) == RETURN;
3562 }
3563
3564 int
3565 returnjump_p (insn)
3566 rtx insn;
3567 {
3568 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
3569 }
3570
3571 /* Return true if INSN is a jump that only transfers control and
3572 nothing more. */
3573
3574 int
3575 onlyjump_p (insn)
3576 rtx insn;
3577 {
3578 rtx set;
3579
3580 if (GET_CODE (insn) != JUMP_INSN)
3581 return 0;
3582
3583 set = single_set (insn);
3584 if (set == NULL)
3585 return 0;
3586 if (GET_CODE (SET_DEST (set)) != PC)
3587 return 0;
3588 if (side_effects_p (SET_SRC (set)))
3589 return 0;
3590
3591 return 1;
3592 }
3593
3594 #ifdef HAVE_cc0
3595
3596 /* Return 1 if X is an RTX that does nothing but set the condition codes
3597 and CLOBBER or USE registers.
3598 Return -1 if X does explicitly set the condition codes,
3599 but also does other things. */
3600
3601 int
3602 sets_cc0_p (x)
3603 rtx x ATTRIBUTE_UNUSED;
3604 {
3605 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3606 return 1;
3607 if (GET_CODE (x) == PARALLEL)
3608 {
3609 int i;
3610 int sets_cc0 = 0;
3611 int other_things = 0;
3612 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3613 {
3614 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3615 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3616 sets_cc0 = 1;
3617 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3618 other_things = 1;
3619 }
3620 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3621 }
3622 return 0;
3623 }
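
/* For instance, (set (cc0) (compare X Y)) yields 1; a PARALLEL that
   sets cc0 but also contains a SET of something else yields -1; and
   anything that does not set cc0 at all yields 0.  */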
3624 #endif
3625 \f
3626 /* Follow any unconditional jump at LABEL;
3627 return the ultimate label reached by any such chain of jumps.
3628 If LABEL is not followed by a jump, return LABEL.
3629 If the chain loops or we can't find the end, return LABEL,
3630 since that tells caller to avoid changing the insn.
3631
3632 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3633 a USE or CLOBBER. */
3634
3635 rtx
3636 follow_jumps (label)
3637 rtx label;
3638 {
3639 register rtx insn;
3640 register rtx next;
3641 register rtx value = label;
3642 register int depth;
3643
3644 for (depth = 0;
3645 (depth < 10
3646 && (insn = next_active_insn (value)) != 0
3647 && GET_CODE (insn) == JUMP_INSN
3648 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3649 || GET_CODE (PATTERN (insn)) == RETURN)
3650 && (next = NEXT_INSN (insn))
3651 && GET_CODE (next) == BARRIER);
3652 depth++)
3653 {
3654 /* Don't chain through the insn that jumps into a loop
3655 from outside the loop,
3656 since that would create multiple loop entry jumps
3657 and prevent loop optimization. */
3658 rtx tem;
3659 if (!reload_completed)
3660 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3661 if (GET_CODE (tem) == NOTE
3662 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
3663 /* ??? Optional. Disables some optimizations, but makes
3664 gcov output more accurate with -O. */
3665 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
3666 return value;
3667
3668 /* If we have found a cycle, make the insn jump to itself. */
3669 if (JUMP_LABEL (insn) == label)
3670 return label;
3671
3672 tem = next_active_insn (JUMP_LABEL (insn));
3673 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3674 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3675 break;
3676
3677 value = JUMP_LABEL (insn);
3678 }
3679 if (depth == 10)
3680 return label;
3681 return value;
3682 }
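
/* For example, if the insn after L1 is an unconditional jump to L2 and
   the insn after L2 is an unconditional jump to L3, follow_jumps (L1)
   returns L3.  The walk gives up (returning LABEL) after ten links or
   on a cycle, and stops short of a tablejump's dispatch table.  */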
3683
3684 /* Assuming that field IDX of X is a vector of label_refs,
3685 replace each of them by the ultimate label reached by it.
3686 Return nonzero if a change is made.
3687 As in follow_jumps, before reload we do not chain across a NOTE_INSN_LOOP_BEG. */
3688
3689 static int
3690 tension_vector_labels (x, idx)
3691 register rtx x;
3692 register int idx;
3693 {
3694 int changed = 0;
3695 register int i;
3696 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3697 {
3698 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3699 register rtx nlabel = follow_jumps (olabel);
3700 if (nlabel && nlabel != olabel)
3701 {
3702 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3703 ++LABEL_NUSES (nlabel);
3704 if (--LABEL_NUSES (olabel) == 0)
3705 delete_insn (olabel);
3706 changed = 1;
3707 }
3708 }
3709 return changed;
3710 }
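
/* For example, an entry (label_ref L) in a dispatch table whose target
   L merely jumps unconditionally to M is rewritten to (label_ref M),
   and L is deleted once its use count drops to zero.  */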
3711 \f
3712 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3713 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3714 in INSN, then store one of them in JUMP_LABEL (INSN).
3715 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3716 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3717 Also, when there are consecutive labels, canonicalize on the last of them.
3718
3719 Note that two labels separated by a loop-beginning note
3720 must be kept distinct if we have not yet done loop-optimization,
3721 because the gap between them is where loop-optimize
3722 will want to move invariant code to. CROSS_JUMP tells us
3723 that loop-optimization is finished.
3724
3725 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3726 two labels distinct if they are separated by only USE or CLOBBER insns. */
3727
3728 static void
3729 mark_jump_label (x, insn, cross_jump)
3730 register rtx x;
3731 rtx insn;
3732 int cross_jump;
3733 {
3734 register RTX_CODE code = GET_CODE (x);
3735 register int i;
3736 register const char *fmt;
3737
3738 switch (code)
3739 {
3740 case PC:
3741 case CC0:
3742 case REG:
3743 case SUBREG:
3744 case CONST_INT:
3745 case SYMBOL_REF:
3746 case CONST_DOUBLE:
3747 case CLOBBER:
3748 case CALL:
3749 return;
3750
3751 case MEM:
3752 /* If this is a constant-pool reference, see if it is a label. */
3753 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3754 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3755 mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
3756 break;
3757
3758 case LABEL_REF:
3759 {
3760 rtx label = XEXP (x, 0);
3761 rtx olabel = label;
3762 rtx note;
3763 rtx next;
3764
3765 if (GET_CODE (label) != CODE_LABEL)
3766 abort ();
3767
3768 /* Ignore references to labels of containing functions. */
3769 if (LABEL_REF_NONLOCAL_P (x))
3770 break;
3771
3772 /* If there are other labels following this one,
3773 replace it with the last of the consecutive labels. */
3774 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3775 {
3776 if (GET_CODE (next) == CODE_LABEL)
3777 label = next;
3778 else if (cross_jump && GET_CODE (next) == INSN
3779 && (GET_CODE (PATTERN (next)) == USE
3780 || GET_CODE (PATTERN (next)) == CLOBBER))
3781 continue;
3782 else if (GET_CODE (next) != NOTE)
3783 break;
3784 else if (! cross_jump
3785 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3786 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
3787 /* ??? Optional. Disables some optimizations, but
3788 makes gcov output more accurate with -O. */
3789 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
3790 break;
3791 }
3792
3793 XEXP (x, 0) = label;
3794 if (! insn || ! INSN_DELETED_P (insn))
3795 ++LABEL_NUSES (label);
3796
3797 if (insn)
3798 {
3799 if (GET_CODE (insn) == JUMP_INSN)
3800 JUMP_LABEL (insn) = label;
3801
3802 /* If we've changed OLABEL and we had a REG_LABEL note
3803 for it, update it as well. */
3804 else if (label != olabel
3805 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3806 XEXP (note, 0) = label;
3807
3808 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3809 is one. */
3810 else if (! find_reg_note (insn, REG_LABEL, label))
3811 {
3812 /* This code used to ignore labels which referred to dispatch
3813 tables to avoid flow.c generating worse code.
3814
3815 However, in the presence of global optimizations like
3816 gcse which call find_basic_blocks without calling
3817 life_analysis, not recording such labels will lead
3818 to compiler aborts because of inconsistencies in the
3819 flow graph. So we go ahead and record the label.
3820
3821 It may also be the case that the optimization argument
3822 is no longer valid because of the more accurate cfg
3823 we build in find_basic_blocks -- it no longer pessimizes
3824 code when it finds a REG_LABEL note. */
3825 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
3826 REG_NOTES (insn));
3827 }
3828 }
3829 return;
3830 }
3831
3832 /* Do walk the labels in a vector, but not the first operand of an
3833 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3834 case ADDR_VEC:
3835 case ADDR_DIFF_VEC:
3836 if (! INSN_DELETED_P (insn))
3837 {
3838 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
3839
3840 for (i = 0; i < XVECLEN (x, eltnum); i++)
3841 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
3842 }
3843 return;
3844
3845 default:
3846 break;
3847 }
3848
3849 fmt = GET_RTX_FORMAT (code);
3850 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3851 {
3852 if (fmt[i] == 'e')
3853 mark_jump_label (XEXP (x, i), insn, cross_jump);
3854 else if (fmt[i] == 'E')
3855 {
3856 register int j;
3857 for (j = 0; j < XVECLEN (x, i); j++)
3858 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
3859 }
3860 }
3861 }
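
/* For example, when labels L1 and L2 are adjacent, a (label_ref L1)
   in a jump is canonicalized to (label_ref L2), the use count of L2
   is incremented, and JUMP_LABEL of the jump is set to L2.  */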
3862
3863 /* If all INSN does is set the pc, delete it,
3864 and delete the insn that set the condition codes for it
3865 if that's what the previous thing was. */
3866
3867 void
3868 delete_jump (insn)
3869 rtx insn;
3870 {
3871 register rtx set = single_set (insn);
3872
3873 if (set && GET_CODE (SET_DEST (set)) == PC)
3874 delete_computation (insn);
3875 }
3876
3877 /* Recursively delete prior insns that compute the value (used only by INSN
3878 which the caller is deleting) stored in the register mentioned by NOTE
3879 which is a REG_DEAD note associated with INSN. */
3880
3881 static void
3882 delete_prior_computation (note, insn)
3883 rtx note;
3884 rtx insn;
3885 {
3886 rtx our_prev;
3887 rtx reg = XEXP (note, 0);
3888
3889 for (our_prev = prev_nonnote_insn (insn);
3890 our_prev && GET_CODE (our_prev) == INSN;
3891 our_prev = prev_nonnote_insn (our_prev))
3892 {
3893 rtx pat = PATTERN (our_prev);
3894
3895 /* If we reach a SEQUENCE, it is too complex to try to
3896 do anything with it, so give up. */
3897 if (GET_CODE (pat) == SEQUENCE)
3898 break;
3899
3900 if (GET_CODE (pat) == USE
3901 && GET_CODE (XEXP (pat, 0)) == INSN)
3902 /* reorg creates USEs that look like this. We leave them
3903 alone because reorg needs them for its own purposes. */
3904 break;
3905
3906 if (reg_set_p (reg, pat))
3907 {
3908 if (side_effects_p (pat))
3909 break;
3910
3911 if (GET_CODE (pat) == PARALLEL)
3912 {
3913 /* If we find a SET of something else, we can't
3914 delete the insn. */
3915
3916 int i;
3917
3918 for (i = 0; i < XVECLEN (pat, 0); i++)
3919 {
3920 rtx part = XVECEXP (pat, 0, i);
3921
3922 if (GET_CODE (part) == SET
3923 && SET_DEST (part) != reg)
3924 break;
3925 }
3926
3927 if (i == XVECLEN (pat, 0))
3928 delete_computation (our_prev);
3929 }
3930 else if (GET_CODE (pat) == SET
3931 && GET_CODE (SET_DEST (pat)) == REG)
3932 {
3933 int dest_regno = REGNO (SET_DEST (pat));
3934 int dest_endregno
3935 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
3936 ? HARD_REGNO_NREGS (dest_regno,
3937 GET_MODE (SET_DEST (pat))) : 1);
3938 int regno = REGNO (reg);
3939 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
3940 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
3941
3942 if (dest_regno >= regno
3943 && dest_endregno <= endregno)
3944 delete_computation (our_prev);
3945
3946 /* We may have a multi-word hard register and some, but not
3947 all, of the words of the register are needed in subsequent
3948 insns. Write REG_UNUSED notes for those parts that were not
3949 needed. */
3950 else if (dest_regno <= regno
3951 && dest_endregno >= endregno
3952 && ! find_regno_note (our_prev, REG_UNUSED, REGNO(reg)))
3953 {
3954 int i;
3955
3956 REG_NOTES (our_prev)
3957 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (our_prev));
3958
3959 for (i = dest_regno; i < dest_endregno; i++)
3960 if (! find_regno_note (our_prev, REG_UNUSED, i))
3961 break;
3962
3963 if (i == dest_endregno)
3964 delete_computation (our_prev);
3965 }
3966 }
3967
3968 break;
3969 }
3970
3971 /* If PAT references the register that dies here, it is an
3972 additional use. Hence any prior SET isn't dead. However, this
3973 insn becomes the new place for the REG_DEAD note. */
3974 if (reg_overlap_mentioned_p (reg, pat))
3975 {
3976 XEXP (note, 1) = REG_NOTES (our_prev);
3977 REG_NOTES (our_prev) = note;
3978 break;
3979 }
3980 }
3981 }
3982
3983 /* Delete INSN and recursively delete insns that compute values used only
3984 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3985 If we are running before flow.c, we need do nothing since flow.c will
3986 delete dead code. We also can't know if the registers being used are
3987 dead or not at this point.
3988
3989 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3990 nothing other than set a register that dies in this insn, we can delete
3991 that insn as well.
3992
3993 On machines with CC0, if CC0 is used in this insn, we may be able to
3994 delete the insn that set it. */
3995
3996 static void
3997 delete_computation (insn)
3998 rtx insn;
3999 {
4000 rtx note, next;
4001 rtx set;
4002
4003 #ifdef HAVE_cc0
4004 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
4005 {
4006 rtx prev = prev_nonnote_insn (insn);
4007 /* We assume that at this stage
4008 CC's are always set explicitly
4009 and always immediately before the jump that
4010 will use them. So if the previous insn
4011 exists to set the CC's, delete it
4012 (unless it performs auto-increments, etc.). */
4013 if (prev && GET_CODE (prev) == INSN
4014 && sets_cc0_p (PATTERN (prev)))
4015 {
4016 if (sets_cc0_p (PATTERN (prev)) > 0
4017 && ! side_effects_p (PATTERN (prev)))
4018 delete_computation (prev);
4019 else
4020 /* Otherwise, show that cc0 won't be used. */
4021 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
4022 cc0_rtx, REG_NOTES (prev));
4023 }
4024 }
4025 #endif
4026
4027 #ifdef INSN_SCHEDULING
4028 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
4029 reload has completed. The schedulers need to be fixed. Until
4030 they are, we must not rely on the death notes here. */
4031 if (reload_completed && flag_schedule_insns_after_reload)
4032 {
4033 delete_insn (insn);
4034 return;
4035 }
4036 #endif
4037
4038 set = single_set (insn);
4039
4040 for (note = REG_NOTES (insn); note; note = next)
4041 {
4042 next = XEXP (note, 1);
4043
4044 if (REG_NOTE_KIND (note) != REG_DEAD
4045 /* Verify that the REG_NOTE is legitimate. */
4046 || GET_CODE (XEXP (note, 0)) != REG)
4047 continue;
4048
4049 if (set && reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
4050 set = NULL_RTX;
4051
4052 delete_prior_computation (note, insn);
4053 }
4054
4055 /* The REG_DEAD note may have been omitted for a register
4056 which is both set and used by the insn. */
4057 if (set
4058 && GET_CODE (SET_DEST (set)) == REG
4059 && reg_mentioned_p (SET_DEST (set), SET_SRC (set)))
4060 {
4061 note = gen_rtx_EXPR_LIST (REG_DEAD, SET_DEST (set), NULL_RTX);
4062 delete_prior_computation (note, insn);
4063 }
4064
4065 delete_insn (insn);
4066 }
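
/* For example, when a conditional jump is deleted and the register it
   tested carries a REG_DEAD note, a preceding insn whose only effect
   was to compute that register is deleted as well, recursively.  */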
4067 \f
4068 /* Delete insn INSN from the chain of insns and update label ref counts.
4069 May delete some following insns as a consequence; may even delete
4070 a label elsewhere and insns that follow it.
4071
4072 Returns the first insn after INSN that was not deleted. */
4073
4074 rtx
4075 delete_insn (insn)
4076 register rtx insn;
4077 {
4078 register rtx next = NEXT_INSN (insn);
4079 register rtx prev = PREV_INSN (insn);
4080 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
4081 register int dont_really_delete = 0;
4082
4083 while (next && INSN_DELETED_P (next))
4084 next = NEXT_INSN (next);
4085
4086 /* This insn is already deleted => return first following nondeleted. */
4087 if (INSN_DELETED_P (insn))
4088 return next;
4089
4090 if (was_code_label)
4091 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
4092
4093 /* Don't delete user-declared labels. Convert them to special NOTEs
4094 instead. */
4095 if (was_code_label && LABEL_NAME (insn) != 0
4096 && optimize && ! dont_really_delete)
4097 {
4098 PUT_CODE (insn, NOTE);
4099 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
4100 NOTE_SOURCE_FILE (insn) = 0;
4101 dont_really_delete = 1;
4102 }
4103 else
4104 /* Mark this insn as deleted. */
4105 INSN_DELETED_P (insn) = 1;
4106
4107 /* If this is an unconditional jump, delete it from the jump chain. */
4108 if (simplejump_p (insn))
4109 delete_from_jump_chain (insn);
4110
4111 /* If instruction is followed by a barrier,
4112 delete the barrier too. */
4113
4114 if (next != 0 && GET_CODE (next) == BARRIER)
4115 {
4116 INSN_DELETED_P (next) = 1;
4117 next = NEXT_INSN (next);
4118 }
4119
4120 /* Patch out INSN (and the barrier if any). */
4121
4122 if (optimize && ! dont_really_delete)
4123 {
4124 if (prev)
4125 {
4126 NEXT_INSN (prev) = next;
4127 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
4128 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
4129 XVECLEN (PATTERN (prev), 0) - 1)) = next;
4130 }
4131
4132 if (next)
4133 {
4134 PREV_INSN (next) = prev;
4135 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
4136 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4137 }
4138
4139 if (prev && NEXT_INSN (prev) == 0)
4140 set_last_insn (prev);
4141 }
4142
4143 /* If deleting a jump, decrement the count of the label,
4144 and delete the label if it is now unused. */
4145
4146 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
4147 {
4148 rtx lab = JUMP_LABEL (insn), lab_next;
4149
4150 if (--LABEL_NUSES (lab) == 0)
4151 {
4152 /* This can delete NEXT or PREV,
4153 either directly if NEXT is JUMP_LABEL (INSN),
4154 or indirectly through more levels of jumps. */
4155 delete_insn (lab);
4156
4157 /* I feel a little doubtful about this loop,
4158 but I see no clean and sure alternative way
4159 to find the first insn after INSN that is not now deleted.
4160 I hope this works. */
4161 while (next && INSN_DELETED_P (next))
4162 next = NEXT_INSN (next);
4163 return next;
4164 }
4165 else if ((lab_next = next_nonnote_insn (lab)) != NULL
4166 && GET_CODE (lab_next) == JUMP_INSN
4167 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
4168 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
4169 {
4170 /* If we're deleting the tablejump, delete the dispatch table.
4171 We may not be able to kill the label immediately preceding
4172 just yet, as it might be referenced in code leading up to
4173 the tablejump. */
4174 delete_insn (lab_next);
4175 }
4176 }
4177
4178 /* Likewise if we're deleting a dispatch table. */
4179
4180 if (GET_CODE (insn) == JUMP_INSN
4181 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4182 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4183 {
4184 rtx pat = PATTERN (insn);
4185 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4186 int len = XVECLEN (pat, diff_vec_p);
4187
4188 for (i = 0; i < len; i++)
4189 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
4190 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
4191 while (next && INSN_DELETED_P (next))
4192 next = NEXT_INSN (next);
4193 return next;
4194 }
4195
4196 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
4197 prev = PREV_INSN (prev);
4198
4199 /* If INSN was a label and a dispatch table follows it,
4200 delete the dispatch table. The tablejump must have gone already.
4201 It isn't useful to fall through into a table. */
4202
4203 if (was_code_label
4204 && NEXT_INSN (insn) != 0
4205 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4206 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
4207 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
4208 next = delete_insn (NEXT_INSN (insn));
4209
4210 /* If INSN was a label, delete insns following it if now unreachable. */
4211
4212 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
4213 {
4214 register RTX_CODE code;
4215 while (next != 0
4216 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
4217 || code == NOTE || code == BARRIER
4218 || (code == CODE_LABEL && INSN_DELETED_P (next))))
4219 {
4220 if (code == NOTE
4221 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
4222 next = NEXT_INSN (next);
4223 /* Keep going past other deleted labels to delete what follows. */
4224 else if (code == CODE_LABEL && INSN_DELETED_P (next))
4225 next = NEXT_INSN (next);
4226 else
4227 /* Note: if this deletes a jump, it can cause more
4228 deletion of unreachable code, after a different label.
4229 As long as the value from this recursive call is correct,
4230 this invocation functions correctly. */
4231 next = delete_insn (next);
4232 }
4233 }
4234
4235 return next;
4236 }
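
/* Callers that scan the insn chain typically continue from the
   returned insn, e.g. "insn = delete_insn (insn);", since anything
   between INSN and the return value has also been deleted.  */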
4237
4238 /* Advance from INSN till reaching something not deleted
4239 then return that. May return INSN itself. */
4240
4241 rtx
4242 next_nondeleted_insn (insn)
4243 rtx insn;
4244 {
4245 while (INSN_DELETED_P (insn))
4246 insn = NEXT_INSN (insn);
4247 return insn;
4248 }
4249 \f
4250 /* Delete a range of insns from FROM to TO, inclusive.
4251 This is for the sake of peephole optimization, so assume
4252 that whatever these insns do will still be done by a new
4253 peephole insn that will replace them. */
4254
4255 void
4256 delete_for_peephole (from, to)
4257 register rtx from, to;
4258 {
4259 register rtx insn = from;
4260
4261 while (1)
4262 {
4263 register rtx next = NEXT_INSN (insn);
4264 register rtx prev = PREV_INSN (insn);
4265
4266 if (GET_CODE (insn) != NOTE)
4267 {
4268 INSN_DELETED_P (insn) = 1;
4269
4270 /* Patch this insn out of the chain. */
4271 /* We don't do this all at once, because we
4272 must preserve all NOTEs. */
4273 if (prev)
4274 NEXT_INSN (prev) = next;
4275
4276 if (next)
4277 PREV_INSN (next) = prev;
4278 }
4279
4280 if (insn == to)
4281 break;
4282 insn = next;
4283 }
4284
4285 /* Note that if TO is an unconditional jump
4286 we *do not* delete the BARRIER that follows,
4287 since the peephole that replaces this sequence
4288 is also an unconditional jump in that case. */
4289 }
4290 \f
4291 /* We have determined that INSN is never reached, and are about to
4292 delete it. Print a warning if the user asked for one.
4293
4294 To try to make this warning more useful, this should only be called
4295 once per basic block not reached, and it only warns when the basic
4296 block contains more than one line from the current function, and
4297 contains at least one operation. CSE and inlining can duplicate insns,
4298 so it's possible to get spurious warnings from this. */
4299
4300 void
4301 never_reached_warning (avoided_insn)
4302 rtx avoided_insn;
4303 {
4304 rtx insn;
4305 rtx a_line_note = NULL;
4306 int two_avoided_lines = 0;
4307 int contains_insn = 0;
4308
4309 if (! warn_notreached)
4310 return;
4311
4312 /* Scan forwards, looking at LINE_NUMBER notes, until
4313 we hit a LABEL or we run out of insns. */
4314
4315 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
4316 {
4317 if (GET_CODE (insn) == CODE_LABEL)
4318 break;
4319 else if (GET_CODE (insn) == NOTE /* A line number note? */
4320 && NOTE_LINE_NUMBER (insn) >= 0)
4321 {
4322 if (a_line_note == NULL)
4323 a_line_note = insn;
4324 else
4325 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
4326 != NOTE_LINE_NUMBER (insn));
4327 }
4328 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4329 contains_insn = 1;
4330 }
4331 if (two_avoided_lines && contains_insn)
4332 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
4333 NOTE_LINE_NUMBER (a_line_note),
4334 "will never be executed");
4335 }
4336 \f
4337 /* Invert the condition of the jump JUMP, and make it jump
4338 to label NLABEL instead of where it jumps now. */
4339
4340 int
4341 invert_jump (jump, nlabel)
4342 rtx jump, nlabel;
4343 {
4344 /* We have to either invert the condition and change the label or
4345 do neither. Either operation could fail. We first try to invert
4346 the jump. If that succeeds, we try changing the label. If that fails,
4347 we invert the jump back to what it was. */
4348
4349 if (! invert_exp (PATTERN (jump), jump))
4350 return 0;
4351
4352 if (redirect_jump (jump, nlabel))
4353 {
4354 if (flag_branch_probabilities)
4355 {
4356 rtx note = find_reg_note (jump, REG_BR_PROB, 0);
4357
4358 /* An inverted jump means that a probability taken becomes a
4359 probability not taken. Subtract the branch probability from the
4360 probability base to convert it back to a taken probability.
4361 (We don't flip the probability on a branch that's never taken.) */
4362 if (note && XINT (XEXP (note, 0), 0) >= 0)
4363 XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
4364 }
4365
4366 return 1;
4367 }
4368
4369 if (! invert_exp (PATTERN (jump), jump))
4370 /* This should just be putting it back the way it was. */
4371 abort ();
4372
4373 return 0;
4374 }
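
/* Schematically, inverting
     (set (pc) (if_then_else (eq X Y) (label_ref A) (pc)))
   and redirecting it to B yields
     (set (pc) (if_then_else (ne X Y) (label_ref B) (pc))),
   provided each intermediate form is recognized by the target.  */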
4375
4376 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4377
4378 Return 1 if we can do so, 0 if we cannot find a way to do so that
4379 matches a pattern. */
4380
4381 int
4382 invert_exp (x, insn)
4383 rtx x;
4384 rtx insn;
4385 {
4386 register RTX_CODE code;
4387 register int i;
4388 register const char *fmt;
4389
4390 code = GET_CODE (x);
4391
4392 if (code == IF_THEN_ELSE)
4393 {
4394 register rtx comp = XEXP (x, 0);
4395 register rtx tem;
4396
4397 /* We can do this in two ways: The preferable way, which can only be
4398 done when the comparison is reversible (in general it is not for
4399 IEEE floating point), is to reverse the comparison code.
4400 Otherwise, swap the THEN-part and ELSE-part of the IF_THEN_ELSE. If we can't do either, fail. */
4401
4402 if (can_reverse_comparison_p (comp, insn)
4403 && validate_change (insn, &XEXP (x, 0),
4404 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
4405 GET_MODE (comp), XEXP (comp, 0),
4406 XEXP (comp, 1)), 0))
4407 return 1;
4408
4409 tem = XEXP (x, 1);
4410 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
4411 validate_change (insn, &XEXP (x, 2), tem, 1);
4412 return apply_change_group ();
4413 }
4414
4415 fmt = GET_RTX_FORMAT (code);
4416 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4417 {
4418 if (fmt[i] == 'e')
4419 if (! invert_exp (XEXP (x, i), insn))
4420 return 0;
4421 if (fmt[i] == 'E')
4422 {
4423 register int j;
4424 for (j = 0; j < XVECLEN (x, i); j++)
4425 if (!invert_exp (XVECEXP (x, i, j), insn))
4426 return 0;
4427 }
4428 }
4429
4430 return 1;
4431 }
4432 \f
4433 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4434 If the old jump target label is unused as a result,
4435 it and the code following it may be deleted.
4436
4437 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4438 RETURN insn.
4439
4440 The return value will be 1 if the change was made, 0 if it wasn't (this
4441 can only occur for NLABEL == 0). */
4442
4443 int
4444 redirect_jump (jump, nlabel)
4445 rtx jump, nlabel;
4446 {
4447 register rtx olabel = JUMP_LABEL (jump);
4448
4449 if (nlabel == olabel)
4450 return 1;
4451
4452 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
4453 return 0;
4454
4455 /* If this is an unconditional branch, delete it from the jump_chain of
4456 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4457 have UID's in range and JUMP_CHAIN is valid). */
4458 if (jump_chain && (simplejump_p (jump)
4459 || GET_CODE (PATTERN (jump)) == RETURN))
4460 {
4461 int label_index = nlabel ? INSN_UID (nlabel) : 0;
4462
4463 delete_from_jump_chain (jump);
4464 if (label_index < max_jump_chain
4465 && INSN_UID (jump) < max_jump_chain)
4466 {
4467 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
4468 jump_chain[label_index] = jump;
4469 }
4470 }
4471
4472 JUMP_LABEL (jump) = nlabel;
4473 if (nlabel)
4474 ++LABEL_NUSES (nlabel);
4475
4476 if (olabel && --LABEL_NUSES (olabel) == 0)
4477 delete_insn (olabel);
4478
4479 return 1;
4480 }
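
/* For example, redirect_jump (jump, 0) attempts to rewrite
   (set (pc) (label_ref L)) as (return); this is the one case that can
   fail, typically because the target has no matching return pattern.  */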
4481
4482 /* Delete the instruction JUMP from any jump chain it might be on. */
4483
4484 static void
4485 delete_from_jump_chain (jump)
4486 rtx jump;
4487 {
4488 int index;
4489 rtx olabel = JUMP_LABEL (jump);
4490
4491 /* Handle unconditional jumps. */
4492 if (jump_chain && olabel != 0
4493 && INSN_UID (olabel) < max_jump_chain
4494 && simplejump_p (jump))
4495 index = INSN_UID (olabel);
4496 /* Handle return insns. */
4497 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4498 index = 0;
4499 else return;
4500
4501 if (jump_chain[index] == jump)
4502 jump_chain[index] = jump_chain[INSN_UID (jump)];
4503 else
4504 {
4505 rtx insn;
4506
4507 for (insn = jump_chain[index];
4508 insn != 0;
4509 insn = jump_chain[INSN_UID (insn)])
4510 if (jump_chain[INSN_UID (insn)] == jump)
4511 {
4512 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4513 break;
4514 }
4515 }
4516 }
4517
4518 /* If NLABEL is nonzero, throughout the rtx at LOC,
4519 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
4520 zero, alter (RETURN) to (LABEL_REF NLABEL).
4521
4522 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
4523 validity with validate_change. Convert (set (pc) (label_ref olabel))
4524 to (return).
4525
4526 Return 0 if we found a change we would like to make but it is invalid.
4527 Otherwise, return 1. */
4528
4529 int
4530 redirect_exp (loc, olabel, nlabel, insn)
4531 rtx *loc;
4532 rtx olabel, nlabel;
4533 rtx insn;
4534 {
4535 register rtx x = *loc;
4536 register RTX_CODE code = GET_CODE (x);
4537 register int i;
4538 register const char *fmt;
4539
4540 if (code == LABEL_REF)
4541 {
4542 if (XEXP (x, 0) == olabel)
4543 {
4544 if (nlabel)
4545 XEXP (x, 0) = nlabel;
4546 else
4547 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4548 return 1;
4549 }
4550 }
4551 else if (code == RETURN && olabel == 0)
4552 {
4553 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
4554 if (loc == &PATTERN (insn))
4555 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
4556 return validate_change (insn, loc, x, 0);
4557 }
4558
4559 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
4560 && GET_CODE (SET_SRC (x)) == LABEL_REF
4561 && XEXP (SET_SRC (x), 0) == olabel)
4562 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4563
4564 fmt = GET_RTX_FORMAT (code);
4565 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4566 {
4567 if (fmt[i] == 'e')
4568 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
4569 return 0;
4570 if (fmt[i] == 'E')
4571 {
4572 register int j;
4573 for (j = 0; j < XVECLEN (x, i); j++)
4574 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
4575 return 0;
4576 }
4577 }
4578
4579 return 1;
4580 }
4581 \f
4582 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4583
4584 If the old jump target label (before the dispatch table) becomes unused,
4585 it and the dispatch table may be deleted. In that case, find the insn
4586 before the jump references that label and delete it and logical successors
4587 too. */
4588
4589 static void
4590 redirect_tablejump (jump, nlabel)
4591 rtx jump, nlabel;
4592 {
4593 register rtx olabel = JUMP_LABEL (jump);
4594
4595 /* Add this jump to the jump_chain of NLABEL. */
4596 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4597 && INSN_UID (jump) < max_jump_chain)
4598 {
4599 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4600 jump_chain[INSN_UID (nlabel)] = jump;
4601 }
4602
4603 PATTERN (jump) = gen_jump (nlabel);
4604 JUMP_LABEL (jump) = nlabel;
4605 ++LABEL_NUSES (nlabel);
4606 INSN_CODE (jump) = -1;
4607
4608 if (--LABEL_NUSES (olabel) == 0)
4609 {
4610 delete_labelref_insn (jump, olabel, 0);
4611 delete_insn (olabel);
4612 }
4613 }
4614
4615 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4616 If we found one, delete it and then delete this insn if DELETE_THIS is
4617 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4618
4619 static int
4620 delete_labelref_insn (insn, label, delete_this)
4621 rtx insn, label;
4622 int delete_this;
4623 {
4624 int deleted = 0;
4625 rtx link;
4626
4627 if (GET_CODE (insn) != NOTE
4628 && reg_mentioned_p (label, PATTERN (insn)))
4629 {
4630 if (delete_this)
4631 {
4632 delete_insn (insn);
4633 deleted = 1;
4634 }
4635 else
4636 return 1;
4637 }
4638
4639 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4640 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4641 {
4642 if (delete_this)
4643 {
4644 delete_insn (insn);
4645 deleted = 1;
4646 }
4647 else
4648 return 1;
4649 }
4650
4651 return deleted;
4652 }
4653 \f
4654 /* Like rtx_equal_p except that it considers two REGs as equal
4655 if they renumber to the same value and considers two commutative
4656 operations to be the same if the order of the operands has been
4657 reversed.
4658
4659 ??? Addition is not commutative on the PA due to the weird implicit
4660 space register selection rules for memory addresses. Therefore, we
4661 don't consider a + b == b + a.
4662
4663 We could/should make this test a little tighter. Possibly only
4664 disabling it on the PA via some backend macro or only disabling this
4665 case when the PLUS is inside a MEM. */
4666
4667 int
4668 rtx_renumbered_equal_p (x, y)
4669 rtx x, y;
4670 {
4671 register int i;
4672 register RTX_CODE code = GET_CODE (x);
4673 register const char *fmt;
4674
4675 if (x == y)
4676 return 1;
4677
4678 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4679 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4680 && GET_CODE (SUBREG_REG (y)) == REG)))
4681 {
4682 int reg_x = -1, reg_y = -1;
4683 int word_x = 0, word_y = 0;
4684
4685 if (GET_MODE (x) != GET_MODE (y))
4686 return 0;
4687
4688 /* If we haven't done any renumbering, don't
4689 make any assumptions. */
4690 if (reg_renumber == 0)
4691 return rtx_equal_p (x, y);
4692
4693 if (code == SUBREG)
4694 {
4695 reg_x = REGNO (SUBREG_REG (x));
4696 word_x = SUBREG_WORD (x);
4697
4698 if (reg_renumber[reg_x] >= 0)
4699 {
4700 reg_x = reg_renumber[reg_x] + word_x;
4701 word_x = 0;
4702 }
4703 }
4704
4705 else
4706 {
4707 reg_x = REGNO (x);
4708 if (reg_renumber[reg_x] >= 0)
4709 reg_x = reg_renumber[reg_x];
4710 }
4711
4712 if (GET_CODE (y) == SUBREG)
4713 {
4714 reg_y = REGNO (SUBREG_REG (y));
4715 word_y = SUBREG_WORD (y);
4716
4717 if (reg_renumber[reg_y] >= 0)
4718 {
4719 reg_y = reg_renumber[reg_y];
4720 word_y = 0;
4721 }
4722 }
4723
4724 else
4725 {
4726 reg_y = REGNO (y);
4727 if (reg_renumber[reg_y] >= 0)
4728 reg_y = reg_renumber[reg_y];
4729 }
4730
4731 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4732 }
4733
4734 /* Now we have disposed of all the cases
4735 in which different rtx codes can match. */
4736 if (code != GET_CODE (y))
4737 return 0;
4738
4739 switch (code)
4740 {
4741 case PC:
4742 case CC0:
4743 case ADDR_VEC:
4744 case ADDR_DIFF_VEC:
4745 return 0;
4746
4747 case CONST_INT:
4748 return INTVAL (x) == INTVAL (y);
4749
4750 case LABEL_REF:
4751 /* We can't assume nonlocal labels have their following insns yet. */
4752 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4753 return XEXP (x, 0) == XEXP (y, 0);
4754
4755 /* Two label-refs are equivalent if they point at labels
4756 in the same position in the instruction stream. */
4757 return (next_real_insn (XEXP (x, 0))
4758 == next_real_insn (XEXP (y, 0)));
4759
4760 case SYMBOL_REF:
4761 return XSTR (x, 0) == XSTR (y, 0);
4762
4763 case CODE_LABEL:
4764 /* If we didn't match EQ equality above, they aren't the same. */
4765 return 0;
4766
4767 default:
4768 break;
4769 }
4770
4771 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4772
4773 if (GET_MODE (x) != GET_MODE (y))
4774 return 0;
4775
4776 /* For commutative operations, the RTXs match if the operands match in any
4777 order. Also handle the simple binary and unary cases without a loop.
4778
4779 ??? Don't consider PLUS a commutative operator; see comments above. */
4780 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4781 && code != PLUS)
4782 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4783 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4784 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4785 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4786 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4787 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4788 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4789 else if (GET_RTX_CLASS (code) == '1')
4790 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4791
4792 /* Compare the elements. If any pair of corresponding elements
4793 fails to match, return 0 for the whole thing. */
4794
4795 fmt = GET_RTX_FORMAT (code);
4796 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4797 {
4798 register int j;
4799 switch (fmt[i])
4800 {
4801 case 'w':
4802 if (XWINT (x, i) != XWINT (y, i))
4803 return 0;
4804 break;
4805
4806 case 'i':
4807 if (XINT (x, i) != XINT (y, i))
4808 return 0;
4809 break;
4810
4811 case 's':
4812 if (strcmp (XSTR (x, i), XSTR (y, i)))
4813 return 0;
4814 break;
4815
4816 case 'e':
4817 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4818 return 0;
4819 break;
4820
4821 case 'u':
4822 if (XEXP (x, i) != XEXP (y, i))
4823 return 0;
4824 /* fall through. */
4825 case '0':
4826 break;
4827
4828 case 'E':
4829 if (XVECLEN (x, i) != XVECLEN (y, i))
4830 return 0;
4831 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4832 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4833 return 0;
4834 break;
4835
4836 default:
4837 abort ();
4838 }
4839 }
4840 return 1;
4841 }
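
/* For example, if reg_renumber maps pseudo 100 to hard register 3,
   then (reg:SI 100) and (reg:SI 3) compare equal here, and
   (mult:SI A B) matches (mult:SI B A); PLUS, however, is deliberately
   not treated as commutative (see above).  */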
4842 \f
4843 /* If X is a hard register or equivalent to one or a subregister of one,
4844 return the hard register number. If X is a pseudo register that was not
4845 assigned a hard register, return the pseudo register number. Otherwise,
4846 return -1. Any rtx is valid for X. */
4847
4848 int
4849 true_regnum (x)
4850 rtx x;
4851 {
4852 if (GET_CODE (x) == REG)
4853 {
4854 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4855 return reg_renumber[REGNO (x)];
4856 return REGNO (x);
4857 }
4858 if (GET_CODE (x) == SUBREG)
4859 {
4860 int base = true_regnum (SUBREG_REG (x));
4861 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
4862 return SUBREG_WORD (x) + base;
4863 }
4864 return -1;
4865 }
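
/* For example, if pseudo 100 was assigned hard register 3, then
   true_regnum of (reg:SI 100) is 3, and true_regnum of a SUBREG of it
   is 3 plus the subreg word; an unallocated pseudo yields its own
   number, and anything else yields -1.  */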
4866 \f
4867 /* Optimize code of the form:
4868
4869 for (x = a[i]; x; ...)
4870 ...
4871 for (x = a[i]; x; ...)
4872 ...
4873 foo:
4874
4875 Loop optimize will change the above code into
4876
4877 if (x = a[i])
4878 for (;;)
4879 { ...; if (! (x = ...)) break; }
4880 if (x = a[i])
4881 for (;;)
4882 { ...; if (! (x = ...)) break; }
4883 foo:
4884
4885 In general, if the first test fails, the program can branch
4886 directly to `foo' and skip the second try which is doomed to fail.
4887 We run this after loop optimization and before flow analysis. */
4888
4889 /* When comparing the insn patterns, we track the fact that different
4890 pseudo-register numbers may have been used in each computation.
4891 The following array stores an equivalence -- same_regs[I] == J means
4892 that pseudo register I was used in the first set of tests in a context
4893 where J was used in the second set. We also count the number of such
4894 pending equivalences. If that count is still nonzero when we reach
4895 the first branch's target, the expressions really aren't the same. */
4896
4897 static int *same_regs;
4898
4899 static int num_same_regs;
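
/* For example, if the first sequence computes into pseudo 101 where
   the second uses pseudo 107, rtx_equal_for_thread_p records
   same_regs[101] = 107; the branch may be threaded only if every such
   pairing has been cancelled by a matching pair of SETs by the time
   we reach the first branch's target.  */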
4900
4901 /* Track any registers modified between the target of the first jump and
4902 the second jump. They never compare equal. */
4903
4904 static char *modified_regs;
4905
4906 /* Record if memory was modified. */
4907
4908 static int modified_mem;
4909
4910 /* Called via note_stores on each insn between the target of the first
4911 branch and the second branch. It marks any changed registers. */
4912
4913 static void
4914 mark_modified_reg (dest, x)
4915 rtx dest;
4916 rtx x ATTRIBUTE_UNUSED;
4917 {
4918 int regno, i;
4919
4920 if (GET_CODE (dest) == SUBREG)
4921 dest = SUBREG_REG (dest);
4922
4923 if (GET_CODE (dest) == MEM)
4924 modified_mem = 1;
4925
4926 if (GET_CODE (dest) != REG)
4927 return;
4928
4929 regno = REGNO (dest);
4930 if (regno >= FIRST_PSEUDO_REGISTER)
4931 modified_regs[regno] = 1;
4932 else
4933 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
4934 modified_regs[regno + i] = 1;
4935 }
4936
4937 /* F is the first insn in the chain of insns. MAX_REG sizes the
   register tables used below; FLAG_BEFORE_LOOP is nonzero when we
   run before loop optimization. */
4938
4939 void
4940 thread_jumps (f, max_reg, flag_before_loop)
4941 rtx f;
4942 int max_reg;
4943 int flag_before_loop;
4944 {
4945 /* Basic algorithm is to find a conditional branch,
4946 the label it may branch to, and the branch after
4947 that label. If the two branches test the same condition,
4948 walk back from both branch paths until the insn patterns
4949 differ, or code labels are hit. If we make it back to
4950 the target of the first branch, then we know that the first branch
4951 will either always succeed or always fail depending on the relative
4952 senses of the two branches. So adjust the first branch accordingly
4953 in this case. */
4954
4955 rtx label, b1, b2, t1, t2;
4956 enum rtx_code code1, code2;
4957 rtx b1op0, b1op1, b2op0, b2op1;
4958 int changed = 1;
4959 int i;
4960 int *all_reset;
4961
4962 /* Allocate register tables and quick-reset table. */
4963 modified_regs = (char *) alloca (max_reg * sizeof (char));
4964 same_regs = (int *) alloca (max_reg * sizeof (int));
4965 all_reset = (int *) alloca (max_reg * sizeof (int));
4966 for (i = 0; i < max_reg; i++)
4967 all_reset[i] = -1;
4968
4969 while (changed)
4970 {
4971 changed = 0;
4972
4973 for (b1 = f; b1; b1 = NEXT_INSN (b1))
4974 {
4975 /* Get to a candidate branch insn. */
4976 if (GET_CODE (b1) != JUMP_INSN
4977 || ! condjump_p (b1) || simplejump_p (b1)
4978 || JUMP_LABEL (b1) == 0)
4979 continue;
4980
4981 bzero (modified_regs, max_reg * sizeof (char));
4982 modified_mem = 0;
4983
4984 bcopy ((char *) all_reset, (char *) same_regs,
4985 max_reg * sizeof (int));
4986 num_same_regs = 0;
4987
4988 label = JUMP_LABEL (b1);
4989
4990 /* Look for a branch after the target. Record any registers and
4991 memory modified between the target and the branch. Stop when we
4992 get to a label since we can't know what was changed there. */
4993 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
4994 {
4995 if (GET_CODE (b2) == CODE_LABEL)
4996 break;
4997
4998 else if (GET_CODE (b2) == JUMP_INSN)
4999 {
5000 /* If this is an unconditional jump and is the only use of
5001 its target label, we can follow it. */
5002 if (simplejump_p (b2)
5003 && JUMP_LABEL (b2) != 0
5004 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
5005 {
5006 b2 = JUMP_LABEL (b2);
5007 continue;
5008 }
5009 else
5010 break;
5011 }
5012
5013 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
5014 continue;
5015
5016 if (GET_CODE (b2) == CALL_INSN)
5017 {
5018 modified_mem = 1;
5019 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5020 if (call_used_regs[i] && ! fixed_regs[i]
5021 && i != STACK_POINTER_REGNUM
5022 && i != FRAME_POINTER_REGNUM
5023 && i != HARD_FRAME_POINTER_REGNUM
5024 && i != ARG_POINTER_REGNUM)
5025 modified_regs[i] = 1;
5026 }
5027
5028 note_stores (PATTERN (b2), mark_modified_reg);
5029 }
5030
5031 /* Check the next candidate branch insn from the label
5032 of the first. */
5033 if (b2 == 0
5034 || GET_CODE (b2) != JUMP_INSN
5035 || b2 == b1
5036 || ! condjump_p (b2)
5037 || simplejump_p (b2))
5038 continue;
5039
5040 /* Get the comparison codes and operands, reversing the
5041 codes if appropriate. If we don't have comparison codes,
5042 we can't do anything. */
5043 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
5044 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
5045 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
5046 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
5047 code1 = reverse_condition (code1);
5048
5049 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
5050 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
5051 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
5052 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
5053 code2 = reverse_condition (code2);
5054
5055 /* If they test the same things and knowing that B1 branches
5056 tells us whether or not B2 branches, check if we
5057 can thread the branch. */
5058 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
5059 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
5060 && (comparison_dominates_p (code1, code2)
5061 || (comparison_dominates_p (code1, reverse_condition (code2))
5062 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
5063 0),
5064 b1))))
5065 {
5066 t1 = prev_nonnote_insn (b1);
5067 t2 = prev_nonnote_insn (b2);
5068
5069 while (t1 != 0 && t2 != 0)
5070 {
5071 if (t2 == label)
5072 {
5073 /* We have reached the target of the first branch.
5074 If there are no pending register equivalents,
5075 we know that this branch will either always
5076 succeed (if the senses of the two branches are
5077 the same) or always fail (if not). */
5078 rtx new_label;
5079
5080 if (num_same_regs != 0)
5081 break;
5082
5083 if (comparison_dominates_p (code1, code2))
5084 new_label = JUMP_LABEL (b2);
5085 else
5086 new_label = get_label_after (b2);
5087
5088 if (JUMP_LABEL (b1) != new_label)
5089 {
5090 rtx prev = PREV_INSN (new_label);
5091
5092 if (flag_before_loop
5093 && GET_CODE (prev) == NOTE
5094 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
5095 {
5096 /* Don't thread to the loop label. If a loop
5097 label is reused, loop optimization will
5098 be disabled for that loop. */
5099 new_label = gen_label_rtx ();
5100 emit_label_after (new_label, PREV_INSN (prev));
5101 }
5102 changed |= redirect_jump (b1, new_label);
5103 }
5104 break;
5105 }
5106
5107 /* If either of these is not a normal insn (it might be
5108 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
5109 have already been skipped above.) Similarly, fail
5110 if the insns are different. */
5111 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
5112 || recog_memoized (t1) != recog_memoized (t2)
5113 || ! rtx_equal_for_thread_p (PATTERN (t1),
5114 PATTERN (t2), t2))
5115 break;
5116
5117 t1 = prev_nonnote_insn (t1);
5118 t2 = prev_nonnote_insn (t2);
5119 }
5120 }
5121 }
5122 }
5123 }
5124 \f
5125 /* This is like RTX_EQUAL_P except that it knows about our handling of
5126 possibly equivalent registers and knows to consider volatile and
5127 modified objects as not equal.
5128
5129 YINSN is the insn containing Y. */
5130
5131 int
5132 rtx_equal_for_thread_p (x, y, yinsn)
5133 rtx x, y;
5134 rtx yinsn;
5135 {
5136 register int i;
5137 register int j;
5138 register enum rtx_code code;
5139 register const char *fmt;
5140
5141 code = GET_CODE (x);
5142 /* Rtx's of different codes cannot be equal. */
5143 if (code != GET_CODE (y))
5144 return 0;
5145
5146 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
5147 (REG:SI x) and (REG:HI x) are NOT equivalent. */
5148
5149 if (GET_MODE (x) != GET_MODE (y))
5150 return 0;
5151
5152 /* For IEEE floating point, consider everything unequal unless
5153 -ffast-math is in effect. This is a bit pessimistic, but this
5154 pass would only rarely do anything for FP anyway. */
5155 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
5156 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
5157 return 0;
5158
5159 /* For commutative operations, the RTXs match if the operands match in any
5160 order. Also handle the simple binary and unary cases without a loop. */
5161 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5162 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5163 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
5164 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
5165 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
5166 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
5167 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5168 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
5169 else if (GET_RTX_CLASS (code) == '1')
5170 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5171
5172 /* Handle special-cases first. */
5173 switch (code)
5174 {
5175 case REG:
5176 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
5177 return 1;
5178
5179 /* If neither is a user variable nor a hard register, check for possible
5180 equivalence. */
5181 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
5182 || REGNO (x) < FIRST_PSEUDO_REGISTER
5183 || REGNO (y) < FIRST_PSEUDO_REGISTER)
5184 return 0;
5185
5186 if (same_regs[REGNO (x)] == -1)
5187 {
5188 same_regs[REGNO (x)] = REGNO (y);
5189 num_same_regs++;
5190
5191 /* If this is the first time we are seeing a register on the `Y'
5192 side, see if it is the last use. If not, we can't thread the
5193 jump, so mark it as not equivalent. */
5194 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
5195 return 0;
5196
5197 return 1;
5198 }
5199 else
5200 return (same_regs[REGNO (x)] == REGNO (y));
5201
5202 break;
5203
5204 case MEM:
5205 /* If memory modified or either volatile, not equivalent.
5206 Else, check address. */
5207 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5208 return 0;
5209
5210 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5211
5212 case ASM_INPUT:
5213 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5214 return 0;
5215
5216 break;
5217
5218 case SET:
5219 /* Cancel a pending `same_regs' if setting equivalenced registers.
5220 Then process source. */
5221 if (GET_CODE (SET_DEST (x)) == REG
5222 && GET_CODE (SET_DEST (y)) == REG)
5223 {
5224 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
5225 {
5226 same_regs[REGNO (SET_DEST (x))] = -1;
5227 num_same_regs--;
5228 }
5229 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
5230 return 0;
5231 }
5232 else
5233 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
5234 return 0;
5235
5236 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
5237
5238 case LABEL_REF:
5239 return XEXP (x, 0) == XEXP (y, 0);
5240
5241 case SYMBOL_REF:
5242 return XSTR (x, 0) == XSTR (y, 0);
5243
5244 default:
5245 break;
5246 }
5247
5248 if (x == y)
5249 return 1;
5250
5251 fmt = GET_RTX_FORMAT (code);
5252 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5253 {
5254 switch (fmt[i])
5255 {
5256 case 'w':
5257 if (XWINT (x, i) != XWINT (y, i))
5258 return 0;
5259 break;
5260
5261 case 'n':
5262 case 'i':
5263 if (XINT (x, i) != XINT (y, i))
5264 return 0;
5265 break;
5266
5267 case 'V':
5268 case 'E':
5269 /* Two vectors must have the same length. */
5270 if (XVECLEN (x, i) != XVECLEN (y, i))
5271 return 0;
5272
5273 /* And the corresponding elements must match. */
5274 for (j = 0; j < XVECLEN (x, i); j++)
5275 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
5276 XVECEXP (y, i, j), yinsn) == 0)
5277 return 0;
5278 break;
5279
5280 case 'e':
5281 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
5282 return 0;
5283 break;
5284
5285 case 'S':
5286 case 's':
5287 if (strcmp (XSTR (x, i), XSTR (y, i)))
5288 return 0;
5289 break;
5290
5291 case 'u':
5292 /* These are just backpointers, so they don't matter. */
5293 break;
5294
5295 case '0':
5296 case 't':
5297 break;
5298
5299 /* It is believed that rtx's at this level will never
5300 contain anything but integers and other rtx's,
5301 except for within LABEL_REFs and SYMBOL_REFs. */
5302 default:
5303 abort ();
5304 }
5305 }
5306 return 1;
5307 }
5308 \f
5309
5310 #ifndef HAVE_cc0
5311 /* Return the insn that NEW can be safely inserted in front of starting at
5312 the jump insn INSN. Return 0 if it is not safe to do this jump
5313 optimization. Note that NEW must contain a single set. */
5314
5315 static rtx
5316 find_insert_position (insn, new)
5317 rtx insn;
5318 rtx new;
5319 {
5320 int i;
5321 rtx prev;
5322
5323 /* If NEW does not clobber, it is safe to insert NEW before INSN. */
5324 if (GET_CODE (PATTERN (new)) != PARALLEL)
5325 return insn;
5326
5327 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5328 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5329 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5330 insn))
5331 break;
5332
5333 if (i < 0)
5334 return insn;
5335
5336 /* There is a good chance that the previous insn PREV sets the thing
5337 being clobbered (often the CC in a hard reg). If PREV does not
5338 use what NEW sets, we can insert NEW before PREV. */
5339
5340 prev = prev_active_insn (insn);
5341 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5342 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5343 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5344 insn)
5345 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5346 prev))
5347 return 0;
5348
5349 return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
5350 }
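
/* For example, if NEW clobbers the hard register holding the condition
   codes and INSN tests them, NEW cannot go directly before INSN; but
   if the previous active insn is the comparison that sets those
   condition codes and does not use what NEW sets, NEW may be placed
   before that comparison instead.  */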
5351 #endif /* !HAVE_cc0 */