/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This is the jump-optimization pass of the compiler.
   It is run two or three times: once before cse, sometimes once after cse,
   and once after reload (before final).

   jump_optimize deletes unreachable code and labels that are not used.
   It also deletes jumps that jump to the following insn,
   and simplifies jumps around unconditional jumps and jumps
   to unconditional jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   Optionally, cross-jumping can be done.  Currently it is done
   only in the last pass (after reload and before final).
   In fact, the code for cross-jumping now assumes that register
   allocation has been done, since it uses `rtx_renumbered_equal_p'.

   Jump optimization is done after cse when cse's constant-propagation
   causes jumps to become unconditional or to be deleted.

   Unreachable loops are not detected here, because the labels
   have references and the insns appear reachable from the labels.
   find_basic_blocks in flow.c finds and deletes such loops.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"

/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */

static rtx *jump_chain;
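
/* For illustration only: the chain layout above implies the following
   traversal idiom for walking every unconditional jump to a given
   label (a sketch assumed from the description, not a routine defined
   in this file):

	for (insn = jump_chain[INSN_UID (label)]; insn;
	     insn = jump_chain[INSN_UID (insn)])
	  ... process one unconditional jump to LABEL ...  */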

/* Maximum index in jump_chain.  */

static int max_jump_chain;

/* Set nonzero by jump_optimize if control can fall through
   to the end of the function.  */
int can_reach_end;

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C,
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;

static int init_label_info PROTO((rtx));
static void delete_barrier_successors PROTO((rtx));
static void mark_all_labels PROTO((rtx, int));
static rtx delete_unreferenced_labels PROTO((rtx));
static void delete_noop_moves PROTO((rtx));
static int calculate_can_reach_end PROTO((rtx, int, int));
static int duplicate_loop_exit_test PROTO((rtx));
static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump PROTO((rtx, rtx, rtx));
static int jump_back_p PROTO((rtx, rtx));
static int tension_vector_labels PROTO((rtx, int));
static void mark_jump_label PROTO((rtx, rtx, int));
static void delete_computation PROTO((rtx));
static void delete_from_jump_chain PROTO((rtx));
static int delete_labelref_insn PROTO((rtx, rtx, int));
static void mark_modified_reg PROTO((rtx, rtx));
static void redirect_tablejump PROTO((rtx, rtx));
static void jump_optimize_1 PROTO ((rtx, int, int, int, int));
#ifndef HAVE_cc0
static rtx find_insert_position PROTO((rtx, rtx));
#endif

/* Main external entry point into the jump optimizer.  See comments before
   jump_optimize_1 for descriptions of the arguments.  */
void
jump_optimize (f, cross_jump, noop_moves, after_regscan)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
{
  jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0);
}

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 1);
}

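/* Usage sketch (hypothetical calls, shown only to illustrate the two
   entry points above; the real callers live in other files such as
   toplev.c):

	jump_optimize (get_insns (), 1, 1, 0);	   full optimization
	rebuild_jump_labels (get_insns ());	   labels/notes only

   Here `get_insns' is assumed, as elsewhere in the compiler, to return
   the first insn of the current function.  */
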
\f
/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is 1, detect matching code
   before a jump and its destination and unify them.
   If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

   If NOOP_MOVES is nonzero, delete no-op move insns.

   If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
   after regscan, and it is safe to use regno_first_uid and regno_last_uid.

   If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
   and JUMP_LABEL field for jumping insns.

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.  */

static void
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
     int mark_labels_only;
{
  register rtx insn, next;
  int changed;
  int old_max_reg;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;

  cross_jump_death_matters = (cross_jump == 2);
  max_uid = init_label_info (f) + 1;

  /* If we are performing cross jump optimizations, then initialize
     tables mapping UIDs to EH regions to avoid incorrect movement
     of insns from one EH region to another.  */
  if (flag_exceptions && cross_jump)
    init_insn_eh_region (f, max_uid);

  delete_barrier_successors (f);

  /* Leave some extra room for labels and duplicate exit test insns
     we make.  */
  max_jump_chain = max_uid * 14 / 10;
  jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
  bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));

  mark_all_labels (f, cross_jump);

  /* Keep track of labels used from static data;
     they cannot ever be deleted.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  check_exception_handler_labels ();

  /* Keep track of labels used for marking handlers for exception
     regions; they cannot usually be deleted.  */

  for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
     notes and recompute LABEL_NUSES.  */
  if (mark_labels_only)
    return;

  exception_optimize ();

  last_insn = delete_unreferenced_labels (f);

  if (!optimize)
    {
      /* CAN_REACH_END is persistent for each function.  Once set it should
	 not be cleared.  This is especially true for the case where we
	 delete the NOTE_FUNCTION_END note.  CAN_REACH_END is cleared by
	 the front-end before compiling each function.  */
      if (calculate_can_reach_end (last_insn, 1, 0))
	can_reach_end = 1;

      /* Zero the "deleted" flag of all the "deleted" insns.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
	INSN_DELETED_P (insn) = 0;

      /* Show that the jump chain is not valid.  */
      jump_chain = 0;
      return;
    }

#ifdef HAVE_return
  if (HAVE_return)
    {
      /* If we fall through to the epilogue, see if we can insert a RETURN insn
	 in front of it.  If the machine allows it at this point (we might be
	 after reload for a leaf routine), it will improve optimization for it
	 to be there.  */
      insn = get_last_insn ();
      while (insn && GET_CODE (insn) == NOTE)
	insn = PREV_INSN (insn);

      if (insn && GET_CODE (insn) != BARRIER)
	{
	  emit_jump_insn (gen_return ());
	  emit_barrier ();
	}
    }
#endif

  if (noop_moves)
    delete_noop_moves (f);

  /* If we haven't yet gotten to reload and we have just run regscan,
     delete any insn that sets a register that isn't used elsewhere.
     This helps some of the optimizations below by having less insns
     being jumped around.  */

  if (! reload_completed && after_regscan)
    for (insn = f; insn; insn = next)
      {
	rtx set = single_set (insn);

	next = NEXT_INSN (insn);

	if (set && GET_CODE (SET_DEST (set)) == REG
	    && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
	    && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    /* We use regno_last_note_uid so as not to delete the setting
	       of a reg that's used in notes.  A subsequent optimization
	       might arrange to use that reg for real.  */
	    && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    && ! side_effects_p (SET_SRC (set))
	    && ! find_reg_note (insn, REG_RETVAL, 0))
	  delete_insn (insn);
      }

  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  changed = 1;
  old_max_reg = max_reg_num ();
  while (changed)
    {
      changed = 0;

      for (insn = f; insn; insn = next)
	{
	  rtx reallabelprev;
	  rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
	  rtx nlabel;
	  int this_is_simplejump, this_is_condjump, reversep = 0;
	  int this_is_condjump_in_parallel;

#if 0
	  /* If NOT the first iteration, if this is the last jump pass
	     (just before final), do the special peephole optimizations.
	     Avoiding the first iteration gives ordinary jump opts
	     a chance to work before peephole opts.  */

	  if (reload_completed && !first && !flag_no_peephole)
	    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
	      peephole (insn);
#endif

	  /* That could have deleted some insns after INSN, so check now
	     what the following insn is.  */

	  next = NEXT_INSN (insn);

	  /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
	     jump.  Try to optimize by duplicating the loop exit test if so.
	     This is only safe immediately after regscan, because it uses
	     the values of regno_first_uid and regno_last_uid.  */
	  if (after_regscan && GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      && (temp1 = next_nonnote_insn (insn)) != 0
	      && simplejump_p (temp1))
	    {
	      temp = PREV_INSN (insn);
	      if (duplicate_loop_exit_test (insn))
		{
		  changed = 1;
		  next = NEXT_INSN (temp);
		  continue;
		}
	    }

	  if (GET_CODE (insn) != JUMP_INSN)
	    continue;

	  this_is_simplejump = simplejump_p (insn);
	  this_is_condjump = condjump_p (insn);
	  this_is_condjump_in_parallel = condjump_in_parallel_p (insn);

	  /* Tension the labels in dispatch tables.  */

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 0);
	  if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 1);

	  /* If a dispatch table always goes to the same place,
	     get rid of it and replace the insn that uses it.  */

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      int i;
	      rtx pat = PATTERN (insn);
	      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
	      int len = XVECLEN (pat, diff_vec_p);
	      rtx dispatch = prev_real_insn (insn);

	      for (i = 0; i < len; i++)
		if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
		    != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
		  break;
	      if (i == len
		  && dispatch != 0
		  && GET_CODE (dispatch) == JUMP_INSN
		  && JUMP_LABEL (dispatch) != 0
		  /* Don't mess with a casesi insn.  */
		  && !(GET_CODE (PATTERN (dispatch)) == SET
		       && (GET_CODE (SET_SRC (PATTERN (dispatch)))
			   == IF_THEN_ELSE))
		  && next_real_insn (JUMP_LABEL (dispatch)) == insn)
		{
		  redirect_tablejump (dispatch,
				      XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
		  changed = 1;
		}
	    }

	  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

	  /* If a jump references the end of the function, try to turn
	     it into a RETURN insn, possibly a conditional one.  */
	  if (JUMP_LABEL (insn)
	      && (next_active_insn (JUMP_LABEL (insn)) == 0
		  || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
		      == RETURN))
	    changed |= redirect_jump (insn, NULL_RTX);

	  /* Detect jump to following insn.  */
	  if (reallabelprev == insn && condjump_p (insn))
	    {
	      next = next_real_insn (JUMP_LABEL (insn));
	      delete_jump (insn);
	      changed = 1;
	      continue;
	    }

	  /* If we have an unconditional jump preceded by a USE, try to put
	     the USE before the target and jump there.  This simplifies many
	     of the optimizations below since we don't have to worry about
	     dealing with these USE insns.  We only do this if the label
	     being branched to already has the identical USE or if code
	     never falls through to that label.  */

	  if (this_is_simplejump
	      && (temp = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
	      && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
	      && (GET_CODE (temp1) == BARRIER
		  || (GET_CODE (temp1) == INSN
		      && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
	      /* Don't do this optimization if we have a loop containing
		 only the USE instruction, and the loop start label has
		 a usage count of 1.  This is because we will redo this
		 optimization every time through the outer loop, and jump
		 opt will never exit.  */
	      && ! ((temp2 = prev_nonnote_insn (temp)) != 0
		    && temp2 == JUMP_LABEL (insn)
		    && LABEL_NUSES (temp2) == 1))
	    {
	      if (GET_CODE (temp1) == BARRIER)
		{
		  emit_insn_after (PATTERN (temp), temp1);
		  temp1 = NEXT_INSN (temp1);
		}

	      delete_insn (temp);
	      redirect_jump (insn, get_label_before (temp1));
	      reallabelprev = prev_real_insn (temp1);
	      changed = 1;
	    }

	  /* Simplify   if (...) x = a; else x = b; by converting it
	     to         x = b; if (...) x = a;
	     if B is sufficiently simple, the test doesn't involve X,
	     and nothing in the test modifies B or X.

	     If we have small register classes, we also can't do this if X
	     is a hard register.

	     If the "x = b;" insn has any REG_NOTES, we don't do this because
	     of the possibility that we are running after CSE and there is a
	     REG_EQUAL note that is only valid if the branch has already been
	     taken.  If we move the insn with the REG_EQUAL note, we may
	     fold the comparison to always be false in a later CSE pass.
	     (We could also delete the REG_NOTES when moving the insn, but it
	     seems simpler to not move it.)  An exception is that we can move
	     the insn if the only note is a REG_EQUAL or REG_EQUIV whose
	     value is the same as "b".

	     INSN is the branch over the `else' part.

	     We set:

	     TEMP to the jump insn preceding "x = a;"
	     TEMP1 to X
	     TEMP2 to the insn that sets "x = b;"
	     TEMP3 to the insn that sets "x = a;"
	     TEMP4 to the set of "x = b";  */
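
	  /* Schematically (an illustration, not literal RTL), the insn
	     stream handled here looks like

		TEMP:   if (!cond) goto L1;
		TEMP3:  x = a;
		INSN:   goto L2;
		L1:
		TEMP2:  x = b;
		L2:     ...

	     and when the tests below succeed, "x = b;" is re-emitted in
	     front of the whole test, TEMP2 and INSN are deleted, and the
	     remaining jumps are adjusted as described in the code.  */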

	  if (this_is_simplejump
	      && (temp3 = prev_active_insn (insn)) != 0
	      && GET_CODE (temp3) == INSN
	      && (temp4 = single_set (temp3)) != 0
	      && GET_CODE (temp1 = SET_DEST (temp4)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && (temp2 = next_active_insn (insn)) != 0
	      && GET_CODE (temp2) == INSN
	      && (temp4 = single_set (temp2)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp1)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && (REG_NOTES (temp2) == 0
		  || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
		       || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
		      && XEXP (REG_NOTES (temp2), 1) == 0
		      && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
				      SET_SRC (temp4))))
	      && (temp = prev_active_insn (temp3)) != 0
	      && condjump_p (temp) && ! simplejump_p (temp)
	      /* TEMP must skip over the "x = a;" insn */
	      && prev_real_insn (JUMP_LABEL (temp)) == insn
	      && no_labels_between_p (insn, JUMP_LABEL (temp))
	      /* There must be no other entries to the "x = b;" insn.  */
	      && no_labels_between_p (JUMP_LABEL (temp), temp2)
	      /* INSN must either branch to the insn after TEMP2 or the insn
		 after TEMP2 must branch to the same place as INSN.  */
	      && (reallabelprev == temp2
		  || ((temp5 = next_active_insn (temp2)) != 0
		      && simplejump_p (temp5)
		      && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
	    {
	      /* The test expression, X, may be a complicated test with
		 multiple branches.  See if we can find all the uses of
		 the label that TEMP branches to without hitting a CALL_INSN
		 or a jump to somewhere else.  */
	      rtx target = JUMP_LABEL (temp);
	      int nuses = LABEL_NUSES (target);
	      rtx p;
#ifdef HAVE_cc0
	      rtx q;
#endif

	      /* Set P to the first jump insn that goes around "x = a;".  */
	      for (p = temp; nuses && p; p = prev_nonnote_insn (p))
		{
		  if (GET_CODE (p) == JUMP_INSN)
		    {
		      if (condjump_p (p) && ! simplejump_p (p)
			  && JUMP_LABEL (p) == target)
			{
			  nuses--;
			  if (nuses == 0)
			    break;
			}
		      else
			break;
		    }
		  else if (GET_CODE (p) == CALL_INSN)
		    break;
		}

#ifdef HAVE_cc0
	      /* We cannot insert anything between a set of cc and its use,
		 so if P uses cc0, we must back up to the previous insn.  */
	      q = prev_nonnote_insn (p);
	      if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
		  && sets_cc0_p (PATTERN (q)))
		p = q;
#endif

	      if (p)
		p = PREV_INSN (p);

	      /* If we found all the uses and there was no data conflict, we
		 can move the assignment unless we can branch into the middle
		 from somewhere.  */
	      if (nuses == 0 && p
		  && no_labels_between_p (p, insn)
		  && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
		  && ! reg_set_between_p (temp1, p, temp3)
		  && (GET_CODE (SET_SRC (temp4)) == CONST_INT
		      || ! modified_between_p (SET_SRC (temp4), p, temp2))
		  /* Verify that registers used by the jump are not clobbered
		     by the instruction being moved.  */
		  && ! regs_set_between_p (PATTERN (temp),
					   PREV_INSN (temp2),
					   NEXT_INSN (temp2)))
		{
		  emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
		  delete_insn (temp2);

		  /* Set NEXT to an insn that we know won't go away.  */
		  next = next_active_insn (insn);

		  /* Delete the jump around the set.  Note that we must do
		     this before we redirect the test jumps so that it won't
		     delete the code immediately following the assignment
		     we moved (which might be a jump).  */

		  delete_insn (insn);

		  /* We either have two consecutive labels or a jump to
		     a jump, so adjust all the JUMP_INSNs to branch to where
		     INSN branches to.  */
		  for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
		    if (GET_CODE (p) == JUMP_INSN)
		      redirect_jump (p, target);

		  changed = 1;
		  continue;
		}
	    }

	  /* Simplify   if (...) { x = a; goto l; } x = b; by converting it
	     to         x = a; if (...) goto l; x = b;
	     if A is sufficiently simple, the test doesn't involve X,
	     and nothing in the test modifies A or X.

	     If we have small register classes, we also can't do this if X
	     is a hard register.

	     If the "x = a;" insn has any REG_NOTES, we don't do this because
	     of the possibility that we are running after CSE and there is a
	     REG_EQUAL note that is only valid if the branch has already been
	     taken.  If we move the insn with the REG_EQUAL note, we may
	     fold the comparison to always be false in a later CSE pass.
	     (We could also delete the REG_NOTES when moving the insn, but it
	     seems simpler to not move it.)  An exception is that we can move
	     the insn if the only note is a REG_EQUAL or REG_EQUIV whose
	     value is the same as "a".

	     INSN is the goto.

	     We set:

	     TEMP to the jump insn preceding "x = a;"
	     TEMP1 to X
	     TEMP2 to the insn that sets "x = b;"
	     TEMP3 to the insn that sets "x = a;"
	     TEMP4 to the set of "x = a";  */

	  if (this_is_simplejump
	      && (temp2 = next_active_insn (insn)) != 0
	      && GET_CODE (temp2) == INSN
	      && (temp4 = single_set (temp2)) != 0
	      && GET_CODE (temp1 = SET_DEST (temp4)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && (temp3 = prev_active_insn (insn)) != 0
	      && GET_CODE (temp3) == INSN
	      && (temp4 = single_set (temp3)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp1)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && (REG_NOTES (temp3) == 0
		  || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
		       || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
		      && XEXP (REG_NOTES (temp3), 1) == 0
		      && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
				      SET_SRC (temp4))))
	      && (temp = prev_active_insn (temp3)) != 0
	      && condjump_p (temp) && ! simplejump_p (temp)
	      /* TEMP must skip over the "x = a;" insn */
	      && prev_real_insn (JUMP_LABEL (temp)) == insn
	      && no_labels_between_p (temp, insn))
	    {
	      rtx prev_label = JUMP_LABEL (temp);
	      rtx insert_after = prev_nonnote_insn (temp);

#ifdef HAVE_cc0
	      /* We cannot insert anything between a set of cc and its use.  */
	      if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
		  && sets_cc0_p (PATTERN (insert_after)))
		insert_after = prev_nonnote_insn (insert_after);
#endif
	      ++LABEL_NUSES (prev_label);

	      if (insert_after
		  && no_labels_between_p (insert_after, temp)
		  && ! reg_referenced_between_p (temp1, insert_after, temp3)
		  && ! reg_referenced_between_p (temp1, temp3,
						 NEXT_INSN (temp2))
		  && ! reg_set_between_p (temp1, insert_after, temp)
		  && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
		  /* Verify that registers used by the jump are not clobbered
		     by the instruction being moved.  */
		  && ! regs_set_between_p (PATTERN (temp),
					   PREV_INSN (temp3),
					   NEXT_INSN (temp3))
		  && invert_jump (temp, JUMP_LABEL (insn)))
		{
		  emit_insn_after_with_line_notes (PATTERN (temp3),
						   insert_after, temp3);
		  delete_insn (temp3);
		  delete_insn (insn);
		  /* Set NEXT to an insn that we know won't go away.  */
		  next = temp2;
		  changed = 1;
		}
	      if (prev_label && --LABEL_NUSES (prev_label) == 0)
		delete_insn (prev_label);
	      if (changed)
		continue;
	    }

#ifndef HAVE_cc0
	  /* If we have if (...) x = exp;  and branches are expensive,
	     EXP is a single insn, does not have any side effects, cannot
	     trap, and is not too costly, convert this to
	     t = exp; if (...) x = t;

	     Don't do this when we have CC0 because it is unlikely to help
	     and we'd need to worry about where to place the new insn and
	     the potential for conflicts.  We also can't do this when we have
	     notes on the insn for the same reason as above.

	     We set:

	     TEMP to the "x = exp;" insn.
	     TEMP1 to the single set in the "x = exp;" insn.
	     TEMP2 to "x".  */

	  if (! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 3
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (reallabelprev == temp
		  || ((temp2 = next_active_insn (temp)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && GET_CODE (SET_SRC (temp1)) != REG
	      && GET_CODE (SET_SRC (temp1)) != SUBREG
	      && GET_CODE (SET_SRC (temp1)) != CONST_INT
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp3 = find_insert_position (insn, temp))
		  && validate_change (temp, &SET_DEST (temp1), new, 0))
		{
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp3), temp);
		  delete_insn (temp);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }

	  /* Similarly, if it takes two insns to compute EXP but they
	     have the same destination.  Here TEMP3 will be the second
	     insn and TEMP4 the SET from that insn.  */

	  if (! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 4
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (temp3 = next_nonnote_insn (temp)) != 0
	      && GET_CODE (temp3) == INSN
	      && REG_NOTES (temp3) == 0
	      && (reallabelprev == temp3
		  || ((temp2 = next_active_insn (temp3)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
	      && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10
	      && (temp4 = single_set (temp3)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp2)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && rtx_cost (SET_SRC (temp4), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp5 = find_insert_position (insn, temp))
		  && (temp6 = find_insert_position (insn, temp3))
		  && validate_change (temp, &SET_DEST (temp1), new, 0))
		{
		  /* Use the earliest of temp5 and temp6.  */
		  if (temp5 != insn)
		    temp6 = temp5;
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp6), temp);
		  emit_insn_after_with_line_notes
		    (replace_rtx (PATTERN (temp3), temp2, new),
		     PREV_INSN (temp6), temp3);
		  delete_insn (temp);
		  delete_insn (temp3);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }

	  /* Finally, handle the case where two insns are used to
	     compute EXP but a temporary register is used.  Here we must
	     ensure that the temporary register is not used anywhere else.  */

	  if (! reload_completed
	      && after_regscan
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 4
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (temp3 = next_nonnote_insn (temp)) != 0
	      && GET_CODE (temp3) == INSN
	      && REG_NOTES (temp3) == 0
	      && (reallabelprev == temp3
		  || ((temp2 = next_active_insn (temp3)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp5 = SET_DEST (temp1),
		  (GET_CODE (temp5) == REG
		   || (GET_CODE (temp5) == SUBREG
		       && (temp5 = SUBREG_REG (temp5),
			   GET_CODE (temp5) == REG))))
	      && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
	      && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
	      && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10
	      && (temp4 = single_set (temp3)) != 0
	      && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
	      && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && rtx_equal_p (SET_DEST (temp4), temp2)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && rtx_cost (SET_SRC (temp4), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp5 = find_insert_position (insn, temp))
		  && (temp6 = find_insert_position (insn, temp3))
		  && validate_change (temp3, &SET_DEST (temp4), new, 0))
		{
		  /* Use the earliest of temp5 and temp6.  */
		  if (temp5 != insn)
		    temp6 = temp5;
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp6), temp);
		  emit_insn_after_with_line_notes (PATTERN (temp3),
						   PREV_INSN (temp6), temp3);
		  delete_insn (temp);
		  delete_insn (temp3);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }
#endif /* HAVE_cc0 */

	  /* Try to use a conditional move (if the target has them), or a
	     store-flag insn.  The general case is:

	     1) x = a; if (...) x = b; and
	     2) if (...) x = b;
	     If the jump would be faster, the machine should not have defined
	     the movcc or scc insns!  These cases are often made by the
	     previous optimization.

	     The second case is treated as  x = x; if (...) x = b;.

	     INSN here is the jump around the store.  We set:

	     TEMP to the "x op= b;" insn.
	     TEMP1 to X.
	     TEMP2 to B.
	     TEMP3 to A (X in the second case).
	     TEMP4 to the condition being tested.
	     TEMP5 to the earliest insn used to find the condition.
	     TEMP6 to the SET of TEMP.  */

	  if (/* We can't do this after reload has completed.  */
	      ! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      /* Set TEMP to the "x = b;" insn.  */
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && (temp6 = single_set (temp)) != NULL_RTX
	      && GET_CODE (temp1 = SET_DEST (temp6)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && ! side_effects_p (temp2 = SET_SRC (temp6))
	      && ! may_trap_p (temp2)
	      /* Allow either form, but prefer the former if both apply.
		 There is no point in using the old value of TEMP1 if
		 it is a register, since cse will alias them.  It can
		 lose if the old value were a hard register since CSE
		 won't replace hard registers.  Avoid using TEMP3 if
		 small register classes and it is a hard register.  */
	      && (((temp3 = reg_set_last (temp1, insn)) != 0
		   && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
			 && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
		  /* Make the latter case look like  x = x; if (...) x = b;  */
		  || (temp3 = temp1, 1))
	      /* INSN must either branch to the insn after TEMP or the insn
		 after TEMP must branch to the same place as INSN.  */
	      && (reallabelprev == temp
		  || ((temp4 = next_active_insn (temp)) != 0
		      && simplejump_p (temp4)
		      && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
	      && (temp4 = get_condition (insn, &temp5)) != 0
	      /* We must be comparing objects whose modes imply the size.
		 We could handle BLKmode if (1) emit_store_flag could
		 and (2) we could find the size reliably.  */
	      && GET_MODE (XEXP (temp4, 0)) != BLKmode
	      /* Even if branches are cheap, the store_flag optimization
		 can win when the operation to be performed can be
		 expressed directly.  */
#ifdef HAVE_cc0
	      /* If the previous insn sets CC0 and something else, we can't
		 do this since we are going to delete that insn.  */

	      && ! ((temp6 = prev_nonnote_insn (insn)) != 0
		    && GET_CODE (temp6) == INSN
		    && (sets_cc0_p (PATTERN (temp6)) == -1
			|| (sets_cc0_p (PATTERN (temp6)) == 1
			    && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
#endif
	      )
	    {
#ifdef HAVE_conditional_move
	      /* First try a conditional move.  */
	      {
		enum rtx_code code = GET_CODE (temp4);
		rtx var = temp1;
		rtx cond0, cond1, aval, bval;
		rtx target, new_insn;

		/* Copy the compared variables into cond0 and cond1, so that
		   any side effects performed in or after the old comparison
		   will not affect our compare, which will come later.  */
		/* ??? Is it possible to just use the comparison in the jump
		   insn?  After all, we're going to delete it.  We'd have
		   to modify emit_conditional_move to take a comparison rtx
		   instead or write a new function.  */
		cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
		/* We want the target to be able to simplify comparisons with
		   zero (and maybe other constants as well), so don't create
		   pseudos for them.  There's no need to, either.  */
		if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
		    || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
		  cond1 = XEXP (temp4, 1);
		else
		  cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));

		/* Careful about copying these values -- an IOR or the like
		   may need to do other things, like clobber flags.  */
		/* ??? Assume for the moment that AVAL is ok.  */
		aval = temp3;

		start_sequence ();

		/* If we're not dealing with a register or the insn is more
		   complex than a simple SET, duplicate the computation and
		   replace the destination with a new temporary.  */
		if (register_operand (temp2, GET_MODE (var))
		    && GET_CODE (PATTERN (temp)) == SET)
		  bval = temp2;
		else
		  {
		    bval = gen_reg_rtx (GET_MODE (var));
		    new_insn = copy_rtx (temp);
		    temp6 = single_set (new_insn);
		    SET_DEST (temp6) = bval;
		    emit_insn (PATTERN (new_insn));
		  }

		target = emit_conditional_move (var, code,
						cond0, cond1, VOIDmode,
						aval, bval, GET_MODE (var),
						(code == LTU || code == GEU
						 || code == LEU || code == GTU));

		if (target)
		  {
		    rtx seq1, seq2, last;
		    int copy_ok;

		    /* Save the conditional move sequence but don't emit it
		       yet.  On some machines, like the alpha, it is possible
		       that temp5 == insn, so next generate the sequence that
		       saves the compared values and then emit both
		       sequences ensuring seq1 occurs before seq2.  */
		    seq2 = get_insns ();
		    end_sequence ();

		    /* "Now that we can't fail..."  Famous last words.
		       Generate the copy insns that preserve the compared
		       values.  */
		    start_sequence ();
		    emit_move_insn (cond0, XEXP (temp4, 0));
		    if (cond1 != XEXP (temp4, 1))
		      emit_move_insn (cond1, XEXP (temp4, 1));
		    seq1 = get_insns ();
		    end_sequence ();

		    /* Validate the sequence -- this may be some weird
		       bit-extract-and-test instruction for which there
		       exists no complementary bit-extract insn.  */
		    copy_ok = 1;
		    for (last = seq1; last ; last = NEXT_INSN (last))
		      if (recog_memoized (last) < 0)
			{
			  copy_ok = 0;
			  break;
			}

		    if (copy_ok)
		      {
			emit_insns_before (seq1, temp5);

			/* Insert conditional move after insn, to be sure
			   that the jump and a possible compare won't be
			   separated.  */
			last = emit_insns_after (seq2, insn);

			/* ??? We can also delete the insn that sets X to A.
			   Flow will do it too though.  */
			delete_insn (temp);
			next = NEXT_INSN (insn);
			delete_jump (insn);

			if (after_regscan)
			  {
			    reg_scan_update (seq1, NEXT_INSN (last),
					     old_max_reg);
			    old_max_reg = max_reg_num ();
			  }

			changed = 1;
			continue;
		      }
		  }
		else
		  end_sequence ();
	      }
#endif

	      /* That didn't work, try a store-flag insn.

		 We further divide the cases into:

		 1) x = a; if (...) x = b; and either A or B is zero,
		 2) if (...) x = 0; and jumps are expensive,
		 3) x = a; if (...) x = b; and A and B are constants where all
		    the set bits in A are also set in B and jumps are expensive,
		 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
		    more expensive, and
		 5) if (...) x = b; if jumps are even more expensive.  */
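
	      /* As a concrete illustration of case 1 (an assumption-laden
		 sketch, supposing STORE_FLAG_VALUE == -1 and B == 0): if C
		 is the condition under which the jump around the store is
		 taken, the net effect here is x = C ? a : 0, which can be
		 computed branch-free as

		    t = -(C != 0);     -- store-flag result, -1 or 0
		    x = t & a;
	       */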

	      if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
		  && ((GET_CODE (temp3) == CONST_INT)
		      /* Make the latter case look like
			 x = x; if (...) x = 0;  */
		      || (temp3 = temp1,
			  ((BRANCH_COST >= 2
			    && temp2 == const0_rtx)
			   || BRANCH_COST >= 3)))
		  /* If B is zero, OK; if A is zero, can only do (1) if we
		     can reverse the condition.  See if (3) applies possibly
		     by reversing the condition.  Prefer reversing to (4) when
		     branches are very expensive.  */
		  && (((BRANCH_COST >= 2
			|| STORE_FLAG_VALUE == -1
			|| (STORE_FLAG_VALUE == 1
			    /* Check that the mask is a power of two,
			       so that it can probably be generated
			       with a shift.  */
			    && GET_CODE (temp3) == CONST_INT
			    && exact_log2 (INTVAL (temp3)) >= 0))
		       && (reversep = 0, temp2 == const0_rtx))
		      || ((BRANCH_COST >= 2
			   || STORE_FLAG_VALUE == -1
			   || (STORE_FLAG_VALUE == 1
			       && GET_CODE (temp2) == CONST_INT
			       && exact_log2 (INTVAL (temp2)) >= 0))
			  && temp3 == const0_rtx
			  && (reversep = can_reverse_comparison_p (temp4, insn)))
		      || (BRANCH_COST >= 2
			  && GET_CODE (temp2) == CONST_INT
			  && GET_CODE (temp3) == CONST_INT
			  && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
			      || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
				  && (reversep = can_reverse_comparison_p (temp4,
									   insn)))))
		      || BRANCH_COST >= 3)
		  )
		{
		  enum rtx_code code = GET_CODE (temp4);
		  rtx uval, cval, var = temp1;
		  int normalizep;
		  rtx target;

		  /* If necessary, reverse the condition.  */
		  if (reversep)
		    code = reverse_condition (code), uval = temp2, cval = temp3;
		  else
		    uval = temp3, cval = temp2;

		  /* If CVAL is non-zero, normalize to -1.  Otherwise, if UVAL
		     is the constant 1, it is best to just compute the result
		     directly.  If UVAL is constant and STORE_FLAG_VALUE
		     includes all of its bits, it is best to compute the flag
		     value unnormalized and `and' it with UVAL.  Otherwise,
		     normalize to -1 and `and' with UVAL.  */
		  normalizep = (cval != const0_rtx ? -1
				: (uval == const1_rtx ? 1
				   : (GET_CODE (uval) == CONST_INT
				      && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
				   ? 0 : -1));
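
		  /* Illustrative values (assumed examples, not taken from
		     the code above): with CVAL == 0 and UVAL == const1_rtx,
		     NORMALIZEP is 1 and the 0/1 store-flag result is used
		     directly.  With CVAL == 0, UVAL == 4 and
		     STORE_FLAG_VALUE == -1, NORMALIZEP is 0: the raw -1/0
		     flag is simply `and'ed with 4.  Any non-zero CVAL
		     forces normalization to -1.  */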

		  /* We will be putting the store-flag insn immediately in
		     front of the comparison that was originally being done,
		     so we know all the variables in TEMP4 will be valid.
		     However, this might be in front of the assignment of
		     A to VAR.  If it is, it would clobber the store-flag
		     we will be emitting.

		     Therefore, emit into a temporary which will be copied to
		     VAR immediately after TEMP.  */

		  start_sequence ();
		  target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
					    XEXP (temp4, 0), XEXP (temp4, 1),
					    VOIDmode,
					    (code == LTU || code == LEU
					     || code == GEU || code == GTU),
					    normalizep);
		  if (target)
		    {
		      rtx seq;
		      rtx before = insn;

		      seq = get_insns ();
		      end_sequence ();

		      /* Put the store-flag insns in front of the first insn
			 used to compute the condition to ensure that we
			 use the same values of them as the current
			 comparison.  However, the remainder of the insns we
			 generate will be placed directly in front of the
			 jump insn, in case any of the pseudos we use
			 are modified earlier.  */

		      emit_insns_before (seq, temp5);

		      start_sequence ();

		      /* Both CVAL and UVAL are non-zero.  */
		      if (cval != const0_rtx && uval != const0_rtx)
			{
			  rtx tem1, tem2;

			  tem1 = expand_and (uval, target, NULL_RTX);
			  if (GET_CODE (cval) == CONST_INT
			      && GET_CODE (uval) == CONST_INT
			      && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
			    tem2 = cval;
			  else
			    {
			      tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
						  target, NULL_RTX, 0);
			      tem2 = expand_and (cval, tem2,
						 (GET_CODE (tem2) == REG
						  ? tem2 : 0));
			    }

			  /* If we usually make new pseudos, do so here.  This
			     turns out to help machines that have conditional
			     move insns.  */
			  /* ??? Conditional moves have already been handled.
			     This may be obsolete.  */

			  if (flag_expensive_optimizations)
			    target = 0;

			  target = expand_binop (GET_MODE (var), ior_optab,
						 tem1, tem2, target,
						 1, OPTAB_WIDEN);
			}
		      else if (normalizep != 1)
			{
			  /* We know that either CVAL or UVAL is zero.  If
			     UVAL is zero, negate TARGET and `and' with CVAL.
			     Otherwise, `and' with UVAL.  */
			  if (uval == const0_rtx)
			    {
			      target = expand_unop (GET_MODE (var), one_cmpl_optab,
						    target, NULL_RTX, 0);
			      uval = cval;
			    }

			  target = expand_and (uval, target,
					       (GET_CODE (target) == REG
						&& ! preserve_subexpressions_p ()
						? target : NULL_RTX));
			}

		      emit_move_insn (var, target);
		      seq = get_insns ();
		      end_sequence ();
#ifdef HAVE_cc0
		      /* If INSN uses CC0, we must not separate it from the
			 insn that sets cc0.  */
		      if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
			before = prev_nonnote_insn (before);
#endif
		      emit_insns_before (seq, before);

		      delete_insn (temp);
		      next = NEXT_INSN (insn);
		      delete_jump (insn);

		      if (after_regscan)
			{
			  reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
			  old_max_reg = max_reg_num ();
			}

		      changed = 1;
		      continue;
		    }
		  else
		    end_sequence ();
		}
	    }

	  /* If branches are expensive, convert
		if (foo) bar++;    to    bar += (foo != 0);
	     and similarly for "bar--;"

	     INSN is the conditional branch around the arithmetic.  We set:

	     TEMP is the arithmetic insn.
	     TEMP1 is the SET doing the arithmetic.
	     TEMP2 is the operand being incremented or decremented.
	     TEMP3 to the condition being tested.
	     TEMP4 to the earliest insn used to find the condition.  */
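
	  /* For instance (an illustrative sketch, not literal output):
	     with BRANCH_COST >= 2 this turns

		if (foo) bar++;

	     into the branch-free

		t = (foo != 0);    -- emit_store_flag, normalized to 0/1
		bar = bar + t;

	     using add_optab (or sub_optab for "bar--;").  */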

	  if ((BRANCH_COST >= 2
#ifdef HAVE_incscc
	       || HAVE_incscc
#endif
#ifdef HAVE_decscc
	       || HAVE_decscc
#endif
	       )
	      && ! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && (temp = next_nonnote_insn (insn)) != 0
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1),
		  GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
	      && GET_CODE (SET_SRC (temp1)) == PLUS
	      && (XEXP (SET_SRC (temp1), 1) == const1_rtx
		  || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
	      && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
	      && ! side_effects_p (temp2)
	      && ! may_trap_p (temp2)
	      /* INSN must either branch to the insn after TEMP or the insn
		 after TEMP must branch to the same place as INSN.  */
	      && (reallabelprev == temp
		  || ((temp3 = next_active_insn (temp)) != 0
		      && simplejump_p (temp3)
		      && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
	      && (temp3 = get_condition (insn, &temp4)) != 0
	      /* We must be comparing objects whose modes imply the size.
		 We could handle BLKmode if (1) emit_store_flag could
		 and (2) we could find the size reliably.  */
	      && GET_MODE (XEXP (temp3, 0)) != BLKmode
	      && can_reverse_comparison_p (temp3, insn))
	    {
	      rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
	      enum rtx_code code = reverse_condition (GET_CODE (temp3));

	      start_sequence ();

	      /* It must be the case that TEMP2 is not modified in the range
		 [TEMP4, INSN).  The one exception we make is if the insn
		 before INSN sets TEMP2 to something which is also unchanged
		 in that range.  In that case, we can move the initialization
		 into our sequence.  */

	      if ((temp5 = prev_active_insn (insn)) != 0
		  && no_labels_between_p (temp5, insn)
		  && GET_CODE (temp5) == INSN
		  && (temp6 = single_set (temp5)) != 0
		  && rtx_equal_p (temp2, SET_DEST (temp6))
		  && (CONSTANT_P (SET_SRC (temp6))
		      || GET_CODE (SET_SRC (temp6)) == REG
		      || GET_CODE (SET_SRC (temp6)) == SUBREG))
		{
		  emit_insn (PATTERN (temp5));
		  init_insn = temp5;
		  init = SET_SRC (temp6);
		}

	      if (CONSTANT_P (init)
		  || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
		target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
					  XEXP (temp3, 0), XEXP (temp3, 1),
					  VOIDmode,
					  (code == LTU || code == LEU
					   || code == GTU || code == GEU), 1);

	      /* If we can do the store-flag, do the addition or
		 subtraction.  */

	      if (target)
		target = expand_binop (GET_MODE (temp2),
				       (XEXP (SET_SRC (temp1), 1) == const1_rtx
					? add_optab : sub_optab),
				       temp2, target, temp2, 0, OPTAB_WIDEN);

	      if (target != 0)
		{
		  /* Put the result back in temp2 in case it isn't already.
		     Then replace the jump, possibly a CC0-setting insn in
		     front of the jump, and TEMP, with the sequence we have
		     made.  */

		  if (target != temp2)
		    emit_move_insn (temp2, target);

		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, temp4);
		  delete_insn (temp);

		  if (init_insn)
		    delete_insn (init_insn);

		  next = NEXT_INSN (insn);
#ifdef HAVE_cc0
		  delete_insn (prev_nonnote_insn (insn));
#endif
		  delete_insn (insn);

		  if (after_regscan)
		    {
		      reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }

		  changed = 1;
		  continue;
		}
	      else
		end_sequence ();
	    }

	  /* Simplify   if (...) x = 1; else {...}  if (x) ...
	     We recognize this case scanning backwards as well.

	     TEMP is the assignment to x;
	     TEMP1 is the label at the head of the second if.  */
	  /* ?? This should call get_condition to find the values being
	     compared, instead of looking for a COMPARE insn when HAVE_cc0
	     is not defined.  This would allow it to work on the m88k.  */
	  /* ?? This optimization is only safe before cse is run if HAVE_cc0
	     is not defined and the condition is tested by a separate compare
	     insn.  This is because the code below assumes that the result
	     of the compare dies in the following branch.

	     Not only that, but there might be other insns between the
	     compare and branch whose results are live.  Those insns need
	     to be executed.

	     A way to fix this is to move the insns at JUMP_LABEL (insn)
	     to before INSN.  If we are running before flow, they will
	     be deleted if they aren't needed.  But this doesn't work
	     well after flow.

	     This is really a special-case of jump threading, anyway.  The
	     right thing to do is to replace this and jump threading with
	     much simpler code in cse.

	     This code has been turned off in the non-cc0 case in the
	     meantime.  */

#ifdef HAVE_cc0
	  else if (this_is_simplejump
		   /* Safe to skip USE and CLOBBER insns here
		      since they will not be deleted.  */
		   && (temp = prev_active_insn (insn))
		   && no_labels_between_p (temp, insn)
		   && GET_CODE (temp) == INSN
		   && GET_CODE (PATTERN (temp)) == SET
		   && GET_CODE (SET_DEST (PATTERN (temp))) == REG
		   && CONSTANT_P (SET_SRC (PATTERN (temp)))
		   && (temp1 = next_active_insn (JUMP_LABEL (insn)))
		   /* If we find that the next value tested is `x'
		      (TEMP1 is the insn where this happens), win.  */
		   && GET_CODE (temp1) == INSN
		   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
		   /* Does temp1 `tst' the value of x?  */
		   && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
		   && SET_DEST (PATTERN (temp1)) == cc0_rtx
		   && (temp1 = next_nonnote_insn (temp1))
#else
		   /* Does temp1 compare the value of x against zero?  */
		   && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
		   && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
		   && (XEXP (SET_SRC (PATTERN (temp1)), 0)
		       == SET_DEST (PATTERN (temp)))
		   && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
		   && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
#endif
		   && condjump_p (temp1))
	    {
	      /* Get the if_then_else from the condjump.  */
	      rtx choice = SET_SRC (PATTERN (temp1));
	      if (GET_CODE (choice) == IF_THEN_ELSE)
		{
		  enum rtx_code code = GET_CODE (XEXP (choice, 0));
		  rtx val = SET_SRC (PATTERN (temp));
		  rtx cond
		    = simplify_relational_operation (code,
						     GET_MODE (SET_DEST (PATTERN (temp))),
						     val, const0_rtx);
		  rtx ultimate;

		  if (cond == const_true_rtx)
		    ultimate = XEXP (choice, 1);
		  else if (cond == const0_rtx)
		    ultimate = XEXP (choice, 2);
		  else
		    ultimate = 0;

		  if (ultimate == pc_rtx)
		    ultimate = get_label_after (temp1);
		  else if (ultimate && GET_CODE (ultimate) != RETURN)
		    ultimate = XEXP (ultimate, 0);

		  if (ultimate && JUMP_LABEL (insn) != ultimate)
		    changed |= redirect_jump (insn, ultimate);
		}
	    }
#endif

#if 0
	  /* @@ This needs a bit of work before it will be right.

	     Any type of comparison can be accepted for the first and
	     second compare.  When rewriting the first jump, we must
	     compute what conditions can reach label3, and use the
	     appropriate code.  We cannot simply reverse/swap the code
	     of the first jump.  In some cases, the second jump must be
	     rewritten also.

	     For example,
	     < ==	converts to	> ==
	     < !=	converts to	== >
	     etc.

	     If the code is written to only accept an '==' test for the second
	     compare, then all that needs to be done is to swap the condition
	     of the first branch.

	     It is questionable whether we want this optimization anyway,
	     since if the user wrote code like this because he/she knew that
	     the jump to label1 is taken most of the time, then rewriting
	     this gives slower code.  */
	  /* @@ This should call get_condition to find the values being
	     compared, instead of looking for a COMPARE insn when HAVE_cc0
	     is not defined.  This would allow it to work on the m88k.  */
	  /* @@ This optimization is only safe before cse is run if HAVE_cc0
	     is not defined and the condition is tested by a separate compare
	     insn.  This is because the code below assumes that the result
	     of the compare dies in the following branch.  */

	  /* Simplify  test a ~= b
		       condjump label1;
		       test a == b
		       condjump label2;
		       jump label3;
		       label1:

	     rewriting as
		       test a ~~= b
		       condjump label3
		       test a == b
		       condjump label2
		       label1:

	     where ~= is an inequality, e.g. >, and ~~= is the swapped
	     inequality, e.g. <.

	     We recognize this case scanning backwards.

	     TEMP is the conditional jump to `label2';
	     TEMP1 is the test for `a == b';
	     TEMP2 is the conditional jump to `label1';
	     TEMP3 is the test for `a ~= b'.  */
	  else if (this_is_simplejump
		   && (temp = prev_active_insn (insn))
		   && no_labels_between_p (temp, insn)
		   && condjump_p (temp)
		   && (temp1 = prev_active_insn (temp))
		   && no_labels_between_p (temp1, temp)
		   && GET_CODE (temp1) == INSN
		   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
		   && sets_cc0_p (PATTERN (temp1)) == 1
#else
		   && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
		   && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
		   && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
#endif
		   && (temp2 = prev_active_insn (temp1))
		   && no_labels_between_p (temp2, temp1)
		   && condjump_p (temp2)
		   && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
		   && (temp3 = prev_active_insn (temp2))
		   && no_labels_between_p (temp3, temp2)
		   && GET_CODE (PATTERN (temp3)) == SET
		   && rtx_equal_p (SET_DEST (PATTERN (temp3)),
				   SET_DEST (PATTERN (temp1)))
		   && rtx_equal_p (SET_SRC (PATTERN (temp1)),
				   SET_SRC (PATTERN (temp3)))
		   && ! inequality_comparisons_p (PATTERN (temp))
		   && inequality_comparisons_p (PATTERN (temp2)))
	    {
	      rtx fallthrough_label = JUMP_LABEL (temp2);

	      ++LABEL_NUSES (fallthrough_label);
	      if (swap_jump (temp2, JUMP_LABEL (insn)))
		{
		  delete_insn (insn);
		  changed = 1;
		}

	      if (--LABEL_NUSES (fallthrough_label) == 0)
		delete_insn (fallthrough_label);
	    }
#endif
	  /* Simplify  if (...) {... x = 1;} if (x) ...

	     We recognize this case backwards.

	     TEMP is the test of `x';
	     TEMP1 is the assignment to `x' at the end of the
	     previous statement.  */
	  /* @@ This should call get_condition to find the values being
	     compared, instead of looking for a COMPARE insn when HAVE_cc0
	     is not defined.  This would allow it to work on the m88k.  */
	  /* @@ This optimization is only safe before cse is run if HAVE_cc0
	     is not defined and the condition is tested by a separate compare
	     insn.  This is because the code below assumes that the result
	     of the compare dies in the following branch.  */

	  /* ??? This has to be turned off.  The problem is that the
	     unconditional jump might indirectly end up branching to the
	     label between TEMP1 and TEMP.  We can't detect this, in general,
	     since it may become a jump to there after further optimizations.
	     If that jump is done, it will be deleted, so we will retry
	     this optimization in the next pass, thus an infinite loop.

	     The present code prevents this by putting the jump after the
	     label, but this is not logically correct.  */
#if 0
	  else if (this_is_condjump
		   /* Safe to skip USE and CLOBBER insns here
		      since they will not be deleted.  */
		   && (temp = prev_active_insn (insn))
		   && no_labels_between_p (temp, insn)
		   && GET_CODE (temp) == INSN
		   && GET_CODE (PATTERN (temp)) == SET
#ifdef HAVE_cc0
		   && sets_cc0_p (PATTERN (temp)) == 1
		   && GET_CODE (SET_SRC (PATTERN (temp))) == REG
#else
		   /* TEMP must be a compare insn; we cannot accept a
		      register-to-register move here, since it may not
		      be simply a tst insn.  */
		   && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
		   && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
		   && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
		   && GET_CODE (SET_DEST (PATTERN (temp))) == REG
		   && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
#endif
		   /* May skip USE or CLOBBER insns here
		      for checking for opportunity, since we
		      take care of them later.  */
		   && (temp1 = prev_active_insn (temp))
		   && GET_CODE (temp1) == INSN
		   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
		   && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
#else
		   && (XEXP (SET_SRC (PATTERN (temp)), 0)
		       == SET_DEST (PATTERN (temp1)))
#endif
		   && CONSTANT_P (SET_SRC (PATTERN (temp1)))
		   /* If this isn't true, cse will do the job.  */
		   && ! no_labels_between_p (temp1, temp))
	    {
	      /* Get the if_then_else from the condjump.  */
	      rtx choice = SET_SRC (PATTERN (insn));
	      if (GET_CODE (choice) == IF_THEN_ELSE
		  && (GET_CODE (XEXP (choice, 0)) == EQ
		      || GET_CODE (XEXP (choice, 0)) == NE))
		{
		  int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
		  rtx last_insn;
		  rtx ultimate;
		  rtx p;

		  /* Get the place that condjump will jump to
		     if it is reached from here.  */
		  if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
		      == want_nonzero)
		    ultimate = XEXP (choice, 1);
		  else
		    ultimate = XEXP (choice, 2);
		  /* Get it as a CODE_LABEL.  */
		  if (ultimate == pc_rtx)
		    ultimate = get_label_after (insn);
		  else
		    /* Get the label out of the LABEL_REF.  */
		    ultimate = XEXP (ultimate, 0);

		  /* Insert the jump immediately before TEMP, specifically
		     after the label that is between TEMP1 and TEMP.  */
		  last_insn = PREV_INSN (temp);

1641 /* If we would be branching to the next insn, the jump
1642 would immediately be deleted and then re-inserted in
1643 a subsequent pass over the code. So don't do anything
1644 in that case. */
1645 if (next_active_insn (last_insn)
1646 != next_active_insn (ultimate))
1647 {
1648 emit_barrier_after (last_insn);
1649 p = emit_jump_insn_after (gen_jump (ultimate),
1650 last_insn);
1651 JUMP_LABEL (p) = ultimate;
1652 ++LABEL_NUSES (ultimate);
1653 if (INSN_UID (ultimate) < max_jump_chain
1654 && INSN_UID (p) < max_jump_chain)
1655 {
1656 jump_chain[INSN_UID (p)]
1657 = jump_chain[INSN_UID (ultimate)];
1658 jump_chain[INSN_UID (ultimate)] = p;
1659 }
1660 changed = 1;
1661 continue;
1662 }
1663 }
1664 }
1665 #endif
1666 /* Detect a conditional jump going to the same place
1667 as an immediately following unconditional jump. */
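	 /* Illustrative sketch (not from the original sources; labels
	    are hypothetical):
		(cond) jump L1
		jump L2
	    where the first active insn after L1 and after L2 is the
	    same.  The conditional jump transfers to the same place
	    either way, so it can simply be deleted.  */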
1668 else if (this_is_condjump
1669 && (temp = next_active_insn (insn)) != 0
1670 && simplejump_p (temp)
1671 && (next_active_insn (JUMP_LABEL (insn))
1672 == next_active_insn (JUMP_LABEL (temp))))
1673 {
1674 rtx tem = temp;
1675
1676 /* ??? Optional. Disables some optimizations, but makes
1677 gcov output more accurate with -O. */
1678 if (flag_test_coverage && !reload_completed)
1679 for (tem = insn; tem != temp; tem = NEXT_INSN (tem))
1680 if (GET_CODE (tem) == NOTE && NOTE_LINE_NUMBER (tem) > 0)
1681 break;
1682
1683 if (tem == temp)
1684 {
1685 delete_jump (insn);
1686 changed = 1;
1687 continue;
1688 }
1689 }
1690 #ifdef HAVE_trap
1691 /* Detect a conditional jump jumping over an unconditional trap. */
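	 /* Sketch (hypothetical label L): "(cond) jump L; trap; L:"
	    becomes "trap_if (!cond); L:", folding the jump around the
	    trap and the unconditional trap into one conditional trap
	    with the reversed condition.  */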
1692 else if (HAVE_trap
1693 && this_is_condjump && ! this_is_simplejump
1694 && reallabelprev != 0
1695 && GET_CODE (reallabelprev) == INSN
1696 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1697 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1698 && prev_active_insn (reallabelprev) == insn
1699 && no_labels_between_p (insn, reallabelprev)
1700 && (temp2 = get_condition (insn, &temp4))
1701 && can_reverse_comparison_p (temp2, insn))
1702 {
1703 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1704 XEXP (temp2, 0), XEXP (temp2, 1),
1705 TRAP_CODE (PATTERN (reallabelprev)));
1706
1707 if (new)
1708 {
1709 emit_insn_before (new, temp4);
1710 delete_insn (reallabelprev);
1711 delete_jump (insn);
1712 changed = 1;
1713 continue;
1714 }
1715 }
1716 /* Detect a jump jumping to an unconditional trap. */
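	 /* Sketches (hypothetical labels): "jump L ... L: trap"
	    becomes a trap in place of the jump; "(cond) jump L
	    ... L: trap" becomes "trap_if (cond)"; and if the jump
	    condition and the trap condition can never both hold,
	    the jump is redirected to just past the trap.  */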
1717 else if (HAVE_trap && this_is_condjump
1718 && (temp = next_active_insn (JUMP_LABEL (insn)))
1719 && GET_CODE (temp) == INSN
1720 && GET_CODE (PATTERN (temp)) == TRAP_IF
1721 && (this_is_simplejump
1722 || (temp2 = get_condition (insn, &temp4))))
1723 {
1724 rtx tc = TRAP_CONDITION (PATTERN (temp));
1725
1726 if (tc == const_true_rtx
1727 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1728 {
1729 rtx new;
1730 /* Replace an unconditional jump to a trap with a trap. */
1731 if (this_is_simplejump)
1732 {
1733 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1734 delete_jump (insn);
1735 changed = 1;
1736 continue;
1737 }
1738 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1739 XEXP (temp2, 1),
1740 TRAP_CODE (PATTERN (temp)));
1741 if (new)
1742 {
1743 emit_insn_before (new, temp4);
1744 delete_jump (insn);
1745 changed = 1;
1746 continue;
1747 }
1748 }
1749 /* If the trap condition and jump condition are mutually
1750 exclusive, redirect the jump to the following insn. */
1751 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1752 && ! this_is_simplejump
1753 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
1754 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
1755 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
1756 && redirect_jump (insn, get_label_after (temp)))
1757 {
1758 changed = 1;
1759 continue;
1760 }
1761 }
1762 #endif
1763
1764 /* Detect a conditional jump jumping over an unconditional jump. */
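	 /* Sketch (hypothetical labels): "(cond) jump L1; jump L2; L1:"
	    becomes "(!cond) jump L2; L1:", absorbing the unconditional
	    jump by inverting the conditional one; L1 is then deleted
	    if it is no longer used.  */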
1765
1766 else if ((this_is_condjump || this_is_condjump_in_parallel)
1767 && ! this_is_simplejump
1768 && reallabelprev != 0
1769 && GET_CODE (reallabelprev) == JUMP_INSN
1770 && prev_active_insn (reallabelprev) == insn
1771 && no_labels_between_p (insn, reallabelprev)
1772 && simplejump_p (reallabelprev))
1773 {
1774 /* When we invert the unconditional jump, we will be
1775 decrementing the usage count of its old label.
1776 Make sure that we don't delete it now because that
1777 might cause the following code to be deleted. */
1778 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1779 rtx prev_label = JUMP_LABEL (insn);
1780
1781 if (prev_label)
1782 ++LABEL_NUSES (prev_label);
1783
1784 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1785 {
1786 /* It is very likely that if there are USE insns before
1787 this jump, they hold REG_DEAD notes. These REG_DEAD
1788 notes are no longer valid due to this optimization,
1789 and will cause the life analysis used by following passes
1790 (notably delayed-branch scheduling) to think that
1791 these registers are dead when they are not.
1792
1793 To prevent this trouble, we just remove the USE insns
1794 from the insn chain. */
1795
1796 while (prev_uses && GET_CODE (prev_uses) == INSN
1797 && GET_CODE (PATTERN (prev_uses)) == USE)
1798 {
1799 rtx useless = prev_uses;
1800 prev_uses = prev_nonnote_insn (prev_uses);
1801 delete_insn (useless);
1802 }
1803
1804 delete_insn (reallabelprev);
1805 next = insn;
1806 changed = 1;
1807 }
1808
1809 /* We can now safely delete the label if it is unreferenced
1810 since the delete_insn above has deleted the BARRIER. */
1811 if (prev_label && --LABEL_NUSES (prev_label) == 0)
1812 delete_insn (prev_label);
1813 continue;
1814 }
1815 else
1816 {
1817 /* Detect a jump to a jump. */
1818
1819 nlabel = follow_jumps (JUMP_LABEL (insn));
1820 if (nlabel != JUMP_LABEL (insn)
1821 && redirect_jump (insn, nlabel))
1822 {
1823 changed = 1;
1824 next = insn;
1825 }
1826
1827 /* Look for if (foo) bar; else break; */
1828 /* The insns look like this:
1829 insn = condjump label1;
1830 ...range1 (some insns)...
1831 jump label2;
1832 label1:
1833 ...range2 (some insns)...
1834 jump somewhere unconditionally
1835 label2: */
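	  /* After the transformation (schematic):
	     insn = inverted condjump label1;
	     ...range2 (still ending in its unconditional jump)...
	     label1:
	     ...range1...
	     jump label2;
	     label2:
	     so "jump label2" now targets the immediately following
	     label and is deleted by a later simplification pass.  */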
1836 {
1837 rtx label1 = next_label (insn);
1838 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1839 /* Don't do this optimization on the first round, so that
1840 jump-around-a-jump gets simplified before we ask here
1841 whether a jump is unconditional.
1842
1843 Also don't do it when we are called after reload since
1844 it will confuse reorg. */
1845 if (! first
1846 && (reload_completed ? ! flag_delayed_branch : 1)
1847 /* Make sure INSN is something we can invert. */
1848 && condjump_p (insn)
1849 && label1 != 0
1850 && JUMP_LABEL (insn) == label1
1851 && LABEL_NUSES (label1) == 1
1852 && GET_CODE (range1end) == JUMP_INSN
1853 && simplejump_p (range1end))
1854 {
1855 rtx label2 = next_label (label1);
1856 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1857 if (range1end != range2end
1858 && JUMP_LABEL (range1end) == label2
1859 && GET_CODE (range2end) == JUMP_INSN
1860 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1861 /* Invert the jump condition, so we
1862 still execute the same insns in each case. */
1863 && invert_jump (insn, label1))
1864 {
1865 rtx range1beg = next_active_insn (insn);
1866 rtx range2beg = next_active_insn (label1);
1867 rtx range1after, range2after;
1868 rtx range1before, range2before;
1869 rtx rangenext;
1870
1871 /* Include in each range any notes before it, to be
1872 sure that we get the line number note if any, even
1873 if there are other notes here. */
1874 while (PREV_INSN (range1beg)
1875 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
1876 range1beg = PREV_INSN (range1beg);
1877
1878 while (PREV_INSN (range2beg)
1879 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
1880 range2beg = PREV_INSN (range2beg);
1881
1882 /* Don't move NOTEs for blocks or loops; shift them
1883 outside the ranges, where they'll stay put. */
1884 range1beg = squeeze_notes (range1beg, range1end);
1885 range2beg = squeeze_notes (range2beg, range2end);
1886
1887 /* Get current surrounds of the 2 ranges. */
1888 range1before = PREV_INSN (range1beg);
1889 range2before = PREV_INSN (range2beg);
1890 range1after = NEXT_INSN (range1end);
1891 range2after = NEXT_INSN (range2end);
1892
1893 /* Splice range2 where range1 was. */
1894 NEXT_INSN (range1before) = range2beg;
1895 PREV_INSN (range2beg) = range1before;
1896 NEXT_INSN (range2end) = range1after;
1897 PREV_INSN (range1after) = range2end;
1898 /* Splice range1 where range2 was. */
1899 NEXT_INSN (range2before) = range1beg;
1900 PREV_INSN (range1beg) = range2before;
1901 NEXT_INSN (range1end) = range2after;
1902 PREV_INSN (range2after) = range1end;
1903
1904 /* Check for a loop end note between the end of
1905 range2, and the next code label. If there is one,
1906 then what we have really seen is
1907 if (foo) break; end_of_loop;
1908 and moved the break sequence outside the loop.
1909 We must move the LOOP_END note to where the
1910 loop really ends now, or we will confuse loop
1911 optimization. Stop if we find a LOOP_BEG note
1912 first, since we don't want to move the LOOP_END
1913 note in that case. */
1914 for (; range2after != label2; range2after = rangenext)
1915 {
1916 rangenext = NEXT_INSN (range2after);
1917 if (GET_CODE (range2after) == NOTE)
1918 {
1919 if (NOTE_LINE_NUMBER (range2after)
1920 == NOTE_INSN_LOOP_END)
1921 {
1922 NEXT_INSN (PREV_INSN (range2after))
1923 = rangenext;
1924 PREV_INSN (rangenext)
1925 = PREV_INSN (range2after);
1926 PREV_INSN (range2after)
1927 = PREV_INSN (range1beg);
1928 NEXT_INSN (range2after) = range1beg;
1929 NEXT_INSN (PREV_INSN (range1beg))
1930 = range2after;
1931 PREV_INSN (range1beg) = range2after;
1932 }
1933 else if (NOTE_LINE_NUMBER (range2after)
1934 == NOTE_INSN_LOOP_BEG)
1935 break;
1936 }
1937 }
1938 changed = 1;
1939 continue;
1940 }
1941 }
1942 }
1943
1944 /* Now that the jump has been tensioned,
1945 try cross jumping: check for identical code
1946 before the jump and before its target label. */
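	 /* Sketch of a cross jump (labels hypothetical): given
	    "A; B; jump L" somewhere and another copy of "A; B"
	    immediately before L, the copy next to the jump is
	    deleted and the jump is redirected to a new label placed
	    before the surviving copy.  */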
1947
1948 /* First, cross jumping of conditional jumps: */
1949
1950 if (cross_jump && condjump_p (insn))
1951 {
1952 rtx newjpos, newlpos;
1953 rtx x = prev_real_insn (JUMP_LABEL (insn));
1954
1955 /* A conditional jump may be crossjumped
1956 only if the place it jumps to follows
1957 an opposing jump that comes back here. */
1958
1959 if (x != 0 && ! jump_back_p (x, insn))
1960 /* We have no opposing jump;
1961 cannot cross jump this insn. */
1962 x = 0;
1963
1964 newjpos = 0;
1965 /* X is nonzero if it is ok to cross jump
1966 to code before X.  If so, see if the insns match. */
1967 if (x != 0)
1968 find_cross_jump (insn, x, 2,
1969 &newjpos, &newlpos);
1970
1971 if (newjpos != 0)
1972 {
1973 do_cross_jump (insn, newjpos, newlpos);
1974 /* Make the old conditional jump
1975 into an unconditional one. */
1976 SET_SRC (PATTERN (insn))
1977 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
1978 INSN_CODE (insn) = -1;
1979 emit_barrier_after (insn);
1980 /* Add to jump_chain unless this is a new label
1981 whose UID is too large. */
1982 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
1983 {
1984 jump_chain[INSN_UID (insn)]
1985 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1986 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
1987 }
1988 changed = 1;
1989 next = insn;
1990 }
1991 }
1992
1993 /* Cross jumping of unconditional jumps:
1994 a few differences. */
1995
1996 if (cross_jump && simplejump_p (insn))
1997 {
1998 rtx newjpos, newlpos;
1999 rtx target;
2000
2001 newjpos = 0;
2002
2003 /* See if the code just before INSN matches the code
2004 just before its target label. */
2005 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2006 &newjpos, &newlpos);
2007
2008 /* If we cannot cross jump to code before the label,
2009 see if we can cross jump to another jump to
2010 the same label. */
2011 /* Try each other jump to this label. */
2012 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2013 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2014 target != 0 && newjpos == 0;
2015 target = jump_chain[INSN_UID (target)])
2016 if (target != insn
2017 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2018 /* Ignore TARGET if it's deleted. */
2019 && ! INSN_DELETED_P (target))
2020 find_cross_jump (insn, target, 2,
2021 &newjpos, &newlpos);
2022
2023 if (newjpos != 0)
2024 {
2025 do_cross_jump (insn, newjpos, newlpos);
2026 changed = 1;
2027 next = insn;
2028 }
2029 }
2030
2031 /* This code was dead in the previous jump.c! */
2032 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2033 {
2034 /* Return insns all "jump to the same place"
2035 so we can cross-jump between any two of them. */
2036
2037 rtx newjpos, newlpos, target;
2038
2039 newjpos = 0;
2040
2041 /* If we cannot cross jump to code before the label,
2042 see if we can cross jump to another jump to
2043 the same label. */
2044 /* Try each other jump to this label. */
2045 for (target = jump_chain[0];
2046 target != 0 && newjpos == 0;
2047 target = jump_chain[INSN_UID (target)])
2048 if (target != insn
2049 && ! INSN_DELETED_P (target)
2050 && GET_CODE (PATTERN (target)) == RETURN)
2051 find_cross_jump (insn, target, 2,
2052 &newjpos, &newlpos);
2053
2054 if (newjpos != 0)
2055 {
2056 do_cross_jump (insn, newjpos, newlpos);
2057 changed = 1;
2058 next = insn;
2059 }
2060 }
2061 }
2062 }
2063
2064 first = 0;
2065 }
2066
2067 /* Delete extraneous line number notes.
2068 Note that two consecutive notes for different lines are not really
2069 extraneous. There should be some indication where that line belonged,
2070 even if it became empty. */
2071
2072 {
2073 rtx last_note = 0;
2074
2075 for (insn = f; insn; insn = NEXT_INSN (insn))
2076 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2077 {
2078 /* Delete this note if it is identical to previous note. */
2079 if (last_note
2080 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2081 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2082 {
2083 delete_insn (insn);
2084 continue;
2085 }
2086
2087 last_note = insn;
2088 }
2089 }
2090
2091 #ifdef HAVE_return
2092 if (HAVE_return)
2093 {
2094 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2095 in front of it. If the machine allows it at this point (we might be
2096 after reload for a leaf routine), it will improve optimization for it
2097 to be there. We do this both here and at the start of this pass since
2098 the RETURN might have been deleted by some of our optimizations. */
2099 insn = get_last_insn ();
2100 while (insn && GET_CODE (insn) == NOTE)
2101 insn = PREV_INSN (insn);
2102
2103 if (insn && GET_CODE (insn) != BARRIER)
2104 {
2105 emit_jump_insn (gen_return ());
2106 emit_barrier ();
2107 }
2108 }
2109 #endif
2110
2111 /* CAN_REACH_END is persistent for each function. Once set it should
2112 not be cleared. This is especially true for the case where we
2113 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
2114 the front-end before compiling each function. */
2115 if (calculate_can_reach_end (last_insn, 0, 1))
2116 can_reach_end = 1;
2117
2118 /* Show JUMP_CHAIN no longer valid. */
2119 jump_chain = 0;
2120 }
2121 \f
2122 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2123 notes whose labels don't occur in the insn any more. Returns the
2124 largest INSN_UID found. */
2125 static int
2126 init_label_info (f)
2127 rtx f;
2128 {
2129 int largest_uid = 0;
2130 rtx insn;
2131
2132 for (insn = f; insn; insn = NEXT_INSN (insn))
2133 {
2134 if (GET_CODE (insn) == CODE_LABEL)
2135 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2136 else if (GET_CODE (insn) == JUMP_INSN)
2137 JUMP_LABEL (insn) = 0;
2138 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2139 {
2140 rtx note, next;
2141
2142 for (note = REG_NOTES (insn); note; note = next)
2143 {
2144 next = XEXP (note, 1);
2145 if (REG_NOTE_KIND (note) == REG_LABEL
2146 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2147 remove_note (insn, note);
2148 }
2149 }
2150 if (INSN_UID (insn) > largest_uid)
2151 largest_uid = INSN_UID (insn);
2152 }
2153
2154 return largest_uid;
2155 }
2156
2157 /* Delete insns following barriers, up to next label.
2158
2159 Also delete no-op jumps created by gcse. */
2160 static void
2161 delete_barrier_successors (f)
2162 rtx f;
2163 {
2164 rtx insn;
2165
2166 for (insn = f; insn;)
2167 {
2168 if (GET_CODE (insn) == BARRIER)
2169 {
2170 insn = NEXT_INSN (insn);
2171
2172 never_reached_warning (insn);
2173
2174 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2175 {
2176 if (GET_CODE (insn) == NOTE
2177 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2178 insn = NEXT_INSN (insn);
2179 else
2180 insn = delete_insn (insn);
2181 }
2182 /* INSN is now the CODE_LABEL. */
2183 }
2184 /* Also remove (set (pc) (pc)) insns which can be created by
2185 gcse. We eliminate such insns now to avoid having them
2186 cause problems later. */
2187 else if (GET_CODE (insn) == JUMP_INSN
2188 && GET_CODE (PATTERN (insn)) == SET
2189 && SET_SRC (PATTERN (insn)) == pc_rtx
2190 && SET_DEST (PATTERN (insn)) == pc_rtx)
2191 insn = delete_insn (insn);
2192
2193 else
2194 insn = NEXT_INSN (insn);
2195 }
2196 }
2197
2198 /* Mark the label each jump jumps to.
2199 Combine consecutive labels, and count uses of labels.
2200
2201 For each label, make a chain (using `jump_chain')
2202 of all the *unconditional* jumps that jump to it;
2203 also make a chain of all returns.
2204
2205 CROSS_JUMP indicates whether we are doing cross jumping
2206 and, if so, whether we will be paying attention to
2207 death notes. */
2208
2209 static void
2210 mark_all_labels (f, cross_jump)
2211 rtx f;
2212 int cross_jump;
2213 {
2214 rtx insn;
2215
2216 for (insn = f; insn; insn = NEXT_INSN (insn))
2217 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2218 {
2219 mark_jump_label (PATTERN (insn), insn, cross_jump);
2220 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2221 {
2222 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2223 {
2224 jump_chain[INSN_UID (insn)]
2225 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2226 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2227 }
2228 if (GET_CODE (PATTERN (insn)) == RETURN)
2229 {
2230 jump_chain[INSN_UID (insn)] = jump_chain[0];
2231 jump_chain[0] = insn;
2232 }
2233 }
2234 }
2235 }
2236
2237 /* Delete all labels that are no longer referenced.
2238 Also find and return the last insn. */
2239
2240 static rtx
2241 delete_unreferenced_labels (f)
2242 rtx f;
2243 {
2244 rtx final = NULL_RTX;
2245 rtx insn;
2246
2247 for (insn = f; insn; )
2248 {
2249 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
2250 insn = delete_insn (insn);
2251 else
2252 {
2253 final = insn;
2254 insn = NEXT_INSN (insn);
2255 }
2256 }
2257
2258 return final;
2259 }
2260
2261 /* Delete various simple forms of moves which have no necessary
2262 side effect. */
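/* E.g. (set (reg 5) (reg 5)), or a register-to-register move whose
   source and destination were renumbered to the same hard register.
   (Illustrative examples; the cases actually handled follow.)  */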
2263
2264 static void
2265 delete_noop_moves (f)
2266 rtx f;
2267 {
2268 rtx insn, next;
2269
2270 for (insn = f; insn; )
2271 {
2272 next = NEXT_INSN (insn);
2273
2274 if (GET_CODE (insn) == INSN)
2275 {
2276 register rtx body = PATTERN (insn);
2277
2278 /* Combine stack_adjusts with following push_insns. */
2279 #ifdef PUSH_ROUNDING
2280 if (GET_CODE (body) == SET
2281 && SET_DEST (body) == stack_pointer_rtx
2282 && GET_CODE (SET_SRC (body)) == PLUS
2283 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
2284 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
2285 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
2286 {
2287 rtx p;
2288 rtx stack_adjust_insn = insn;
2289 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
2290 int total_pushed = 0;
2291 int pushes = 0;
2292
2293 /* Find all successive push insns. */
2294 p = insn;
2295 /* Don't convert more than three pushes;
2296 that starts adding too many displaced addresses
2297 and the whole thing starts becoming a losing
2298 proposition. */
2299 while (pushes < 3)
2300 {
2301 rtx pbody, dest;
2302 p = next_nonnote_insn (p);
2303 if (p == 0 || GET_CODE (p) != INSN)
2304 break;
2305 pbody = PATTERN (p);
2306 if (GET_CODE (pbody) != SET)
2307 break;
2308 dest = SET_DEST (pbody);
2309 /* Allow a no-op move between the adjust and the push. */
2310 if (GET_CODE (dest) == REG
2311 && GET_CODE (SET_SRC (pbody)) == REG
2312 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2313 continue;
2314 if (! (GET_CODE (dest) == MEM
2315 && GET_CODE (XEXP (dest, 0)) == POST_INC
2316 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2317 break;
2318 pushes++;
2319 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
2320 > stack_adjust_amount)
2321 break;
2322 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2323 }
2324
2325 /* Discard the amount pushed from the stack adjust;
2326 maybe eliminate it entirely. */
2327 if (total_pushed >= stack_adjust_amount)
2328 {
2329 delete_computation (stack_adjust_insn);
2330 total_pushed = stack_adjust_amount;
2331 }
2332 else
2333 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
2334 = GEN_INT (stack_adjust_amount - total_pushed);
2335
2336 /* Change the appropriate push insns to ordinary stores. */
2337 p = insn;
2338 while (total_pushed > 0)
2339 {
2340 rtx pbody, dest;
2341 p = next_nonnote_insn (p);
2342 if (GET_CODE (p) != INSN)
2343 break;
2344 pbody = PATTERN (p);
2345 if (GET_CODE (pbody) != SET)
2346 break;
2347 dest = SET_DEST (pbody);
2348 /* Allow a no-op move between the adjust and the push. */
2349 if (GET_CODE (dest) == REG
2350 && GET_CODE (SET_SRC (pbody)) == REG
2351 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2352 continue;
2353 if (! (GET_CODE (dest) == MEM
2354 && GET_CODE (XEXP (dest, 0)) == POST_INC
2355 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2356 break;
2357 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2358 /* If this push doesn't fully fit in the space
2359 of the stack adjust that we deleted,
2360 make another stack adjust here for what we
2361 didn't use up. There should be peepholes
2362 to recognize the resulting sequence of insns. */
2363 if (total_pushed < 0)
2364 {
2365 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
2366 GEN_INT (- total_pushed)),
2367 p);
2368 break;
2369 }
2370 XEXP (dest, 0)
2371 = plus_constant (stack_pointer_rtx, total_pushed);
2372 }
2373 }
2374 #endif
2375
2376 /* Detect and delete no-op move instructions
2377 resulting from not allocating a parameter in a register. */
2378
2379 if (GET_CODE (body) == SET
2380 && (SET_DEST (body) == SET_SRC (body)
2381 || (GET_CODE (SET_DEST (body)) == MEM
2382 && GET_CODE (SET_SRC (body)) == MEM
2383 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2384 && ! (GET_CODE (SET_DEST (body)) == MEM
2385 && MEM_VOLATILE_P (SET_DEST (body)))
2386 && ! (GET_CODE (SET_SRC (body)) == MEM
2387 && MEM_VOLATILE_P (SET_SRC (body))))
2388 delete_computation (insn);
2389
2390 /* Detect and ignore no-op move instructions
2391 resulting from smart or fortuitous register allocation. */
2392
2393 else if (GET_CODE (body) == SET)
2394 {
2395 int sreg = true_regnum (SET_SRC (body));
2396 int dreg = true_regnum (SET_DEST (body));
2397
2398 if (sreg == dreg && sreg >= 0)
2399 delete_insn (insn);
2400 else if (sreg >= 0 && dreg >= 0)
2401 {
2402 rtx trial;
2403 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2404 sreg, NULL_PTR, dreg,
2405 GET_MODE (SET_SRC (body)));
2406
2407 if (tem != 0
2408 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2409 {
2410 /* DREG may have been the target of a REG_DEAD note in
2411 the insn which makes INSN redundant. If so, reorg
2412 would still think it is dead. So search for such a
2413 note and delete it if we find it. */
2414 if (! find_regno_note (insn, REG_UNUSED, dreg))
2415 for (trial = prev_nonnote_insn (insn);
2416 trial && GET_CODE (trial) != CODE_LABEL;
2417 trial = prev_nonnote_insn (trial))
2418 if (find_regno_note (trial, REG_DEAD, dreg))
2419 {
2420 remove_death (dreg, trial);
2421 break;
2422 }
2423
2424 /* Deleting insn could lose a death-note for SREG. */
2425 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2426 {
2427 /* Change this into a USE so that we won't emit
2428 code for it, but still can keep the note. */
2429 PATTERN (insn)
2430 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2431 INSN_CODE (insn) = -1;
2432 /* Remove all reg notes but the REG_DEAD one. */
2433 REG_NOTES (insn) = trial;
2434 XEXP (trial, 1) = NULL_RTX;
2435 }
2436 else
2437 delete_insn (insn);
2438 }
2439 }
2440 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2441 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2442 NULL_PTR, 0,
2443 GET_MODE (SET_DEST (body))))
2444 {
2445 /* This handles the case where we have two consecutive
2446 assignments of the same constant to pseudos that didn't
2447 get a hard reg. Each SET from the constant will be
2448 converted into a SET of the spill register and an
2449 output reload will be made following it. This produces
2450 two loads of the same constant into the same spill
2451 register. */
2452
2453 rtx in_insn = insn;
2454
2455 /* Look back for a death note for the first reg.
2456 If there is one, it is no longer accurate. */
2457 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2458 {
2459 if ((GET_CODE (in_insn) == INSN
2460 || GET_CODE (in_insn) == JUMP_INSN)
2461 && find_regno_note (in_insn, REG_DEAD, dreg))
2462 {
2463 remove_death (dreg, in_insn);
2464 break;
2465 }
2466 in_insn = PREV_INSN (in_insn);
2467 }
2468
2469 /* Delete the second load of the value. */
2470 delete_insn (insn);
2471 }
2472 }
2473 else if (GET_CODE (body) == PARALLEL)
2474 {
2475 /* If each part is a set between two identical registers or
2476 a USE or CLOBBER, delete the insn. */
2477 int i, sreg, dreg;
2478 rtx tem;
2479
2480 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2481 {
2482 tem = XVECEXP (body, 0, i);
2483 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2484 continue;
2485
2486 if (GET_CODE (tem) != SET
2487 || (sreg = true_regnum (SET_SRC (tem))) < 0
2488 || (dreg = true_regnum (SET_DEST (tem))) < 0
2489 || dreg != sreg)
2490 break;
2491 }
2492
2493 if (i < 0)
2494 delete_insn (insn);
2495 }
2496 /* Also delete insns to store bit fields if they are no-ops. */
2497 /* Not worth the hair to detect this in the big-endian case. */
2498 else if (! BYTES_BIG_ENDIAN
2499 && GET_CODE (body) == SET
2500 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2501 && XEXP (SET_DEST (body), 2) == const0_rtx
2502 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2503 && ! (GET_CODE (SET_SRC (body)) == MEM
2504 && MEM_VOLATILE_P (SET_SRC (body))))
2505 delete_insn (insn);
2506 }
2507 insn = next;
2508 }
2509 }
2510
2511 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2512 If so indicate that this function can drop off the end by returning
2513 1, else return 0.
2514
2515 CHECK_DELETED indicates whether we must check if the note being
2516 searched for has the deleted flag set.
2517
2518 DELETE_FINAL_NOTE indicates whether we should delete the note
2519 if we find it. */
2520
2521 static int
2522 calculate_can_reach_end (last, check_deleted, delete_final_note)
2523 rtx last;
2524 int check_deleted;
2525 int delete_final_note;
2526 {
2527 rtx insn = last;
2528 int n_labels = 1;
2529
2530 while (insn != NULL_RTX)
2531 {
2532 int ok = 0;
2533
2534 /* One label can follow the end-note: the return label. */
2535 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2536 ok = 1;
2537 /* Ordinary insns can follow it if returning a structure. */
2538 else if (GET_CODE (insn) == INSN)
2539 ok = 1;
2540 /* If the machine uses explicit RETURN insns and has no epilogue,
2541 then one of them follows the note. */
2542 else if (GET_CODE (insn) == JUMP_INSN
2543 && GET_CODE (PATTERN (insn)) == RETURN)
2544 ok = 1;
2545 /* A barrier can follow the return insn. */
2546 else if (GET_CODE (insn) == BARRIER)
2547 ok = 1;
2548 /* Other kinds of notes can follow also. */
2549 else if (GET_CODE (insn) == NOTE
2550 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2551 ok = 1;
2552
2553 if (ok != 1)
2554 break;
2555
2556 insn = PREV_INSN (insn);
2557 }
2558
2559 /* See if we backed up to the appropriate type of note. */
2560 if (insn != NULL_RTX
2561 && GET_CODE (insn) == NOTE
2562 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
2563 && (check_deleted == 0
2564 || ! INSN_DELETED_P (insn)))
2565 {
2566 if (delete_final_note)
2567 delete_insn (insn);
2568 return 1;
2569 }
2570
2571 return 0;
2572 }
2573
2574 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2575 jump. Assume that this unconditional jump is to the exit test code. If
2576 the code is sufficiently simple, make a copy of it before INSN,
2577 followed by a jump to the exit of the loop. Then delete the unconditional
2578 jump after INSN.
2579
2580 Return 1 if we made the change, else 0.
2581
2582 This is only safe immediately after a regscan pass because it uses the
2583 values of regno_first_uid and regno_last_uid. */
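/* Sketch of the transformation (labels are illustrative).  Before:

	jump L_test;
   L_top:
	...loop body...
   L_test:
	exit-test insns; (cond) jump L_top;
   L_exit:

   A copy of the exit-test insns, still conditionally jumping to L_top,
   is emitted before the loop, followed by "jump L_exit"; the original
   "jump L_test" is then deleted, so each iteration falls through from
   the body into the in-loop test.  */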
2584
2585 static int
2586 duplicate_loop_exit_test (loop_start)
2587 rtx loop_start;
2588 {
2589 rtx insn, set, reg, p, link;
2590 rtx copy = 0;
2591 int num_insns = 0;
2592 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2593 rtx lastexit;
2594 int max_reg = max_reg_num ();
2595 rtx *reg_map = 0;
2596
2597 /* Scan the exit code. We do not perform this optimization if any insn:
2598
2599 is a CALL_INSN
2600 is a CODE_LABEL
2601 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2602 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2603 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2604 is not valid.
2605
2606 We also do not do this if we find an insn with ASM_OPERANDS. While
2607 this restriction should not be necessary, copying an insn with
2608 ASM_OPERANDS can confuse asm_noperands in some cases.
2609
2610 Also, don't do this if the exit code is more than 20 insns. */
2611
2612 for (insn = exitcode;
2613 insn
2614 && ! (GET_CODE (insn) == NOTE
2615 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2616 insn = NEXT_INSN (insn))
2617 {
2618 switch (GET_CODE (insn))
2619 {
2620 case CODE_LABEL:
2621 case CALL_INSN:
2622 return 0;
2623 case NOTE:
2624 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2625 a jump immediately after the loop start that branches outside
2626 the loop but within an outer loop, near the exit test.
2627 If we copied this exit test and created a phony
2628 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2629 before the exit test look as if they could be safely moved
2630 out of the loop even though they may never actually be executed.
2631 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2632
2633 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2634 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2635 return 0;
2636
2637 if (optimize < 2
2638 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2639 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2640 /* If we were to duplicate this code, we would not move
2641 the BLOCK notes, and so debugging the moved code would
2642 be difficult. Thus, we only move the code with -O2 or
2643 higher. */
2644 return 0;
2645
2646 break;
2647 case JUMP_INSN:
2648 case INSN:
2649 /* The code below would grossly mishandle REG_WAS_0 notes,
2650 so get rid of them here. */
2651 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2652 remove_note (insn, p);
2653 if (++num_insns > 20
2654 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2655 || find_reg_note (insn, REG_LIBCALL, NULL_RTX)
2656 || asm_noperands (PATTERN (insn)) > 0)
2657 return 0;
2658 break;
2659 default:
2660 break;
2661 }
2662 }
2663
2664 /* Unless INSN is zero, we can do the optimization. */
2665 if (insn == 0)
2666 return 0;
2667
2668 lastexit = insn;
2669
2670 /* See if any insn sets a register only used in the loop exit code and
2671 not a user variable. If so, replace it with a new register. */
2672 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2673 if (GET_CODE (insn) == INSN
2674 && (set = single_set (insn)) != 0
2675 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2676 || (GET_CODE (reg) == SUBREG
2677 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2678 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2679 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
2680 {
2681 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2682 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2683 break;
2684
2685 if (p != lastexit)
2686 {
2687 /* We can do the replacement. Allocate reg_map if this is the
2688 first replacement we found. */
2689 if (reg_map == 0)
2690 {
2691 reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
2692 bzero ((char *) reg_map, max_reg * sizeof (rtx));
2693 }
2694
2695 REG_LOOP_TEST_P (reg) = 1;
2696
2697 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2698 }
2699 }
2700
2701 /* Now copy each insn. */
2702 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2703 switch (GET_CODE (insn))
2704 {
2705 case BARRIER:
2706 copy = emit_barrier_before (loop_start);
2707 break;
2708 case NOTE:
2709 /* Only copy line-number notes. */
2710 if (NOTE_LINE_NUMBER (insn) >= 0)
2711 {
2712 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2713 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2714 }
2715 break;
2716
2717 case INSN:
2718 copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2719 if (reg_map)
2720 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2721
2722 mark_jump_label (PATTERN (copy), copy, 0);
2723
2724 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2725 make them. */
2726 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2727 if (REG_NOTE_KIND (link) != REG_LABEL)
2728 REG_NOTES (copy)
2729 = copy_rtx (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
2730 XEXP (link, 0),
2731 REG_NOTES (copy)));
2732 if (reg_map && REG_NOTES (copy))
2733 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2734 break;
2735
2736 case JUMP_INSN:
2737 copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2738 if (reg_map)
2739 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2740 mark_jump_label (PATTERN (copy), copy, 0);
2741 if (REG_NOTES (insn))
2742 {
2743 REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
2744 if (reg_map)
2745 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2746 }
2747
2748 /* If this is a simple jump, add it to the jump chain. */
2749
2750 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2751 && simplejump_p (copy))
2752 {
2753 jump_chain[INSN_UID (copy)]
2754 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2755 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2756 }
2757 break;
2758
2759 default:
2760 abort ();
2761 }
2762
2763 /* Now clean up by emitting a jump to the end label and deleting the jump
2764 at the start of the loop. */
2765 if (! copy || GET_CODE (copy) != BARRIER)
2766 {
2767 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2768 loop_start);
2769 mark_jump_label (PATTERN (copy), copy, 0);
2770 if (INSN_UID (copy) < max_jump_chain
2771 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2772 {
2773 jump_chain[INSN_UID (copy)]
2774 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2775 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2776 }
2777 emit_barrier_before (loop_start);
2778 }
2779
2780 /* Mark the exit code as the virtual top of the converted loop. */
2781 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2782
2783 delete_insn (next_nonnote_insn (loop_start));
2784
2785 return 1;
2786 }
2787 \f
2788 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2789 loop-end notes between START and END out before START. Assume that
2790 END is not such a note. START may be such a note. Returns the value
2791 of the new starting insn, which may be different if the original start
2792 was such a note. */
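/* E.g. if the range is [NOTE_INSN_LOOP_BEG; insn A; NOTE_INSN_BLOCK_END;
   insn B], both notes end up before the returned start, which then
   points at insn A.  (Illustrative only.)  */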
2793
2794 rtx
2795 squeeze_notes (start, end)
2796 rtx start, end;
2797 {
2798 rtx insn;
2799 rtx next;
2800
2801 for (insn = start; insn != end; insn = next)
2802 {
2803 next = NEXT_INSN (insn);
2804 if (GET_CODE (insn) == NOTE
2805 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2806 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2807 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2808 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2809 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2810 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2811 {
2812 if (insn == start)
2813 start = next;
2814 else
2815 {
2816 rtx prev = PREV_INSN (insn);
2817 PREV_INSN (insn) = PREV_INSN (start);
2818 NEXT_INSN (insn) = start;
2819 NEXT_INSN (PREV_INSN (insn)) = insn;
2820 PREV_INSN (NEXT_INSN (insn)) = insn;
2821 NEXT_INSN (prev) = next;
2822 PREV_INSN (next) = prev;
2823 }
2824 }
2825 }
2826
2827 return start;
2828 }
2829 \f
2830 /* Compare the instructions before insn E1 with those before E2
2831 to find an opportunity for cross jumping.
2832 (This means detecting identical sequences of insns followed by
2833 jumps to the same place, or followed by a label and a jump
2834 to that label, and replacing one with a jump to the other.)
2835
2836 Assume E1 is a jump that jumps to label E2
2837 (that is not always true but it might as well be).
2838 Find the longest possible equivalent sequences
2839 and store the first insns of those sequences into *F1 and *F2.
2840 Store zero there if no equivalent preceding instructions are found.
2841
2842 We give up if we find a label in stream 1.
2843 Actually we could transfer that label into stream 2. */
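/* A rough sketch of the walk below: step I1 back from E1 and I2 back
   from E2 one real insn at a time, requiring matching codes and
   rtx_renumbered_equal_p patterns; stop at the first mismatch, at a
   label in stream 1, or when either range would include the other
   jump.  */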
2844
2845 static void
2846 find_cross_jump (e1, e2, minimum, f1, f2)
2847 rtx e1, e2;
2848 int minimum;
2849 rtx *f1, *f2;
2850 {
2851 register rtx i1 = e1, i2 = e2;
2852 register rtx p1, p2;
2853 int lose = 0;
2854
2855 rtx last1 = 0, last2 = 0;
2856 rtx afterlast1 = 0, afterlast2 = 0;
2857
2858 *f1 = 0;
2859 *f2 = 0;
2860
2861 while (1)
2862 {
2863 i1 = prev_nonnote_insn (i1);
2864
2865 i2 = PREV_INSN (i2);
2866 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2867 i2 = PREV_INSN (i2);
2868
2869 if (i1 == 0)
2870 break;
2871
2872 /* Don't allow the range of insns preceding E1 or E2
2873 to include the other (E2 or E1). */
2874 if (i2 == e1 || i1 == e2)
2875 break;
2876
2877 /* If we will get to this code by jumping, those jumps will be
2878 tensioned to go directly to the new label (before I2),
2879 so this cross-jumping won't cost extra. So reduce the minimum. */
2880 if (GET_CODE (i1) == CODE_LABEL)
2881 {
2882 --minimum;
2883 break;
2884 }
2885
2886 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2887 break;
2888
2889 /* Avoid moving insns across EH regions if either of the insns
2890 can throw. */
2891 if (flag_exceptions
2892 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
2893 && !in_same_eh_region (i1, i2))
2894 break;
2895
2896 p1 = PATTERN (i1);
2897 p2 = PATTERN (i2);
2898
2899 /* If this is a CALL_INSN, compare register usage information.
2900 If we don't check this on stack register machines, the two
2901 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2902 numbers of stack registers in the same basic block.
2903 If we don't check this on machines with delay slots, a delay slot may
2904 be filled that clobbers a parameter expected by the subroutine.
2905
2906 ??? We take the simple route for now and assume that if they're
2907 equal, they were constructed identically. */
2908
2909 if (GET_CODE (i1) == CALL_INSN
2910 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2911 CALL_INSN_FUNCTION_USAGE (i2)))
2912 lose = 1;
2913
2914 #ifdef STACK_REGS
2915 /* If cross_jump_death_matters is not 0, the insn's mode
2916 indicates whether or not the insn contains any stack-like
2917 regs. */
2918
2919 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
2920 {
2921 /* If register stack conversion has already been done, then
2922 death notes must also be compared before it is certain that
2923 the two instruction streams match. */
2924
2925 rtx note;
2926 HARD_REG_SET i1_regset, i2_regset;
2927
2928 CLEAR_HARD_REG_SET (i1_regset);
2929 CLEAR_HARD_REG_SET (i2_regset);
2930
2931 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2932 if (REG_NOTE_KIND (note) == REG_DEAD
2933 && STACK_REG_P (XEXP (note, 0)))
2934 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2935
2936 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2937 if (REG_NOTE_KIND (note) == REG_DEAD
2938 && STACK_REG_P (XEXP (note, 0)))
2939 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2940
2941 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2942
2943 lose = 1;
2944
2945 done:
2946 ;
2947 }
2948 #endif
2949
2950 /* Don't allow old-style asm or volatile extended asms to be accepted
2951 for cross jumping purposes. It is conceptually correct to allow
2952 them, since cross-jumping preserves the dynamic instruction order
2953 even though it is changing the static instruction order. However,
2954 if an asm is being used to emit an assembler pseudo-op, such as
2955 the MIPS `.set reorder' pseudo-op, then the static instruction order
2956 matters and it must be preserved. */
2957 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
2958 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
2959 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
2960 lose = 1;
2961
2962 if (lose || GET_CODE (p1) != GET_CODE (p2)
2963 || ! rtx_renumbered_equal_p (p1, p2))
2964 {
2965 /* The following code helps take care of G++ cleanups. */
2966 rtx equiv1;
2967 rtx equiv2;
2968
2969 if (!lose && GET_CODE (p1) == GET_CODE (p2)
2970 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2971 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2972 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2973 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
2974 /* If the equivalences are not to a constant, they may
2975 reference pseudos that no longer exist, so we can't
2976 use them. */
2977 && CONSTANT_P (XEXP (equiv1, 0))
2978 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2979 {
2980 rtx s1 = single_set (i1);
2981 rtx s2 = single_set (i2);
2982 if (s1 != 0 && s2 != 0
2983 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
2984 {
2985 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
2986 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
2987 if (! rtx_renumbered_equal_p (p1, p2))
2988 cancel_changes (0);
2989 else if (apply_change_group ())
2990 goto win;
2991 }
2992 }
2993
2994 /* These insns fail to match; the cross jump is limited to the
2995 insns already matched after this point. */
2996
2997 #ifdef HAVE_cc0
2998 /* Don't allow the insn after a compare to be shared by
2999 cross-jumping unless the compare is also shared.
3000 Here, if either of these non-matching insns is a compare,
3001 exclude the following insn from possible cross-jumping. */
3002 if (sets_cc0_p (p1) || sets_cc0_p (p2))
3003 last1 = afterlast1, last2 = afterlast2, ++minimum;
3004 #endif
3005
3006 /* If cross-jumping here will feed a jump-around-jump
3007 optimization, this jump won't cost extra, so reduce
3008 the minimum. */
3009 if (GET_CODE (i1) == JUMP_INSN
3010 && JUMP_LABEL (i1)
3011 && prev_real_insn (JUMP_LABEL (i1)) == e1)
3012 --minimum;
3013 break;
3014 }
3015
3016 win:
3017 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3018 {
3019 /* Ok, this insn is potentially includable in a cross-jump here. */
3020 afterlast1 = last1, afterlast2 = last2;
3021 last1 = i1, last2 = i2, --minimum;
3022 }
3023 }
3024
3025 if (minimum <= 0 && last1 != 0 && last1 != e1)
3026 *f1 = last1, *f2 = last2;
3027 }
3028
3029 static void
3030 do_cross_jump (insn, newjpos, newlpos)
3031 rtx insn, newjpos, newlpos;
3032 {
3033 /* Find an existing label at this point
3034 or make a new one if there is none. */
3035 register rtx label = get_label_before (newlpos);
3036
3037 /* Make the same jump insn jump to the new point. */
3038 if (GET_CODE (PATTERN (insn)) == RETURN)
3039 {
3040 /* Remove from jump chain of returns. */
3041 delete_from_jump_chain (insn);
3042 /* Change the insn. */
3043 PATTERN (insn) = gen_jump (label);
3044 INSN_CODE (insn) = -1;
3045 JUMP_LABEL (insn) = label;
3046 LABEL_NUSES (label)++;
3047 /* Add to the new label's jump chain. */
3048 if (INSN_UID (label) < max_jump_chain
3049 && INSN_UID (insn) < max_jump_chain)
3050 {
3051 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
3052 jump_chain[INSN_UID (label)] = insn;
3053 }
3054 }
3055 else
3056 redirect_jump (insn, label);
3057
3058 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3059 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3060 the NEWJPOS stream. */
3061
3062 while (newjpos != insn)
3063 {
3064 rtx lnote;
3065
3066 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
3067 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
3068 || REG_NOTE_KIND (lnote) == REG_EQUIV)
3069 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
3070 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
3071 remove_note (newlpos, lnote);
3072
3073 delete_insn (newjpos);
3074 newjpos = next_real_insn (newjpos);
3075 newlpos = next_real_insn (newlpos);
3076 }
3077 }
3078 \f
3079 /* Return the label before INSN, or put a new label there. */
3080
3081 rtx
3082 get_label_before (insn)
3083 rtx insn;
3084 {
3085 rtx label;
3086
3087 /* Find an existing label at this point
3088 or make a new one if there is none. */
3089 label = prev_nonnote_insn (insn);
3090
3091 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3092 {
3093 rtx prev = PREV_INSN (insn);
3094
3095 label = gen_label_rtx ();
3096 emit_label_after (label, prev);
3097 LABEL_NUSES (label) = 0;
3098 }
3099 return label;
3100 }
3101
3102 /* Return the label after INSN, or put a new label there. */
3103
3104 rtx
3105 get_label_after (insn)
3106 rtx insn;
3107 {
3108 rtx label;
3109
3110 /* Find an existing label at this point
3111 or make a new one if there is none. */
3112 label = next_nonnote_insn (insn);
3113
3114 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3115 {
3116 label = gen_label_rtx ();
3117 emit_label_after (label, insn);
3118 LABEL_NUSES (label) = 0;
3119 }
3120 return label;
3121 }
3122 \f
3123 /* Return 1 if INSN is a jump that jumps to right after TARGET
3124 only on the condition that TARGET itself would drop through.
3125 Assumes that TARGET is a conditional jump. */
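/* E.g. (hypothetical): TARGET is "(a >= b) jump L_far" and INSN,
   later in the stream, is "(a < b) jump L_back" where L_back labels
   the insn just after TARGET; INSN branches back exactly when TARGET
   would have dropped through.  */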
3126
3127 static int
3128 jump_back_p (insn, target)
3129 rtx insn, target;
3130 {
3131 rtx cinsn, ctarget;
3132 enum rtx_code codei, codet;
3133
3134 if (simplejump_p (insn) || ! condjump_p (insn)
3135 || simplejump_p (target)
3136 || target != prev_real_insn (JUMP_LABEL (insn)))
3137 return 0;
3138
3139 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3140 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3141
3142 codei = GET_CODE (cinsn);
3143 codet = GET_CODE (ctarget);
3144
3145 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3146 {
3147 if (! can_reverse_comparison_p (cinsn, insn))
3148 return 0;
3149 codei = reverse_condition (codei);
3150 }
3151
3152 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3153 {
3154 if (! can_reverse_comparison_p (ctarget, target))
3155 return 0;
3156 codet = reverse_condition (codet);
3157 }
3158
3159 return (codei == codet
3160 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3161 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3162 }
3163 \f
3164 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3165 return non-zero if it is safe to reverse this comparison. It is safe if our
3166 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3167 this is known to be an integer comparison. */
3168
3169 int
3170 can_reverse_comparison_p (comparison, insn)
3171 rtx comparison;
3172 rtx insn;
3173 {
3174 rtx arg0;
3175
3176 /* If this is not actually a comparison, we can't reverse it. */
3177 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3178 return 0;
3179
3180 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3181 /* If this is an NE comparison, it is safe to reverse it to an EQ
3182 comparison and vice versa, even for floating point. If no operands
3183 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3184 always false and NE is always true, so the reversal is also valid. */
3185 || flag_fast_math
3186 || GET_CODE (comparison) == NE
3187 || GET_CODE (comparison) == EQ)
3188 return 1;
3189
3190 arg0 = XEXP (comparison, 0);
3191
3192 /* Make sure ARG0 is one of the actual objects being compared. If we
3193 can't do this, we can't be sure the comparison can be reversed.
3194
3195 Handle cc0 and a MODE_CC register. */
3196 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3197 #ifdef HAVE_cc0
3198 || arg0 == cc0_rtx
3199 #endif
3200 )
3201 {
3202 rtx prev = prev_nonnote_insn (insn);
3203 rtx set;
3204
3205 /* If the comparison itself was a loop invariant, it could have been
3206 hoisted out of the loop. If we proceed to unroll such a loop, then
3207 we may not be able to find the comparison when copying the loop.
3208
3209 Returning zero in that case is the safe thing to do. */
3210 if (prev == 0)
3211 return 0;
3212
3213 set = single_set (prev);
3214 if (set == 0 || SET_DEST (set) != arg0)
3215 return 0;
3216
3217 arg0 = SET_SRC (set);
3218
3219 if (GET_CODE (arg0) == COMPARE)
3220 arg0 = XEXP (arg0, 0);
3221 }
3222
3223 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3224 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3225 return (GET_CODE (arg0) == CONST_INT
3226 || (GET_MODE (arg0) != VOIDmode
3227 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3228 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3229 }
3230
3231 /* Given an rtx-code for a comparison, return the code
3232 for the negated comparison.
3233 WATCH OUT! reverse_condition is not safe to use on a jump
3234 that might be acting on the results of an IEEE floating point comparison,
3235 because of the special treatment of non-signaling nans in comparisons.
3236 Use can_reverse_comparison_p to be sure. */
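/* E.g. reverse_condition (GE) == LT; but if either operand is an IEEE
   NaN, (a >= b) and (a < b) are both false, so the reversal would be
   wrong for floating point (only EQ <-> NE remains safe).  */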
3237
3238 enum rtx_code
3239 reverse_condition (code)
3240 enum rtx_code code;
3241 {
3242 switch (code)
3243 {
3244 case EQ:
3245 return NE;
3246
3247 case NE:
3248 return EQ;
3249
3250 case GT:
3251 return LE;
3252
3253 case GE:
3254 return LT;
3255
3256 case LT:
3257 return GE;
3258
3259 case LE:
3260 return GT;
3261
3262 case GTU:
3263 return LEU;
3264
3265 case GEU:
3266 return LTU;
3267
3268 case LTU:
3269 return GEU;
3270
3271 case LEU:
3272 return GTU;
3273
3274 default:
3275 abort ();
3276 return UNKNOWN;
3277 }
3278 }
3279
3280 /* Similar, but return the code when two operands of a comparison are swapped.
3281 This IS safe for IEEE floating-point. */
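/* E.g. swap_condition (LT) == GT, since (a < b) and (b > a) agree
   even when an operand is an IEEE NaN (both are then false).  */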
3282
3283 enum rtx_code
3284 swap_condition (code)
3285 enum rtx_code code;
3286 {
3287 switch (code)
3288 {
3289 case EQ:
3290 case NE:
3291 return code;
3292
3293 case GT:
3294 return LT;
3295
3296 case GE:
3297 return LE;
3298
3299 case LT:
3300 return GT;
3301
3302 case LE:
3303 return GE;
3304
3305 case GTU:
3306 return LTU;
3307
3308 case GEU:
3309 return LEU;
3310
3311 case LTU:
3312 return GTU;
3313
3314 case LEU:
3315 return GEU;
3316
3317 default:
3318 abort ();
3319 return UNKNOWN;
3320 }
3321 }
3322
3323 /* Given a comparison CODE, return the corresponding unsigned comparison.
3324 If CODE is an equality comparison or already an unsigned comparison,
3325 CODE is returned. */
3326
3327 enum rtx_code
3328 unsigned_condition (code)
3329 enum rtx_code code;
3330 {
3331 switch (code)
3332 {
3333 case EQ:
3334 case NE:
3335 case GTU:
3336 case GEU:
3337 case LTU:
3338 case LEU:
3339 return code;
3340
3341 case GT:
3342 return GTU;
3343
3344 case GE:
3345 return GEU;
3346
3347 case LT:
3348 return LTU;
3349
3350 case LE:
3351 return LEU;
3352
3353 default:
3354 abort ();
3355 }
3356 }
3357
3358 /* Similarly, return the signed version of a comparison. */
3359
3360 enum rtx_code
3361 signed_condition (code)
3362 enum rtx_code code;
3363 {
3364 switch (code)
3365 {
3366 case EQ:
3367 case NE:
3368 case GT:
3369 case GE:
3370 case LT:
3371 case LE:
3372 return code;
3373
3374 case GTU:
3375 return GT;
3376
3377 case GEU:
3378 return GE;
3379
3380 case LTU:
3381 return LT;
3382
3383 case LEU:
3384 return LE;
3385
3386 default:
3387 abort ();
3388 }
3389 }
3390 \f
3391 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3392 truth of CODE1 implies the truth of CODE2. */
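/* E.g. comparison_dominates_p (EQ, GE) is 1, since a == b implies
   a >= b; likewise comparison_dominates_p (LT, NE) is 1.  */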
3393
3394 int
3395 comparison_dominates_p (code1, code2)
3396 enum rtx_code code1, code2;
3397 {
3398 if (code1 == code2)
3399 return 1;
3400
3401 switch (code1)
3402 {
3403 case EQ:
3404 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
3405 return 1;
3406 break;
3407
3408 case LT:
3409 if (code2 == LE || code2 == NE)
3410 return 1;
3411 break;
3412
3413 case GT:
3414 if (code2 == GE || code2 == NE)
3415 return 1;
3416 break;
3417
3418 case LTU:
3419 if (code2 == LEU || code2 == NE)
3420 return 1;
3421 break;
3422
3423 case GTU:
3424 if (code2 == GEU || code2 == NE)
3425 return 1;
3426 break;
3427
3428 default:
3429 break;
3430 }
3431
3432 return 0;
3433 }
3434 \f
3435 /* Return 1 if INSN is an unconditional jump and nothing else. */
3436
3437 int
3438 simplejump_p (insn)
3439 rtx insn;
3440 {
3441 return (GET_CODE (insn) == JUMP_INSN
3442 && GET_CODE (PATTERN (insn)) == SET
3443 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3444 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3445 }
3446
3447 /* Return nonzero if INSN is a (possibly) conditional jump
3448 and nothing more. */
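/* The accepted patterns are, schematically:
	(set (pc) (label_ref L))
	(set (pc) (if_then_else COND (label_ref L) (pc)))
	(set (pc) (if_then_else COND (pc) (label_ref L)))
   where either arm of the if_then_else may also be (return).  */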
3449
3450 int
3451 condjump_p (insn)
3452 rtx insn;
3453 {
3454 register rtx x = PATTERN (insn);
3455 if (GET_CODE (x) != SET)
3456 return 0;
3457 if (GET_CODE (SET_DEST (x)) != PC)
3458 return 0;
3459 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3460 return 1;
3461 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3462 return 0;
3463 if (XEXP (SET_SRC (x), 2) == pc_rtx
3464 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3465 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3466 return 1;
3467 if (XEXP (SET_SRC (x), 1) == pc_rtx
3468 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3469 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3470 return 1;
3471 return 0;
3472 }
3473
3474 /* Return nonzero if INSN is a (possibly) conditional jump
3475 within a PARALLEL, and nothing more. */
3476
3477 int
3478 condjump_in_parallel_p (insn)
3479 rtx insn;
3480 {
3481 register rtx x = PATTERN (insn);
3482
3483 if (GET_CODE (x) != PARALLEL)
3484 return 0;
3485 else
3486 x = XVECEXP (x, 0, 0);
3487
3488 if (GET_CODE (x) != SET)
3489 return 0;
3490 if (GET_CODE (SET_DEST (x)) != PC)
3491 return 0;
3492 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3493 return 1;
3494 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3495 return 0;
3496 if (XEXP (SET_SRC (x), 2) == pc_rtx
3497 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3498 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3499 return 1;
3500 if (XEXP (SET_SRC (x), 1) == pc_rtx
3501 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3502 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3503 return 1;
3504 return 0;
3505 }
3506
3507 /* Return the label of a conditional jump. */
3508
3509 rtx
3510 condjump_label (insn)
3511 rtx insn;
3512 {
3513 register rtx x = PATTERN (insn);
3514
3515 if (GET_CODE (x) == PARALLEL)
3516 x = XVECEXP (x, 0, 0);
3517 if (GET_CODE (x) != SET)
3518 return NULL_RTX;
3519 if (GET_CODE (SET_DEST (x)) != PC)
3520 return NULL_RTX;
3521 x = SET_SRC (x);
3522 if (GET_CODE (x) == LABEL_REF)
3523 return x;
3524 if (GET_CODE (x) != IF_THEN_ELSE)
3525 return NULL_RTX;
3526 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
3527 return XEXP (x, 1);
3528 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
3529 return XEXP (x, 2);
3530 return NULL_RTX;
3531 }
3532
3533 /* Return true if INSN is a (possibly conditional) return insn. */
3534
3535 static int
3536 returnjump_p_1 (loc, data)
3537 rtx *loc;
3538 void *data ATTRIBUTE_UNUSED;
3539 {
3540 rtx x = *loc;
3541 return GET_CODE (x) == RETURN;
3542 }
3543
3544 int
3545 returnjump_p (insn)
3546 rtx insn;
3547 {
3548 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
3549 }
3550
3551 /* Return true if INSN is a jump that only transfers control and
3552 nothing more. */
3553
3554 int
3555 onlyjump_p (insn)
3556 rtx insn;
3557 {
3558 rtx set;
3559
3560 if (GET_CODE (insn) != JUMP_INSN)
3561 return 0;
3562
3563 set = single_set (insn);
3564 if (set == NULL)
3565 return 0;
3566 if (GET_CODE (SET_DEST (set)) != PC)
3567 return 0;
3568 if (side_effects_p (SET_SRC (set)))
3569 return 0;
3570
3571 return 1;
3572 }
3573
3574 #ifdef HAVE_cc0
3575
3576 /* Return 1 if X is an RTX that does nothing but set the condition codes
3577 and CLOBBER or USE registers.
3578 Return -1 if X explicitly sets the condition codes
3579 but also does other things. */
3580
3581 int
3582 sets_cc0_p (x)
3583 rtx x ATTRIBUTE_UNUSED;
3584 {
3585 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3586 return 1;
3587 if (GET_CODE (x) == PARALLEL)
3588 {
3589 int i;
3590 int sets_cc0 = 0;
3591 int other_things = 0;
3592 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3593 {
3594 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3595 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3596 sets_cc0 = 1;
3597 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3598 other_things = 1;
3599 }
3600 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3601 }
3602 return 0;
3603 }
3604 #endif
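
/* For example, sets_cc0_p returns 1 for
     (set (cc0) (compare (reg 58) (const_int 0)))
   and -1 for a PARALLEL that sets cc0 but also contains a SET of
   some other destination.  */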
3605 \f
3606 /* Follow any unconditional jump at LABEL;
3607 return the ultimate label reached by any such chain of jumps.
3608 If LABEL is not followed by a jump, return LABEL.
3609 If the chain loops or we can't find end, return LABEL,
3610 since that tells caller to avoid changing the insn.
3611
3612 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3613 a USE or CLOBBER. */
3614
3615 rtx
3616 follow_jumps (label)
3617 rtx label;
3618 {
3619 register rtx insn;
3620 register rtx next;
3621 register rtx value = label;
3622 register int depth;
3623
3624 for (depth = 0;
3625 (depth < 10
3626 && (insn = next_active_insn (value)) != 0
3627 && GET_CODE (insn) == JUMP_INSN
3628 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3629 || GET_CODE (PATTERN (insn)) == RETURN)
3630 && (next = NEXT_INSN (insn))
3631 && GET_CODE (next) == BARRIER);
3632 depth++)
3633 {
3634 /* Don't chain through the insn that jumps into a loop
3635 from outside the loop,
3636 since that would create multiple loop entry jumps
3637 and prevent loop optimization. */
3638 rtx tem;
3639 if (!reload_completed)
3640 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3641 if (GET_CODE (tem) == NOTE
3642 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
3643 /* ??? Optional. Disables some optimizations, but makes
3644 gcov output more accurate with -O. */
3645 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
3646 return value;
3647
3648 /* If we have found a cycle, make the insn jump to itself. */
3649 if (JUMP_LABEL (insn) == label)
3650 return label;
3651
3652 tem = next_active_insn (JUMP_LABEL (insn));
3653 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3654 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3655 break;
3656
3657 value = JUMP_LABEL (insn);
3658 }
3659 if (depth == 10)
3660 return label;
3661 return value;
3662 }
3663
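/* For example, if the insn after L1 is an unconditional jump to L2
   (followed by its BARRIER), and likewise from L2 to L3, then
   follow_jumps (L1) returns L3; the chain is followed at most 10
   jumps deep.  */
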
3664 /* Assuming that field IDX of X is a vector of label_refs,
3665 replace each of them by the ultimate label reached by it.
3666 Return nonzero if a change is made.
3667 Before reload, follow_jumps does not chain across a NOTE_INSN_LOOP_BEG. */
3668
3669 static int
3670 tension_vector_labels (x, idx)
3671 register rtx x;
3672 register int idx;
3673 {
3674 int changed = 0;
3675 register int i;
3676 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3677 {
3678 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3679 register rtx nlabel = follow_jumps (olabel);
3680 if (nlabel && nlabel != olabel)
3681 {
3682 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3683 ++LABEL_NUSES (nlabel);
3684 if (--LABEL_NUSES (olabel) == 0)
3685 delete_insn (olabel);
3686 changed = 1;
3687 }
3688 }
3689 return changed;
3690 }
3691 \f
3692 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3693 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3694 in INSN, then store one of them in JUMP_LABEL (INSN).
3695 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3696 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3697 Also, when there are consecutive labels, canonicalize on the last of them.
3698
3699 Note that two labels separated by a loop-beginning note
3700 must be kept distinct if we have not yet done loop-optimization,
3701 because the gap between them is where loop-optimize
3702 will want to move invariant code to. CROSS_JUMP tells us
3703 that loop-optimization has already been done.
3704
3705 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3706 two labels distinct if they are separated by only USE or CLOBBER insns. */
3707
3708 static void
3709 mark_jump_label (x, insn, cross_jump)
3710 register rtx x;
3711 rtx insn;
3712 int cross_jump;
3713 {
3714 register RTX_CODE code = GET_CODE (x);
3715 register int i;
3716 register const char *fmt;
3717
3718 switch (code)
3719 {
3720 case PC:
3721 case CC0:
3722 case REG:
3723 case SUBREG:
3724 case CONST_INT:
3725 case SYMBOL_REF:
3726 case CONST_DOUBLE:
3727 case CLOBBER:
3728 case CALL:
3729 return;
3730
3731 case MEM:
3732 /* If this is a constant-pool reference, see if it is a label. */
3733 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3734 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3735 mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
3736 break;
3737
3738 case LABEL_REF:
3739 {
3740 rtx label = XEXP (x, 0);
3741 rtx olabel = label;
3742 rtx note;
3743 rtx next;
3744
3745 if (GET_CODE (label) != CODE_LABEL)
3746 abort ();
3747
3748 /* Ignore references to labels of containing functions. */
3749 if (LABEL_REF_NONLOCAL_P (x))
3750 break;
3751
3752 /* If there are other labels following this one,
3753 replace it with the last of the consecutive labels. */
3754 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3755 {
3756 if (GET_CODE (next) == CODE_LABEL)
3757 label = next;
3758 else if (cross_jump && GET_CODE (next) == INSN
3759 && (GET_CODE (PATTERN (next)) == USE
3760 || GET_CODE (PATTERN (next)) == CLOBBER))
3761 continue;
3762 else if (GET_CODE (next) != NOTE)
3763 break;
3764 else if (! cross_jump
3765 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3766 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
3767 /* ??? Optional. Disables some optimizations, but
3768 makes gcov output more accurate with -O. */
3769 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
3770 break;
3771 }
3772
3773 XEXP (x, 0) = label;
3774 if (! insn || ! INSN_DELETED_P (insn))
3775 ++LABEL_NUSES (label);
3776
3777 if (insn)
3778 {
3779 if (GET_CODE (insn) == JUMP_INSN)
3780 JUMP_LABEL (insn) = label;
3781
3782 /* If we've changed OLABEL and we had a REG_LABEL note
3783 for it, update it as well. */
3784 else if (label != olabel
3785 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3786 XEXP (note, 0) = label;
3787
3788 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3789 is one. */
3790 else if (! find_reg_note (insn, REG_LABEL, label))
3791 {
3792 /* This code used to ignore labels which referred to dispatch
3793 tables to avoid flow.c generating worse code.
3794
3795 However, in the presence of global optimizations like
3796 gcse which call find_basic_blocks without calling
3797 life_analysis, not recording such labels will lead
3798 to compiler aborts because of inconsistencies in the
3799 flow graph. So we go ahead and record the label.
3800
3801 It may also be the case that the optimization argument
3802 is no longer valid because of the more accurate cfg
3803 we build in find_basic_blocks -- it no longer pessimizes
3804 code when it finds a REG_LABEL note. */
3805 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
3806 REG_NOTES (insn));
3807 }
3808 }
3809 return;
3810 }
3811
3812 /* Do walk the labels in a vector, but not the first operand of an
3813 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3814 case ADDR_VEC:
3815 case ADDR_DIFF_VEC:
3816 if (! INSN_DELETED_P (insn))
3817 {
3818 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
3819
3820 for (i = 0; i < XVECLEN (x, eltnum); i++)
3821 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
3822 }
3823 return;
3824
3825 default:
3826 break;
3827 }
3828
3829 fmt = GET_RTX_FORMAT (code);
3830 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3831 {
3832 if (fmt[i] == 'e')
3833 mark_jump_label (XEXP (x, i), insn, cross_jump);
3834 else if (fmt[i] == 'E')
3835 {
3836 register int j;
3837 for (j = 0; j < XVECLEN (x, i); j++)
3838 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
3839 }
3840 }
3841 }
3842
3843 /* If all INSN does is set the pc, delete it,
3844 and delete the insn that set the condition codes for it
3845 if that's what the previous thing was. */
3846
3847 void
3848 delete_jump (insn)
3849 rtx insn;
3850 {
3851 register rtx set = single_set (insn);
3852
3853 if (set && GET_CODE (SET_DEST (set)) == PC)
3854 delete_computation (insn);
3855 }
3856
3857 /* Recursively delete prior insns that compute the value (used only by INSN
3858 which the caller is deleting) stored in the register mentioned by NOTE
3859 which is a REG_DEAD note associated with INSN. */
3860
3861 static void
3862 delete_prior_computation (note, insn)
3863 rtx note;
3864 rtx insn;
3865 {
3866 rtx our_prev;
3867 rtx reg = XEXP (note, 0);
3868
3869 for (our_prev = prev_nonnote_insn (insn);
3870 our_prev && GET_CODE (our_prev) == INSN;
3871 our_prev = prev_nonnote_insn (our_prev))
3872 {
3873 rtx pat = PATTERN (our_prev);
3874
3875 /* If we reach a SEQUENCE, it is too complex to try to
3876 do anything with it, so give up. */
3877 if (GET_CODE (pat) == SEQUENCE)
3878 break;
3879
3880 if (GET_CODE (pat) == USE
3881 && GET_CODE (XEXP (pat, 0)) == INSN)
3882 /* reorg creates USEs that look like this. We leave them
3883 alone because reorg needs them for its own purposes. */
3884 break;
3885
3886 if (reg_set_p (reg, pat))
3887 {
3888 if (side_effects_p (pat))
3889 break;
3890
3891 if (GET_CODE (pat) == PARALLEL)
3892 {
3893 /* If we find a SET of something else, we can't
3894 delete the insn. */
3895
3896 int i;
3897
3898 for (i = 0; i < XVECLEN (pat, 0); i++)
3899 {
3900 rtx part = XVECEXP (pat, 0, i);
3901
3902 if (GET_CODE (part) == SET
3903 && SET_DEST (part) != reg)
3904 break;
3905 }
3906
3907 if (i == XVECLEN (pat, 0))
3908 delete_computation (our_prev);
3909 }
3910 else if (GET_CODE (pat) == SET
3911 && GET_CODE (SET_DEST (pat)) == REG)
3912 {
3913 int dest_regno = REGNO (SET_DEST (pat));
3914 int dest_endregno
3915 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
3916 ? HARD_REGNO_NREGS (dest_regno,
3917 GET_MODE (SET_DEST (pat))) : 1);
3918 int regno = REGNO (reg);
3919 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
3920 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
3921
3922 if (dest_regno >= regno
3923 && dest_endregno <= endregno)
3924 delete_computation (our_prev);
3925
3926 /* We may have a multi-word hard register and some, but not
3927 all, of the words of the register are needed in subsequent
3928 insns. Write REG_UNUSED notes for those parts that were not
3929 needed. */
3930 else if (dest_regno <= regno
3931 && dest_endregno >= endregno
3932 && ! find_regno_note (our_prev, REG_UNUSED, REGNO (reg)))
3933 {
3934 int i;
3935
3936 REG_NOTES (our_prev)
3937 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (our_prev));
3938
3939 for (i = dest_regno; i < dest_endregno; i++)
3940 if (! find_regno_note (our_prev, REG_UNUSED, i))
3941 break;
3942
3943 if (i == dest_endregno)
3944 delete_computation (our_prev);
3945 }
3946 }
3947
3948 break;
3949 }
3950
3951 /* If PAT references the register that dies here, it is an
3952 additional use. Hence any prior SET isn't dead. However, this
3953 insn becomes the new place for the REG_DEAD note. */
3954 if (reg_overlap_mentioned_p (reg, pat))
3955 {
3956 XEXP (note, 1) = REG_NOTES (our_prev);
3957 REG_NOTES (our_prev) = note;
3958 break;
3959 }
3960 }
3961 }
3962
3963 /* Delete INSN and recursively delete insns that compute values used only
3964 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3965 If we are running before flow.c, we need do nothing since flow.c will
3966 delete dead code. We also can't know if the registers being used are
3967 dead or not at this point.
3968
3969 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3970 nothing other than set a register that dies in this insn, we can delete
3971 that insn as well.
3972
3973 On machines with CC0, if CC0 is used in this insn, we may be able to
3974 delete the insn that set it. */
3975
3976 static void
3977 delete_computation (insn)
3978 rtx insn;
3979 {
3980 rtx note, next;
3981 rtx set;
3982
3983 #ifdef HAVE_cc0
3984 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3985 {
3986 rtx prev = prev_nonnote_insn (insn);
3987 /* We assume that at this stage
3988 CC's are always set explicitly
3989 and always immediately before the jump that
3990 will use them. So if the previous insn
3991 exists to set the CC's, delete it
3992 (unless it performs auto-increments, etc.). */
3993 if (prev && GET_CODE (prev) == INSN
3994 && sets_cc0_p (PATTERN (prev)))
3995 {
3996 if (sets_cc0_p (PATTERN (prev)) > 0
3997 && ! side_effects_p (PATTERN (prev)))
3998 delete_computation (prev);
3999 else
4000 /* Otherwise, show that cc0 won't be used. */
4001 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
4002 cc0_rtx, REG_NOTES (prev));
4003 }
4004 }
4005 #endif
4006
4007 #ifdef INSN_SCHEDULING
4008 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
4009 reload has completed. The schedulers need to be fixed. Until
4010 they are, we must not rely on the death notes here. */
4011 if (reload_completed && flag_schedule_insns_after_reload)
4012 {
4013 delete_insn (insn);
4014 return;
4015 }
4016 #endif
4017
4018 set = single_set (insn);
4019
4020 for (note = REG_NOTES (insn); note; note = next)
4021 {
4022 next = XEXP (note, 1);
4023
4024 if (REG_NOTE_KIND (note) != REG_DEAD
4025 /* Verify that the REG_NOTE is legitimate. */
4026 || GET_CODE (XEXP (note, 0)) != REG)
4027 continue;
4028
4029 if (set && reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
4030 set = NULL_RTX;
4031
4032 delete_prior_computation (note, insn);
4033 }
4034
4035 /* The REG_DEAD note may have been omitted for a register
4036 which is both set and used by the insn. */
4037 if (set
4038 && GET_CODE (SET_DEST (set)) == REG
4039 && reg_mentioned_p (SET_DEST (set), SET_SRC (set)))
4040 {
4041 note = gen_rtx_EXPR_LIST (REG_DEAD, SET_DEST (set), NULL_RTX);
4042 delete_prior_computation (note, insn);
4043 }
4044
4045 delete_insn (insn);
4046 }
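
/* For example, if a deleted jump used (reg 60), had a REG_DEAD note
   for it, and the previous insn did nothing but
     (set (reg 60) (plus (reg 61) (const_int 1)))
   then delete_computation deletes that previous insn too, and so on
   recursively through its inputs.  */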
4047 \f
4048 /* Delete insn INSN from the chain of insns and update label ref counts.
4049 May delete some following insns as a consequence; may even delete
4050 a label elsewhere and insns that follow it.
4051
4052 Returns the first insn after INSN that was not deleted. */
4053
4054 rtx
4055 delete_insn (insn)
4056 register rtx insn;
4057 {
4058 register rtx next = NEXT_INSN (insn);
4059 register rtx prev = PREV_INSN (insn);
4060 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
4061 register int dont_really_delete = 0;
4062
4063 while (next && INSN_DELETED_P (next))
4064 next = NEXT_INSN (next);
4065
4066 /* This insn is already deleted => return first following nondeleted. */
4067 if (INSN_DELETED_P (insn))
4068 return next;
4069
4070 if (was_code_label)
4071 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
4072
4073 /* Don't delete user-declared labels. Convert them to special NOTEs
4074 instead. */
4075 if (was_code_label && LABEL_NAME (insn) != 0
4076 && optimize && ! dont_really_delete)
4077 {
4078 PUT_CODE (insn, NOTE);
4079 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
4080 NOTE_SOURCE_FILE (insn) = 0;
4081 dont_really_delete = 1;
4082 }
4083 else
4084 /* Mark this insn as deleted. */
4085 INSN_DELETED_P (insn) = 1;
4086
4087 /* If this is an unconditional jump, delete it from the jump chain. */
4088 if (simplejump_p (insn))
4089 delete_from_jump_chain (insn);
4090
4091 /* If instruction is followed by a barrier,
4092 delete the barrier too. */
4093
4094 if (next != 0 && GET_CODE (next) == BARRIER)
4095 {
4096 INSN_DELETED_P (next) = 1;
4097 next = NEXT_INSN (next);
4098 }
4099
4100 /* Patch out INSN (and the barrier, if any). */
4101
4102 if (optimize && ! dont_really_delete)
4103 {
4104 if (prev)
4105 {
4106 NEXT_INSN (prev) = next;
4107 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
4108 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
4109 XVECLEN (PATTERN (prev), 0) - 1)) = next;
4110 }
4111
4112 if (next)
4113 {
4114 PREV_INSN (next) = prev;
4115 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
4116 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4117 }
4118
4119 if (prev && NEXT_INSN (prev) == 0)
4120 set_last_insn (prev);
4121 }
4122
4123 /* If deleting a jump, decrement the count of the label,
4124 and delete the label if it is now unused. */
4125
4126 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
4127 {
4128 rtx lab = JUMP_LABEL (insn), lab_next;
4129
4130 if (--LABEL_NUSES (lab) == 0)
4131 {
4132 /* This can delete NEXT or PREV,
4133 either directly if NEXT is JUMP_LABEL (INSN),
4134 or indirectly through more levels of jumps. */
4135 delete_insn (lab);
4136
4137 /* I feel a little doubtful about this loop,
4138 but I see no clean and sure alternative way
4139 to find the first insn after INSN that is not now deleted.
4140 I hope this works. */
4141 while (next && INSN_DELETED_P (next))
4142 next = NEXT_INSN (next);
4143 return next;
4144 }
4145 else if ((lab_next = next_nonnote_insn (lab)) != NULL
4146 && GET_CODE (lab_next) == JUMP_INSN
4147 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
4148 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
4149 {
4150 /* If we're deleting the tablejump, delete the dispatch table.
4151 We may not be able to kill the immediately preceding label
4152 just yet, as it might be referenced in code leading up to
4153 the tablejump. */
4154 delete_insn (lab_next);
4155 }
4156 }
4157
4158 /* Likewise if we're deleting a dispatch table. */
4159
4160 if (GET_CODE (insn) == JUMP_INSN
4161 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4162 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4163 {
4164 rtx pat = PATTERN (insn);
4165 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4166 int len = XVECLEN (pat, diff_vec_p);
4167
4168 for (i = 0; i < len; i++)
4169 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
4170 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
4171 while (next && INSN_DELETED_P (next))
4172 next = NEXT_INSN (next);
4173 return next;
4174 }
4175
4176 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
4177 prev = PREV_INSN (prev);
4178
4179 /* If INSN was a label and a dispatch table follows it,
4180 delete the dispatch table. The tablejump must have gone already.
4181 It isn't useful to fall through into a table. */
4182
4183 if (was_code_label
4184 && NEXT_INSN (insn) != 0
4185 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4186 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
4187 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
4188 next = delete_insn (NEXT_INSN (insn));
4189
4190 /* If INSN was a label, delete insns following it if now unreachable. */
4191
4192 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
4193 {
4194 register RTX_CODE code;
4195 while (next != 0
4196 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
4197 || code == NOTE || code == BARRIER
4198 || (code == CODE_LABEL && INSN_DELETED_P (next))))
4199 {
4200 if (code == NOTE
4201 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
4202 next = NEXT_INSN (next);
4203 /* Keep going past other deleted labels to delete what follows. */
4204 else if (code == CODE_LABEL && INSN_DELETED_P (next))
4205 next = NEXT_INSN (next);
4206 else
4207 /* Note: if this deletes a jump, it can cause more
4208 deletion of unreachable code, after a different label.
4209 As long as the value from this recursive call is correct,
4210 this invocation functions correctly. */
4211 next = delete_insn (next);
4212 }
4213 }
4214
4215 return next;
4216 }
4217
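/* A typical caller advances with the return value, e.g.
     insn = delete_insn (insn);
   so that any following insns deleted as a side effect are skipped.  */
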
4218 /* Advance from INSN till reaching something not deleted
4219 then return that. May return INSN itself. */
4220
4221 rtx
4222 next_nondeleted_insn (insn)
4223 rtx insn;
4224 {
4225 while (INSN_DELETED_P (insn))
4226 insn = NEXT_INSN (insn);
4227 return insn;
4228 }
4229 \f
4230 /* Delete a range of insns from FROM to TO, inclusive.
4231 This is for the sake of peephole optimization, so assume
4232 that whatever these insns do will still be done by a new
4233 peephole insn that will replace them. */
4234
4235 void
4236 delete_for_peephole (from, to)
4237 register rtx from, to;
4238 {
4239 register rtx insn = from;
4240
4241 while (1)
4242 {
4243 register rtx next = NEXT_INSN (insn);
4244 register rtx prev = PREV_INSN (insn);
4245
4246 if (GET_CODE (insn) != NOTE)
4247 {
4248 INSN_DELETED_P (insn) = 1;
4249
4250 /* Patch this insn out of the chain. */
4251 /* We don't do this all at once, because we
4252 must preserve all NOTEs. */
4253 if (prev)
4254 NEXT_INSN (prev) = next;
4255
4256 if (next)
4257 PREV_INSN (next) = prev;
4258 }
4259
4260 if (insn == to)
4261 break;
4262 insn = next;
4263 }
4264
4265 /* Note that if TO is an unconditional jump
4266 we *do not* delete the BARRIER that follows,
4267 since the peephole that replaces this sequence
4268 is also an unconditional jump in that case. */
4269 }
4270 \f
4271 /* We have determined that INSN is never reached, and are about to
4272 delete it. Print a warning if the user asked for one.
4273
4274 To try to make this warning more useful, this should only be called
4275 once per basic block not reached, and it only warns when the basic
4276 block contains more than one line from the current function, and
4277 contains at least one operation. CSE and inlining can duplicate insns,
4278 so it's possible to get spurious warnings from this. */
4279
4280 void
4281 never_reached_warning (avoided_insn)
4282 rtx avoided_insn;
4283 {
4284 rtx insn;
4285 rtx a_line_note = NULL;
4286 int two_avoided_lines = 0;
4287 int contains_insn = 0;
4288
4289 if (! warn_notreached)
4290 return;
4291
4292 /* Scan forwards, looking at LINE_NUMBER notes, until
4293 we hit a LABEL or we run out of insns. */
4294
4295 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
4296 {
4297 if (GET_CODE (insn) == CODE_LABEL)
4298 break;
4299 else if (GET_CODE (insn) == NOTE /* A line number note? */
4300 && NOTE_LINE_NUMBER (insn) >= 0)
4301 {
4302 if (a_line_note == NULL)
4303 a_line_note = insn;
4304 else
4305 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
4306 != NOTE_LINE_NUMBER (insn));
4307 }
4308 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4309 contains_insn = 1;
4310 }
4311 if (two_avoided_lines && contains_insn)
4312 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
4313 NOTE_LINE_NUMBER (a_line_note),
4314 "will never be executed");
4315 }
4316 \f
4317 /* Invert the condition of the jump JUMP, and make it jump
4318 to label NLABEL instead of where it jumps now. */
4319
4320 int
4321 invert_jump (jump, nlabel)
4322 rtx jump, nlabel;
4323 {
4324 /* We have to either invert the condition and change the label or
4325 do neither. Either operation could fail. We first try to invert
4326 the jump. If that succeeds, we try changing the label. If that fails,
4327 we invert the jump back to what it was. */
4328
4329 if (! invert_exp (PATTERN (jump), jump))
4330 return 0;
4331
4332 if (redirect_jump (jump, nlabel))
4333 {
4334 if (flag_branch_probabilities)
4335 {
4336 rtx note = find_reg_note (jump, REG_BR_PROB, 0);
4337
4338 /* An inverted jump means that a probability taken becomes a
4339 probability not taken. Subtract the branch probability from the
4340 probability base to convert it back to a taken probability.
4341 (We don't flip the probability on a branch that's never taken.) */
4342 if (note && XINT (XEXP (note, 0), 0) >= 0)
4343 XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
4344 }
4345
4346 return 1;
4347 }
4348
4349 if (! invert_exp (PATTERN (jump), jump))
4350 /* This should just be putting it back the way it was. */
4351 abort ();
4352
4353 return 0;
4354 }
4355
4356 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4357
4358 Return 1 if we can do so, 0 if we cannot find a way to do so that
4359 matches a pattern. */
4360
4361 int
4362 invert_exp (x, insn)
4363 rtx x;
4364 rtx insn;
4365 {
4366 register RTX_CODE code;
4367 register int i;
4368 register const char *fmt;
4369
4370 code = GET_CODE (x);
4371
4372 if (code == IF_THEN_ELSE)
4373 {
4374 register rtx comp = XEXP (x, 0);
4375 register rtx tem;
4376
4377 /* We can do this in two ways: The preferable way, which can only
4378 be done when can_reverse_comparison_p says it is safe, is to
4379 reverse the comparison code. Otherwise, swap the THEN-part and
4380 ELSE-part of the IF_THEN_ELSE. If we can't do either, fail. */
4381
4382 if (can_reverse_comparison_p (comp, insn)
4383 && validate_change (insn, &XEXP (x, 0),
4384 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
4385 GET_MODE (comp), XEXP (comp, 0),
4386 XEXP (comp, 1)), 0))
4387 return 1;
4388
4389 tem = XEXP (x, 1);
4390 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
4391 validate_change (insn, &XEXP (x, 2), tem, 1);
4392 return apply_change_group ();
4393 }
4394
4395 fmt = GET_RTX_FORMAT (code);
4396 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4397 {
4398 if (fmt[i] == 'e')
4399 if (! invert_exp (XEXP (x, i), insn))
4400 return 0;
4401 if (fmt[i] == 'E')
4402 {
4403 register int j;
4404 for (j = 0; j < XVECLEN (x, i); j++)
4405 if (!invert_exp (XVECEXP (x, i, j), insn))
4406 return 0;
4407 }
4408 }
4409
4410 return 1;
4411 }
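
/* For example, (if_then_else (eq A B) (label_ref L) (pc)) is first
   tried as (if_then_else (ne A B) (label_ref L) (pc)); if the
   reversed comparison does not match any pattern, the two arms are
   swapped instead, giving (if_then_else (eq A B) (pc) (label_ref L)).  */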
4412 \f
4413 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4414 If the old jump target label is unused as a result,
4415 it and the code following it may be deleted.
4416
4417 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4418 RETURN insn.
4419
4420 The return value will be 1 if the change was made, 0 if it wasn't (this
4421 can only occur for NLABEL == 0). */
4422
4423 int
4424 redirect_jump (jump, nlabel)
4425 rtx jump, nlabel;
4426 {
4427 register rtx olabel = JUMP_LABEL (jump);
4428
4429 if (nlabel == olabel)
4430 return 1;
4431
4432 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
4433 return 0;
4434
4435 /* If this is an unconditional branch, delete it from the jump_chain of
4436 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4437 have UID's in range and JUMP_CHAIN is valid). */
4438 if (jump_chain && (simplejump_p (jump)
4439 || GET_CODE (PATTERN (jump)) == RETURN))
4440 {
4441 int label_index = nlabel ? INSN_UID (nlabel) : 0;
4442
4443 delete_from_jump_chain (jump);
4444 if (label_index < max_jump_chain
4445 && INSN_UID (jump) < max_jump_chain)
4446 {
4447 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
4448 jump_chain[label_index] = jump;
4449 }
4450 }
4451
4452 JUMP_LABEL (jump) = nlabel;
4453 if (nlabel)
4454 ++LABEL_NUSES (nlabel);
4455
4456 if (olabel && --LABEL_NUSES (olabel) == 0)
4457 delete_insn (olabel);
4458
4459 return 1;
4460 }
4461
4462 /* Delete the instruction JUMP from any jump chain it might be on. */
4463
4464 static void
4465 delete_from_jump_chain (jump)
4466 rtx jump;
4467 {
4468 int index;
4469 rtx olabel = JUMP_LABEL (jump);
4470
4471 /* Handle unconditional jumps. */
4472 if (jump_chain && olabel != 0
4473 && INSN_UID (olabel) < max_jump_chain
4474 && simplejump_p (jump))
4475 index = INSN_UID (olabel);
4476 /* Handle return insns. */
4477 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4478 index = 0;
4479 else return;
4480
4481 if (jump_chain[index] == jump)
4482 jump_chain[index] = jump_chain[INSN_UID (jump)];
4483 else
4484 {
4485 rtx insn;
4486
4487 for (insn = jump_chain[index];
4488 insn != 0;
4489 insn = jump_chain[INSN_UID (insn)])
4490 if (jump_chain[INSN_UID (insn)] == jump)
4491 {
4492 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4493 break;
4494 }
4495 }
4496 }
4497
4498 /* If NLABEL is nonzero, throughout the rtx at LOC,
4499 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
4500 zero, alter (RETURN) to (LABEL_REF NLABEL).
4501
4502 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
4503 validity with validate_change. Convert (set (pc) (label_ref olabel))
4504 to (return).
4505
4506 Return 0 if we found a change we would like to make but it is invalid.
4507 Otherwise, return 1. */
4508
4509 int
4510 redirect_exp (loc, olabel, nlabel, insn)
4511 rtx *loc;
4512 rtx olabel, nlabel;
4513 rtx insn;
4514 {
4515 register rtx x = *loc;
4516 register RTX_CODE code = GET_CODE (x);
4517 register int i;
4518 register const char *fmt;
4519
4520 if (code == LABEL_REF)
4521 {
4522 if (XEXP (x, 0) == olabel)
4523 {
4524 if (nlabel)
4525 XEXP (x, 0) = nlabel;
4526 else
4527 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4528 return 1;
4529 }
4530 }
4531 else if (code == RETURN && olabel == 0)
4532 {
4533 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
4534 if (loc == &PATTERN (insn))
4535 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
4536 return validate_change (insn, loc, x, 0);
4537 }
4538
4539 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
4540 && GET_CODE (SET_SRC (x)) == LABEL_REF
4541 && XEXP (SET_SRC (x), 0) == olabel)
4542 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4543
4544 fmt = GET_RTX_FORMAT (code);
4545 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4546 {
4547 if (fmt[i] == 'e')
4548 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
4549 return 0;
4550 if (fmt[i] == 'E')
4551 {
4552 register int j;
4553 for (j = 0; j < XVECLEN (x, i); j++)
4554 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
4555 return 0;
4556 }
4557 }
4558
4559 return 1;
4560 }
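
/* For example, with NLABEL == 0, (set (pc) (label_ref OLABEL)) is
   changed to (return), and with OLABEL == 0 a (return) becomes a jump
   to NLABEL; each change goes through validate_change, so a result
   the target cannot recognize is rejected.  */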
4561 \f
4562 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4563
4564 If the old jump target label (before the dispatch table) becomes unused,
4565 it and the dispatch table may be deleted. In that case, find the insn
4566 before the jump that references that label, and delete it and its
4567 logical successors too. */
4568
4569 static void
4570 redirect_tablejump (jump, nlabel)
4571 rtx jump, nlabel;
4572 {
4573 register rtx olabel = JUMP_LABEL (jump);
4574
4575 /* Add this jump to the jump_chain of NLABEL. */
4576 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4577 && INSN_UID (jump) < max_jump_chain)
4578 {
4579 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4580 jump_chain[INSN_UID (nlabel)] = jump;
4581 }
4582
4583 PATTERN (jump) = gen_jump (nlabel);
4584 JUMP_LABEL (jump) = nlabel;
4585 ++LABEL_NUSES (nlabel);
4586 INSN_CODE (jump) = -1;
4587
4588 if (--LABEL_NUSES (olabel) == 0)
4589 {
4590 delete_labelref_insn (jump, olabel, 0);
4591 delete_insn (olabel);
4592 }
4593 }
4594
4595 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4596 If we found one, delete it and then delete this insn if DELETE_THIS is
4597 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4598
4599 static int
4600 delete_labelref_insn (insn, label, delete_this)
4601 rtx insn, label;
4602 int delete_this;
4603 {
4604 int deleted = 0;
4605 rtx link;
4606
4607 if (GET_CODE (insn) != NOTE
4608 && reg_mentioned_p (label, PATTERN (insn)))
4609 {
4610 if (delete_this)
4611 {
4612 delete_insn (insn);
4613 deleted = 1;
4614 }
4615 else
4616 return 1;
4617 }
4618
4619 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4620 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4621 {
4622 if (delete_this)
4623 {
4624 delete_insn (insn);
4625 deleted = 1;
4626 }
4627 else
4628 return 1;
4629 }
4630
4631 return deleted;
4632 }
4633 \f
4634 /* Like rtx_equal_p except that it considers two REGs as equal
4635 if they renumber to the same value and considers two commutative
4636 operations to be the same if the order of the operands has been
4637 reversed.
4638
4639 ??? Addition is not commutative on the PA due to the weird implicit
4640 space register selection rules for memory addresses. Therefore, we
4641 don't consider a + b == b + a.
4642
4643 We could/should make this test a little tighter. Possibly only
4644 disabling it on the PA via some backend macro or only disabling this
4645 case when the PLUS is inside a MEM. */
4646
4647 int
4648 rtx_renumbered_equal_p (x, y)
4649 rtx x, y;
4650 {
4651 register int i;
4652 register RTX_CODE code = GET_CODE (x);
4653 register const char *fmt;
4654
4655 if (x == y)
4656 return 1;
4657
4658 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4659 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4660 && GET_CODE (SUBREG_REG (y)) == REG)))
4661 {
4662 int reg_x = -1, reg_y = -1;
4663 int word_x = 0, word_y = 0;
4664
4665 if (GET_MODE (x) != GET_MODE (y))
4666 return 0;
4667
4668 /* If we haven't done any renumbering, don't
4669 make any assumptions. */
4670 if (reg_renumber == 0)
4671 return rtx_equal_p (x, y);
4672
4673 if (code == SUBREG)
4674 {
4675 reg_x = REGNO (SUBREG_REG (x));
4676 word_x = SUBREG_WORD (x);
4677
4678 if (reg_renumber[reg_x] >= 0)
4679 {
4680 reg_x = reg_renumber[reg_x] + word_x;
4681 word_x = 0;
4682 }
4683 }
4684
4685 else
4686 {
4687 reg_x = REGNO (x);
4688 if (reg_renumber[reg_x] >= 0)
4689 reg_x = reg_renumber[reg_x];
4690 }
4691
4692 if (GET_CODE (y) == SUBREG)
4693 {
4694 reg_y = REGNO (SUBREG_REG (y));
4695 word_y = SUBREG_WORD (y);
4696
4697 if (reg_renumber[reg_y] >= 0)
4698 {
4699 reg_y = reg_renumber[reg_y];
4700 word_y = 0;
4701 }
4702 }
4703
4704 else
4705 {
4706 reg_y = REGNO (y);
4707 if (reg_renumber[reg_y] >= 0)
4708 reg_y = reg_renumber[reg_y];
4709 }
4710
4711 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4712 }
4713
4714 /* Now we have disposed of all the cases
4715 in which different rtx codes can match. */
4716 if (code != GET_CODE (y))
4717 return 0;
4718
4719 switch (code)
4720 {
4721 case PC:
4722 case CC0:
4723 case ADDR_VEC:
4724 case ADDR_DIFF_VEC:
4725 return 0;
4726
4727 case CONST_INT:
4728 return INTVAL (x) == INTVAL (y);
4729
4730 case LABEL_REF:
4731 /* We can't assume nonlocal labels have their following insns yet. */
4732 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4733 return XEXP (x, 0) == XEXP (y, 0);
4734
4735 /* Two label-refs are equivalent if they point at labels
4736 in the same position in the instruction stream. */
4737 return (next_real_insn (XEXP (x, 0))
4738 == next_real_insn (XEXP (y, 0)));
4739
4740 case SYMBOL_REF:
4741 return XSTR (x, 0) == XSTR (y, 0);
4742
4743 case CODE_LABEL:
4744 /* If we didn't match EQ equality above, they aren't the same. */
4745 return 0;
4746
4747 default:
4748 break;
4749 }
4750
4751 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4752
4753 if (GET_MODE (x) != GET_MODE (y))
4754 return 0;
4755
4756 /* For commutative operations, the RTXs match if the operands match in
4757 either order. Also handle the simple binary and unary cases without a loop.
4758
4759 ??? Don't consider PLUS a commutative operator; see comments above. */
4760 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4761 && code != PLUS)
4762 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4763 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4764 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4765 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4766 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4767 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4768 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4769 else if (GET_RTX_CLASS (code) == '1')
4770 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4771
4772 /* Compare the elements. If any pair of corresponding elements
4773 fails to match, return 0 for the whole thing. */
4774
4775 fmt = GET_RTX_FORMAT (code);
4776 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4777 {
4778 register int j;
4779 switch (fmt[i])
4780 {
4781 case 'w':
4782 if (XWINT (x, i) != XWINT (y, i))
4783 return 0;
4784 break;
4785
4786 case 'i':
4787 if (XINT (x, i) != XINT (y, i))
4788 return 0;
4789 break;
4790
4791 case 's':
4792 if (strcmp (XSTR (x, i), XSTR (y, i)))
4793 return 0;
4794 break;
4795
4796 case 'e':
4797 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4798 return 0;
4799 break;
4800
4801 case 'u':
4802 if (XEXP (x, i) != XEXP (y, i))
4803 return 0;
4804 /* fall through. */
4805 case '0':
4806 break;
4807
4808 case 'E':
4809 if (XVECLEN (x, i) != XVECLEN (y, i))
4810 return 0;
4811 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4812 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4813 return 0;
4814 break;
4815
4816 default:
4817 abort ();
4818 }
4819 }
4820 return 1;
4821 }
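
/* For example, if reg_renumber[100] is 3, then (reg:SI 100) and
   (reg:SI 3) are considered equal here even though rtx_equal_p
   would reject them.  */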
4822 \f
4823 /* If X is a hard register or equivalent to one or a subregister of one,
4824 return the hard register number. If X is a pseudo register that was not
4825 assigned a hard register, return the pseudo register number. Otherwise,
4826 return -1. Any rtx is valid for X. */
4827
4828 int
4829 true_regnum (x)
4830 rtx x;
4831 {
4832 if (GET_CODE (x) == REG)
4833 {
4834 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4835 return reg_renumber[REGNO (x)];
4836 return REGNO (x);
4837 }
4838 if (GET_CODE (x) == SUBREG)
4839 {
4840 int base = true_regnum (SUBREG_REG (x));
4841 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
4842 return SUBREG_WORD (x) + base;
4843 }
4844 return -1;
4845 }
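
/* For example, a pseudo (reg:SI 100) allocated to hard register 4
   yields 4; (subreg:SI (reg:DI 100) 1) then yields 5; an unallocated
   pseudo yields its own number, and a MEM yields -1.  */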
4846 \f
4847 /* Optimize code of the form:
4848
4849 for (x = a[i]; x; ...)
4850 ...
4851 for (x = a[i]; x; ...)
4852 ...
4853 foo:
4854
4855 Loop optimize will change the above code into
4856
4857 if (x = a[i])
4858 for (;;)
4859 { ...; if (! (x = ...)) break; }
4860 if (x = a[i])
4861 for (;;)
4862 { ...; if (! (x = ...)) break; }
4863 foo:
4864
4865 In general, if the first test fails, the program can branch
4866 directly to `foo' and skip the second try which is doomed to fail.
4867 We run this after loop optimization and before flow analysis. */
4868
4869 /* When comparing the insn patterns, we track the fact that different
4870 pseudo-register numbers may have been used in each computation.
4871 The following array stores an equivalence -- same_regs[I] == J means
4872 that pseudo register I was used in the first set of tests in a context
4873 where J was used in the second set. We also count the number of such
4874 pending equivalences; if that count is still nonzero when we reach the
4875 target, the expressions really aren't the same. */
4876
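/* For instance, if the first test used (reg 64) where the second used
   (reg 72), walking backwards sets same_regs[64] to 72; the entry is
   cancelled when matching insns set both registers, and any entry
   still pending at the branch target defeats the transformation.  */
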
4877 static int *same_regs;
4878
4879 static int num_same_regs;
4880
4881 /* Track any registers modified between the target of the first jump and
4882 the second jump. They never compare equal. */
4883
4884 static char *modified_regs;
4885
4886 /* Record if memory was modified. */
4887
4888 static int modified_mem;
4889
4890 /* Called via note_stores on each insn between the target of the first
4891 branch and the second branch. It marks any changed registers. */
4892
4893 static void
4894 mark_modified_reg (dest, x)
4895 rtx dest;
4896 rtx x ATTRIBUTE_UNUSED;
4897 {
4898 int regno, i;
4899
4900 if (GET_CODE (dest) == SUBREG)
4901 dest = SUBREG_REG (dest);
4902
4903 if (GET_CODE (dest) == MEM)
4904 modified_mem = 1;
4905
4906 if (GET_CODE (dest) != REG)
4907 return;
4908
4909 regno = REGNO (dest);
4910 if (regno >= FIRST_PSEUDO_REGISTER)
4911 modified_regs[regno] = 1;
4912 else
4913 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
4914 modified_regs[regno + i] = 1;
4915 }
4916
4917 /* F is the first insn in the chain of insns; MAX_REG sizes the
per-register tables below; FLAG_BEFORE_LOOP is nonzero when this
runs before loop optimization. */
4918
4919 void
4920 thread_jumps (f, max_reg, flag_before_loop)
4921 rtx f;
4922 int max_reg;
4923 int flag_before_loop;
4924 {
4925 /* Basic algorithm is to find a conditional branch,
4926 the label it may branch to, and the branch after
4927 that label. If the two branches test the same condition,
4928 walk back from both branch paths until the insn patterns
4929 differ, or code labels are hit. If we make it back to
4930 the target of the first branch, then we know that the first branch
4931 will either always succeed or always fail depending on the relative
4932 senses of the two branches. So adjust the first branch accordingly
4933 in this case. */
4934
4935 rtx label, b1, b2, t1, t2;
4936 enum rtx_code code1, code2;
4937 rtx b1op0, b1op1, b2op0, b2op1;
4938 int changed = 1;
4939 int i;
4940 int *all_reset;
4941
4942 /* Allocate register tables and quick-reset table. */
4943 modified_regs = (char *) alloca (max_reg * sizeof (char));
4944 same_regs = (int *) alloca (max_reg * sizeof (int));
4945 all_reset = (int *) alloca (max_reg * sizeof (int));
4946 for (i = 0; i < max_reg; i++)
4947 all_reset[i] = -1;
4948
4949 while (changed)
4950 {
4951 changed = 0;
4952
4953 for (b1 = f; b1; b1 = NEXT_INSN (b1))
4954 {
4955 /* Get to a candidate branch insn. */
4956 if (GET_CODE (b1) != JUMP_INSN
4957 || ! condjump_p (b1) || simplejump_p (b1)
4958 || JUMP_LABEL (b1) == 0)
4959 continue;
4960
4961 bzero (modified_regs, max_reg * sizeof (char));
4962 modified_mem = 0;
4963
4964 bcopy ((char *) all_reset, (char *) same_regs,
4965 max_reg * sizeof (int));
4966 num_same_regs = 0;
4967
4968 label = JUMP_LABEL (b1);
4969
4970 /* Look for a branch after the target. Record any registers and
4971 memory modified between the target and the branch. Stop when we
4972 get to a label since we can't know what was changed there. */
4973 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
4974 {
4975 if (GET_CODE (b2) == CODE_LABEL)
4976 break;
4977
4978 else if (GET_CODE (b2) == JUMP_INSN)
4979 {
4980 /* If this is an unconditional jump and is the only use of
4981 its target label, we can follow it. */
4982 if (simplejump_p (b2)
4983 && JUMP_LABEL (b2) != 0
4984 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
4985 {
4986 b2 = JUMP_LABEL (b2);
4987 continue;
4988 }
4989 else
4990 break;
4991 }
4992
4993 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
4994 continue;
4995
4996 if (GET_CODE (b2) == CALL_INSN)
4997 {
4998 modified_mem = 1;
4999 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5000 if (call_used_regs[i] && ! fixed_regs[i]
5001 && i != STACK_POINTER_REGNUM
5002 && i != FRAME_POINTER_REGNUM
5003 && i != HARD_FRAME_POINTER_REGNUM
5004 && i != ARG_POINTER_REGNUM)
5005 modified_regs[i] = 1;
5006 }
5007
5008 note_stores (PATTERN (b2), mark_modified_reg);
5009 }
5010
5011 /* Check the next candidate branch insn from the label
5012 of the first. */
5013 if (b2 == 0
5014 || GET_CODE (b2) != JUMP_INSN
5015 || b2 == b1
5016 || ! condjump_p (b2)
5017 || simplejump_p (b2))
5018 continue;
5019
5020 /* Get the comparison codes and operands, reversing the
5021 codes if appropriate. If we don't have comparison codes,
5022 we can't do anything. */
5023 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
5024 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
5025 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
5026 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
5027 code1 = reverse_condition (code1);
5028
5029 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
5030 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
5031 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
5032 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
5033 code2 = reverse_condition (code2);
5034
5035 /* If they test the same things and knowing that B1 branches
5036 tells us whether or not B2 branches, check if we
5037 can thread the branch. */
5038 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
5039 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
5040 && (comparison_dominates_p (code1, code2)
5041 || (comparison_dominates_p (code1, reverse_condition (code2))
5042 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
5043 0),
5044 b1))))
5045 {
5046 t1 = prev_nonnote_insn (b1);
5047 t2 = prev_nonnote_insn (b2);
5048
5049 while (t1 != 0 && t2 != 0)
5050 {
5051 if (t2 == label)
5052 {
5053 /* We have reached the target of the first branch.
5054 If there are no pending register equivalents,
5055 we know that this branch will either always
5056 succeed (if the senses of the two branches are
5057 the same) or always fail (if not). */
5058 rtx new_label;
5059
5060 if (num_same_regs != 0)
5061 break;
5062
5063 if (comparison_dominates_p (code1, code2))
5064 new_label = JUMP_LABEL (b2);
5065 else
5066 new_label = get_label_after (b2);
5067
5068 if (JUMP_LABEL (b1) != new_label)
5069 {
5070 rtx prev = PREV_INSN (new_label);
5071
5072 if (flag_before_loop
5073 && GET_CODE (prev) == NOTE
5074 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
5075 {
5076 /* Don't thread to the loop label. If a loop
5077 label is reused, loop optimization will
5078 be disabled for that loop. */
5079 new_label = gen_label_rtx ();
5080 emit_label_after (new_label, PREV_INSN (prev));
5081 }
5082 changed |= redirect_jump (b1, new_label);
5083 }
5084 break;
5085 }
5086
5087 /* If either of these is not a normal insn (it might be
5088 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
5089 have already been skipped above.) Similarly, fail
5090 if the insns are different. */
5091 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
5092 || recog_memoized (t1) != recog_memoized (t2)
5093 || ! rtx_equal_for_thread_p (PATTERN (t1),
5094 PATTERN (t2), t2))
5095 break;
5096
5097 t1 = prev_nonnote_insn (t1);
5098 t2 = prev_nonnote_insn (t2);
5099 }
5100 }
5101 }
5102 }
5103 }
5104 \f
5105 /* This is like rtx_equal_p except that it knows about our handling of
5106 possibly equivalent registers and knows to consider volatile and
5107 modified objects as not equal.
5108
5109 YINSN is the insn containing Y. */
5110
5111 int
5112 rtx_equal_for_thread_p (x, y, yinsn)
5113 rtx x, y;
5114 rtx yinsn;
5115 {
5116 register int i;
5117 register int j;
5118 register enum rtx_code code;
5119 register const char *fmt;
5120
5121 code = GET_CODE (x);
5122 /* Rtx's of different codes cannot be equal. */
5123 if (code != GET_CODE (y))
5124 return 0;
5125
5126 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
5127 (REG:SI x) and (REG:HI x) are NOT equivalent. */
5128
5129 if (GET_MODE (x) != GET_MODE (y))
5130 return 0;
5131
5132 /* For floating-point, consider everything unequal. This is a bit
5133 pessimistic, but this pass would only rarely do anything for FP
5134 anyway. */
5135 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
5136 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
5137 return 0;
5138
5139 /* For commutative operations, the RTXs match if the operands match in
5140 either order. Also handle the simple binary and unary cases without a loop. */
5141 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5142 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5143 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
5144 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
5145 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
5146 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
5147 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5148 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
5149 else if (GET_RTX_CLASS (code) == '1')
5150 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5151
5152 /* Handle special-cases first. */
5153 switch (code)
5154 {
5155 case REG:
5156 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
5157 return 1;
5158
5159 /* If neither is a user variable nor a hard register, check for possible
5160 equivalence. */
5161 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
5162 || REGNO (x) < FIRST_PSEUDO_REGISTER
5163 || REGNO (y) < FIRST_PSEUDO_REGISTER)
5164 return 0;
5165
5166 if (same_regs[REGNO (x)] == -1)
5167 {
5168 same_regs[REGNO (x)] = REGNO (y);
5169 num_same_regs++;
5170
5171 /* If this is the first time we are seeing a register on the `Y'
5172 side, see if it is the last use. If not, we can't thread the
5173 jump, so mark it as not equivalent. */
5174 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
5175 return 0;
5176
5177 return 1;
5178 }
5179 else
5180 return (same_regs[REGNO (x)] == REGNO (y));
5181
5182 break;
5183
5184 case MEM:
5185 /* If memory modified or either volatile, not equivalent.
5186 Else, check address. */
5187 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5188 return 0;
5189
5190 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5191
5192 case ASM_INPUT:
5193 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5194 return 0;
5195
5196 break;
5197
5198 case SET:
5199 /* Cancel a pending `same_regs' if setting equivalenced registers.
5200 Then process source. */
5201 if (GET_CODE (SET_DEST (x)) == REG
5202 && GET_CODE (SET_DEST (y)) == REG)
5203 {
5204 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
5205 {
5206 same_regs[REGNO (SET_DEST (x))] = -1;
5207 num_same_regs--;
5208 }
5209 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
5210 return 0;
5211 }
5212 else
5213 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
5214 return 0;
5215
5216 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
5217
5218 case LABEL_REF:
5219 return XEXP (x, 0) == XEXP (y, 0);
5220
5221 case SYMBOL_REF:
5222 return XSTR (x, 0) == XSTR (y, 0);
5223
5224 default:
5225 break;
5226 }
5227
5228 if (x == y)
5229 return 1;
5230
5231 fmt = GET_RTX_FORMAT (code);
5232 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5233 {
5234 switch (fmt[i])
5235 {
5236 case 'w':
5237 if (XWINT (x, i) != XWINT (y, i))
5238 return 0;
5239 break;
5240
5241 case 'n':
5242 case 'i':
5243 if (XINT (x, i) != XINT (y, i))
5244 return 0;
5245 break;
5246
5247 case 'V':
5248 case 'E':
5249 /* Two vectors must have the same length. */
5250 if (XVECLEN (x, i) != XVECLEN (y, i))
5251 return 0;
5252
5253 /* And the corresponding elements must match. */
5254 for (j = 0; j < XVECLEN (x, i); j++)
5255 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
5256 XVECEXP (y, i, j), yinsn) == 0)
5257 return 0;
5258 break;
5259
5260 case 'e':
5261 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
5262 return 0;
5263 break;
5264
5265 case 'S':
5266 case 's':
5267 if (strcmp (XSTR (x, i), XSTR (y, i)))
5268 return 0;
5269 break;
5270
5271 case 'u':
5272 /* These are just backpointers, so they don't matter. */
5273 break;
5274
5275 case '0':
5276 case 't':
5277 break;
5278
5279 /* It is believed that rtx's at this level will never
5280 contain anything but integers and other rtx's,
5281 except for within LABEL_REFs and SYMBOL_REFs. */
5282 default:
5283 abort ();
5284 }
5285 }
5286 return 1;
5287 }
5288 \f
5289
5290 #ifndef HAVE_cc0
5291 /* Return the insn that NEW can be safely inserted in front of starting at
5292 the jump insn INSN. Return 0 if it is not safe to do this jump
5293 optimization. Note that NEW must contain a single set. */
5294
5295 static rtx
5296 find_insert_position (insn, new)
5297 rtx insn;
5298 rtx new;
5299 {
5300 int i;
5301 rtx prev;
5302
5303 /* If NEW does not clobber, it is safe to insert NEW before INSN. */
5304 if (GET_CODE (PATTERN (new)) != PARALLEL)
5305 return insn;
5306
5307 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5308 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5309 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5310 insn))
5311 break;
5312
5313 if (i < 0)
5314 return insn;
5315
5316 /* There is a good chance that the previous insn PREV sets the thing
5317 being clobbered (often the CC in a hard reg). If PREV does not
5318 use what NEW sets, we can insert NEW before PREV. */
5319
5320 prev = prev_active_insn (insn);
5321 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5322 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5323 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5324 insn)
5325 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5326 prev))
5327 return 0;
5328
5329 return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
5330 }
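
/* For example, if NEW is
     (parallel [(set (reg 60) ...) (clobber (reg:CC 17))])
   and INSN uses the CC register (register numbers illustrative), NEW
   cannot go directly before INSN; but if the previous active insn is
   the one that sets CC and it does not mention (reg 60), NEW may be
   inserted before that insn instead.  */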
5331 #endif /* !HAVE_cc0 */