1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
25
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
30
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
38
39 Optionally, cross-jumping can be done. Currently it is done
40    only the last time (after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
43
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
46
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
50
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
53
54 #include "config.h"
55 #include "system.h"
56 #include "rtl.h"
57 #include "flags.h"
58 #include "hard-reg-set.h"
59 #include "regs.h"
60 #include "insn-config.h"
61 #include "insn-flags.h"
62 #include "insn-attr.h"
63 #include "recog.h"
64 #include "expr.h"
65 #include "real.h"
66 #include "except.h"
67 #include "toplev.h"
68
69 /* ??? Eventually must record somehow the labels used by jumps
70 from nested functions. */
71 /* Pre-record the next or previous real insn for each label?
72 No, this pass is very fast anyway. */
73 /* Condense consecutive labels?
74 This would make life analysis faster, maybe. */
75 /* Optimize jump y; x: ... y: jumpif... x?
76 Don't know if it is worth bothering with. */
77 /* Optimize two cases of conditional jump to conditional jump?
78 This can never delete any instruction or make anything dead,
79 or even change what is live at any point.
80 So perhaps let combiner do it. */
81
82 /* Vector indexed by uid.
83 For each CODE_LABEL, index by its uid to get first unconditional jump
84 that jumps to the label.
85 For each JUMP_INSN, index by its uid to get the next unconditional jump
86 that jumps to the same label.
87 Element 0 is the start of a chain of all return insns.
88    (It is safe to use element 0 because insn uid 0 is not used.)  */
89
90 static rtx *jump_chain;
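/* A minimal illustrative sketch (not part of the original file) of how
   the chain is meant to be walked: starting from a CODE_LABEL's uid,
   each link yields another unconditional jump to that label.  The
   helper name `for_each_jump_to' is hypothetical, and the body is kept
   under #if 0 so it is never compiled.  */
#if 0
static void
for_each_jump_to (label)
     rtx label;
{
  register rtx jump;

  for (jump = jump_chain[INSN_UID (label)];
       jump != 0;
       jump = jump_chain[INSN_UID (jump)])
    ;				/* Process JUMP, a jump to LABEL, here.  */
}
#endif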
91
92 /* List of labels referred to from initializers.
93 These can never be deleted. */
94 rtx forced_labels;
95
96 /* Maximum index in jump_chain. */
97
98 static int max_jump_chain;
99
100 /* Set nonzero by jump_optimize if control can fall through
101 to the end of the function. */
102 int can_reach_end;
103
104 /* Indicates whether death notes are significant in cross jump analysis.
105    Normally they are not significant, because if A and B jump to C,
106 and R dies in A, it must die in B. But this might not be true after
107 stack register conversion, and we must compare death notes in that
108 case. */
109
110 static int cross_jump_death_matters = 0;
111
112 static int init_label_info PROTO((rtx));
113 static void delete_barrier_successors PROTO((rtx));
114 static void mark_all_labels PROTO((rtx, int));
115 static rtx delete_unreferenced_labels PROTO((rtx));
116 static void delete_noop_moves PROTO((rtx));
117 static int calculate_can_reach_end PROTO((rtx, int, int));
118 static int duplicate_loop_exit_test PROTO((rtx));
119 static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
120 static void do_cross_jump PROTO((rtx, rtx, rtx));
121 static int jump_back_p PROTO((rtx, rtx));
122 static int tension_vector_labels PROTO((rtx, int));
123 static void mark_jump_label PROTO((rtx, rtx, int));
124 static void delete_computation PROTO((rtx));
125 static void delete_from_jump_chain PROTO((rtx));
126 static int delete_labelref_insn PROTO((rtx, rtx, int));
127 static void mark_modified_reg PROTO((rtx, rtx));
128 static void redirect_tablejump PROTO((rtx, rtx));
129 static void jump_optimize_1 PROTO ((rtx, int, int, int, int));
130 #ifndef HAVE_cc0
131 static rtx find_insert_position PROTO((rtx, rtx));
132 #endif
133
134 /* Main external entry point into the jump optimizer. See comments before
135 jump_optimize_1 for descriptions of the arguments. */
136 void
137 jump_optimize (f, cross_jump, noop_moves, after_regscan)
138 rtx f;
139 int cross_jump;
140 int noop_moves;
141 int after_regscan;
142 {
143 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0);
144 }
145
146 /* Alternate entry into the jump optimizer. This entry point only rebuilds
147 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
148 instructions. */
149 void
150 rebuild_jump_labels (f)
151 rtx f;
152 {
153 jump_optimize_1 (f, 0, 0, 0, 1);
154 }
155
156 \f
157 /* Delete no-op jumps and optimize jumps to jumps
158 and jumps around jumps.
159 Delete unused labels and unreachable code.
160
161 If CROSS_JUMP is 1, detect matching code
162 before a jump and its destination and unify them.
163 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
164
165 If NOOP_MOVES is nonzero, delete no-op move insns.
166
167 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
168 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
169
170 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
171 and JUMP_LABEL field for jumping insns.
172
173 If `optimize' is zero, don't change any code,
174 just determine whether control drops off the end of the function.
175 This case occurs when we have -W and not -O.
176 It works because `delete_insn' checks the value of `optimize'
177 and refrains from actually deleting when that is 0. */
178
179 void
180 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
181 rtx f;
182 int cross_jump;
183 int noop_moves;
184 int after_regscan;
185 int mark_labels_only;
186 {
187 register rtx insn, next;
188 int changed;
189 int old_max_reg;
190 int first = 1;
191 int max_uid = 0;
192 rtx last_insn;
193
194 cross_jump_death_matters = (cross_jump == 2);
195 max_uid = init_label_info (f) + 1;
196
197 /* If we are performing cross jump optimizations, then initialize
198 tables mapping UIDs to EH regions to avoid incorrect movement
199 of insns from one EH region to another. */
200 if (flag_exceptions && cross_jump)
201 init_insn_eh_region (f, max_uid);
202
203 delete_barrier_successors (f);
204
205 /* Leave some extra room for labels and duplicate exit test insns
206 we make. */
207 max_jump_chain = max_uid * 14 / 10;
208 jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
209 bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));
210
211 mark_all_labels (f, cross_jump);
212
213 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
214 notes. */
215 if (mark_labels_only)
216 return;
217
218 /* Keep track of labels used from static data;
219 they cannot ever be deleted. */
220
221 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
222 LABEL_NUSES (XEXP (insn, 0))++;
223
224 check_exception_handler_labels ();
225
226 /* Keep track of labels used for marking handlers for exception
227 regions; they cannot usually be deleted. */
228
229 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
230 LABEL_NUSES (XEXP (insn, 0))++;
231
232 exception_optimize ();
233
234 last_insn = delete_unreferenced_labels (f);
235
236 if (!optimize)
237 {
238 can_reach_end = calculate_can_reach_end (last_insn, 1, 0);
239
240 /* Zero the "deleted" flag of all the "deleted" insns. */
241 for (insn = f; insn; insn = NEXT_INSN (insn))
242 INSN_DELETED_P (insn) = 0;
243
244 /* Show that the jump chain is not valid. */
245 jump_chain = 0;
246 return;
247 }
248
249 #ifdef HAVE_return
250 if (HAVE_return)
251 {
252 /* If we fall through to the epilogue, see if we can insert a RETURN insn
253 in front of it. If the machine allows it at this point (we might be
254 after reload for a leaf routine), it will improve optimization for it
255 to be there. */
256 insn = get_last_insn ();
257 while (insn && GET_CODE (insn) == NOTE)
258 insn = PREV_INSN (insn);
259
260 if (insn && GET_CODE (insn) != BARRIER)
261 {
262 emit_jump_insn (gen_return ());
263 emit_barrier ();
264 }
265 }
266 #endif
267
268 if (noop_moves)
269 delete_noop_moves (f);
270
271 /* If we haven't yet gotten to reload and we have just run regscan,
272 delete any insn that sets a register that isn't used elsewhere.
273    This helps some of the optimizations below by having fewer insns
274 being jumped around. */
275
276 if (! reload_completed && after_regscan)
277 for (insn = f; insn; insn = next)
278 {
279 rtx set = single_set (insn);
280
281 next = NEXT_INSN (insn);
282
283 if (set && GET_CODE (SET_DEST (set)) == REG
284 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
285 && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
286 /* We use regno_last_note_uid so as not to delete the setting
287 of a reg that's used in notes. A subsequent optimization
288 might arrange to use that reg for real. */
289 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
290 && ! side_effects_p (SET_SRC (set))
291 && ! find_reg_note (insn, REG_RETVAL, 0))
292 delete_insn (insn);
293 }
294
295 /* Now iterate optimizing jumps until nothing changes over one pass. */
296 changed = 1;
297 old_max_reg = max_reg_num ();
298 while (changed)
299 {
300 changed = 0;
301
302 for (insn = f; insn; insn = next)
303 {
304 rtx reallabelprev;
305 rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
306 rtx nlabel;
307 int this_is_simplejump, this_is_condjump, reversep = 0;
308 int this_is_condjump_in_parallel;
309
310 #if 0
311	  /* If NOT the first iteration, and this is the last jump pass
312 (just before final), do the special peephole optimizations.
313 Avoiding the first iteration gives ordinary jump opts
314 a chance to work before peephole opts. */
315
316 if (reload_completed && !first && !flag_no_peephole)
317 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
318 peephole (insn);
319 #endif
320
321 /* That could have deleted some insns after INSN, so check now
322 what the following insn is. */
323
324 next = NEXT_INSN (insn);
325
326 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
327 jump. Try to optimize by duplicating the loop exit test if so.
328 This is only safe immediately after regscan, because it uses
329 the values of regno_first_uid and regno_last_uid. */
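	  /* A sketch of the transformation (not from the original
	     comments): a loop emitted as
		NOTE_INSN_LOOP_BEG; goto test; body: ...; test: if (cond) goto body;
	     gets a copy of the exit test in place of the entry jump,
	     falling through into the body when the test succeeds and
	     jumping past the loop when it fails.  */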
330 if (after_regscan && GET_CODE (insn) == NOTE
331 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
332 && (temp1 = next_nonnote_insn (insn)) != 0
333 && simplejump_p (temp1))
334 {
335 temp = PREV_INSN (insn);
336 if (duplicate_loop_exit_test (insn))
337 {
338 changed = 1;
339 next = NEXT_INSN (temp);
340 continue;
341 }
342 }
343
344 if (GET_CODE (insn) != JUMP_INSN)
345 continue;
346
347 this_is_simplejump = simplejump_p (insn);
348 this_is_condjump = condjump_p (insn);
349 this_is_condjump_in_parallel = condjump_in_parallel_p (insn);
350
351 /* Tension the labels in dispatch tables. */
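	  /* ("Tensioning" means: if a table entry refers to a label whose
	     next real insn is itself an unconditional jump, point the
	     entry at that jump's final destination instead; a clarifying
	     note, see tension_vector_labels.)  */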
352
353 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
354 changed |= tension_vector_labels (PATTERN (insn), 0);
355 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
356 changed |= tension_vector_labels (PATTERN (insn), 1);
357
358 /* If a dispatch table always goes to the same place,
359 get rid of it and replace the insn that uses it. */
360
361 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
362 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
363 {
364 int i;
365 rtx pat = PATTERN (insn);
366 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
367 int len = XVECLEN (pat, diff_vec_p);
368 rtx dispatch = prev_real_insn (insn);
369
370 for (i = 0; i < len; i++)
371 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
372 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
373 break;
374 if (i == len
375 && dispatch != 0
376 && GET_CODE (dispatch) == JUMP_INSN
377 && JUMP_LABEL (dispatch) != 0
378 /* Don't mess with a casesi insn. */
379 && !(GET_CODE (PATTERN (dispatch)) == SET
380 && (GET_CODE (SET_SRC (PATTERN (dispatch)))
381 == IF_THEN_ELSE))
382 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
383 {
384 redirect_tablejump (dispatch,
385 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
386 changed = 1;
387 }
388 }
389
390 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
391
392 /* If a jump references the end of the function, try to turn
393 it into a RETURN insn, possibly a conditional one. */
394 if (JUMP_LABEL (insn)
395 && (next_active_insn (JUMP_LABEL (insn)) == 0
396 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
397 == RETURN))
398 changed |= redirect_jump (insn, NULL_RTX);
399
400 /* Detect jump to following insn. */
401 if (reallabelprev == insn && condjump_p (insn))
402 {
403 next = next_real_insn (JUMP_LABEL (insn));
404 delete_jump (insn);
405 changed = 1;
406 continue;
407 }
408
409 /* If we have an unconditional jump preceded by a USE, try to put
410 the USE before the target and jump there. This simplifies many
411 of the optimizations below since we don't have to worry about
412 dealing with these USE insns. We only do this if the label
413	     being branched to already has the identical USE or if code
414 never falls through to that label. */
415
416 if (this_is_simplejump
417 && (temp = prev_nonnote_insn (insn)) != 0
418 && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
419 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
420 && (GET_CODE (temp1) == BARRIER
421 || (GET_CODE (temp1) == INSN
422 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
423 /* Don't do this optimization if we have a loop containing only
424 the USE instruction, and the loop start label has a usage
425 count of 1. This is because we will redo this optimization
426		 every time through the outer loop, and jump opt will never
427 exit. */
428 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
429 && temp2 == JUMP_LABEL (insn)
430 && LABEL_NUSES (temp2) == 1))
431 {
432 if (GET_CODE (temp1) == BARRIER)
433 {
434 emit_insn_after (PATTERN (temp), temp1);
435 temp1 = NEXT_INSN (temp1);
436 }
437
438 delete_insn (temp);
439 redirect_jump (insn, get_label_before (temp1));
440 reallabelprev = prev_real_insn (temp1);
441 changed = 1;
442 }
443
444 /* Simplify if (...) x = a; else x = b; by converting it
445 to x = b; if (...) x = a;
446 if B is sufficiently simple, the test doesn't involve X,
447 and nothing in the test modifies B or X.
448
449 If we have small register classes, we also can't do this if X
450 is a hard register.
451
452 If the "x = b;" insn has any REG_NOTES, we don't do this because
453 of the possibility that we are running after CSE and there is a
454 REG_EQUAL note that is only valid if the branch has already been
455 taken. If we move the insn with the REG_EQUAL note, we may
456 fold the comparison to always be false in a later CSE pass.
457 (We could also delete the REG_NOTES when moving the insn, but it
458 seems simpler to not move it.) An exception is that we can move
459 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
460 value is the same as "b".
461
462 INSN is the branch over the `else' part.
463
464 We set:
465
466 TEMP to the jump insn preceding "x = a;"
467 TEMP1 to X
468 TEMP2 to the insn that sets "x = b;"
469 TEMP3 to the insn that sets "x = a;"
470 TEMP4 to the set of "x = b"; */
471
472 if (this_is_simplejump
473 && (temp3 = prev_active_insn (insn)) != 0
474 && GET_CODE (temp3) == INSN
475 && (temp4 = single_set (temp3)) != 0
476 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
477 && (! SMALL_REGISTER_CLASSES
478 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
479 && (temp2 = next_active_insn (insn)) != 0
480 && GET_CODE (temp2) == INSN
481 && (temp4 = single_set (temp2)) != 0
482 && rtx_equal_p (SET_DEST (temp4), temp1)
483 && ! side_effects_p (SET_SRC (temp4))
484 && ! may_trap_p (SET_SRC (temp4))
485 && (REG_NOTES (temp2) == 0
486 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
487 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
488 && XEXP (REG_NOTES (temp2), 1) == 0
489 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
490 SET_SRC (temp4))))
491 && (temp = prev_active_insn (temp3)) != 0
492 && condjump_p (temp) && ! simplejump_p (temp)
493 /* TEMP must skip over the "x = a;" insn */
494 && prev_real_insn (JUMP_LABEL (temp)) == insn
495 && no_labels_between_p (insn, JUMP_LABEL (temp))
496 /* There must be no other entries to the "x = b;" insn. */
497 && no_labels_between_p (JUMP_LABEL (temp), temp2)
498 /* INSN must either branch to the insn after TEMP2 or the insn
499 after TEMP2 must branch to the same place as INSN. */
500 && (reallabelprev == temp2
501 || ((temp5 = next_active_insn (temp2)) != 0
502 && simplejump_p (temp5)
503 && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
504 {
505 /* The test expression, X, may be a complicated test with
506 multiple branches. See if we can find all the uses of
507 the label that TEMP branches to without hitting a CALL_INSN
508 or a jump to somewhere else. */
509 rtx target = JUMP_LABEL (temp);
510 int nuses = LABEL_NUSES (target);
511 rtx p;
512 #ifdef HAVE_cc0
513 rtx q;
514 #endif
515
516 /* Set P to the first jump insn that goes around "x = a;". */
517 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
518 {
519 if (GET_CODE (p) == JUMP_INSN)
520 {
521 if (condjump_p (p) && ! simplejump_p (p)
522 && JUMP_LABEL (p) == target)
523 {
524 nuses--;
525 if (nuses == 0)
526 break;
527 }
528 else
529 break;
530 }
531 else if (GET_CODE (p) == CALL_INSN)
532 break;
533 }
534
535 #ifdef HAVE_cc0
536	      /* We cannot insert anything between a set of cc and its use,
537 so if P uses cc0, we must back up to the previous insn. */
538 q = prev_nonnote_insn (p);
539 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
540 && sets_cc0_p (PATTERN (q)))
541 p = q;
542 #endif
543
544 if (p)
545 p = PREV_INSN (p);
546
547 /* If we found all the uses and there was no data conflict, we
548 can move the assignment unless we can branch into the middle
549 from somewhere. */
550 if (nuses == 0 && p
551 && no_labels_between_p (p, insn)
552 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
553 && ! reg_set_between_p (temp1, p, temp3)
554 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
555 || ! modified_between_p (SET_SRC (temp4), p, temp2))
556 /* Verify that registers used by the jump are not clobbered
557 by the instruction being moved. */
558 && ! regs_set_between_p (PATTERN (temp),
559 PREV_INSN (temp2),
560 NEXT_INSN (temp2)))
561 {
562 emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
563 delete_insn (temp2);
564
565 /* Set NEXT to an insn that we know won't go away. */
566 next = next_active_insn (insn);
567
568 /* Delete the jump around the set. Note that we must do
569 this before we redirect the test jumps so that it won't
570 delete the code immediately following the assignment
571 we moved (which might be a jump). */
572
573 delete_insn (insn);
574
575 /* We either have two consecutive labels or a jump to
576 a jump, so adjust all the JUMP_INSNs to branch to where
577 INSN branches to. */
578 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
579 if (GET_CODE (p) == JUMP_INSN)
580 redirect_jump (p, target);
581
582 changed = 1;
583 continue;
584 }
585 }
586
587 /* Simplify if (...) { x = a; goto l; } x = b; by converting it
588 to x = a; if (...) goto l; x = b;
589 if A is sufficiently simple, the test doesn't involve X,
590 and nothing in the test modifies A or X.
591
592 If we have small register classes, we also can't do this if X
593 is a hard register.
594
595 If the "x = a;" insn has any REG_NOTES, we don't do this because
596 of the possibility that we are running after CSE and there is a
597 REG_EQUAL note that is only valid if the branch has already been
598 taken. If we move the insn with the REG_EQUAL note, we may
599 fold the comparison to always be false in a later CSE pass.
600 (We could also delete the REG_NOTES when moving the insn, but it
601 seems simpler to not move it.) An exception is that we can move
602 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
603 value is the same as "a".
604
605 INSN is the goto.
606
607 We set:
608
609 TEMP to the jump insn preceding "x = a;"
610 TEMP1 to X
611 TEMP2 to the insn that sets "x = b;"
612 TEMP3 to the insn that sets "x = a;"
613 TEMP4 to the set of "x = a"; */
614
615 if (this_is_simplejump
616 && (temp2 = next_active_insn (insn)) != 0
617 && GET_CODE (temp2) == INSN
618 && (temp4 = single_set (temp2)) != 0
619 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
620 && (! SMALL_REGISTER_CLASSES
621 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
622 && (temp3 = prev_active_insn (insn)) != 0
623 && GET_CODE (temp3) == INSN
624 && (temp4 = single_set (temp3)) != 0
625 && rtx_equal_p (SET_DEST (temp4), temp1)
626 && ! side_effects_p (SET_SRC (temp4))
627 && ! may_trap_p (SET_SRC (temp4))
628 && (REG_NOTES (temp3) == 0
629 || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
630 || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
631 && XEXP (REG_NOTES (temp3), 1) == 0
632 && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
633 SET_SRC (temp4))))
634 && (temp = prev_active_insn (temp3)) != 0
635 && condjump_p (temp) && ! simplejump_p (temp)
636 /* TEMP must skip over the "x = a;" insn */
637 && prev_real_insn (JUMP_LABEL (temp)) == insn
638 && no_labels_between_p (temp, insn))
639 {
640 rtx prev_label = JUMP_LABEL (temp);
641 rtx insert_after = prev_nonnote_insn (temp);
642
643 #ifdef HAVE_cc0
644 /* We cannot insert anything between a set of cc and its use. */
645 if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
646 && sets_cc0_p (PATTERN (insert_after)))
647 insert_after = prev_nonnote_insn (insert_after);
648 #endif
649 ++LABEL_NUSES (prev_label);
650
651 if (insert_after
652 && no_labels_between_p (insert_after, temp)
653 && ! reg_referenced_between_p (temp1, insert_after, temp3)
654 && ! reg_referenced_between_p (temp1, temp3,
655 NEXT_INSN (temp2))
656 && ! reg_set_between_p (temp1, insert_after, temp)
657 && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
658 /* Verify that registers used by the jump are not clobbered
659 by the instruction being moved. */
660 && ! regs_set_between_p (PATTERN (temp),
661 PREV_INSN (temp3),
662 NEXT_INSN (temp3))
663 && invert_jump (temp, JUMP_LABEL (insn)))
664 {
665 emit_insn_after_with_line_notes (PATTERN (temp3),
666 insert_after, temp3);
667 delete_insn (temp3);
668 delete_insn (insn);
669 /* Set NEXT to an insn that we know won't go away. */
670 next = temp2;
671 changed = 1;
672 }
673 if (prev_label && --LABEL_NUSES (prev_label) == 0)
674 delete_insn (prev_label);
675 if (changed)
676 continue;
677 }
678
679 #ifndef HAVE_cc0
680 /* If we have if (...) x = exp; and branches are expensive,
681 EXP is a single insn, does not have any side effects, cannot
682 trap, and is not too costly, convert this to
683 t = exp; if (...) x = t;
684
685 Don't do this when we have CC0 because it is unlikely to help
686 and we'd need to worry about where to place the new insn and
687 the potential for conflicts. We also can't do this when we have
688 notes on the insn for the same reason as above.
689
690 We set:
691
692 TEMP to the "x = exp;" insn.
693 TEMP1 to the single set in the "x = exp;" insn.
694 TEMP2 to "x". */
695
696 if (! reload_completed
697 && this_is_condjump && ! this_is_simplejump
698 && BRANCH_COST >= 3
699 && (temp = next_nonnote_insn (insn)) != 0
700 && GET_CODE (temp) == INSN
701 && REG_NOTES (temp) == 0
702 && (reallabelprev == temp
703 || ((temp2 = next_active_insn (temp)) != 0
704 && simplejump_p (temp2)
705 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
706 && (temp1 = single_set (temp)) != 0
707 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
708 && (! SMALL_REGISTER_CLASSES
709 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
710 && GET_CODE (SET_SRC (temp1)) != REG
711 && GET_CODE (SET_SRC (temp1)) != SUBREG
712 && GET_CODE (SET_SRC (temp1)) != CONST_INT
713 && ! side_effects_p (SET_SRC (temp1))
714 && ! may_trap_p (SET_SRC (temp1))
715 && rtx_cost (SET_SRC (temp1), SET) < 10)
716 {
717 rtx new = gen_reg_rtx (GET_MODE (temp2));
718
719 if ((temp3 = find_insert_position (insn, temp))
720 && validate_change (temp, &SET_DEST (temp1), new, 0))
721 {
722 next = emit_insn_after (gen_move_insn (temp2, new), insn);
723 emit_insn_after_with_line_notes (PATTERN (temp),
724 PREV_INSN (temp3), temp);
725 delete_insn (temp);
726 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
727
728 if (after_regscan)
729 {
730 reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
731 old_max_reg = max_reg_num ();
732 }
733 }
734 }
735
736 /* Similarly, if it takes two insns to compute EXP but they
737 have the same destination. Here TEMP3 will be the second
738 insn and TEMP4 the SET from that insn. */
739
740 if (! reload_completed
741 && this_is_condjump && ! this_is_simplejump
742 && BRANCH_COST >= 4
743 && (temp = next_nonnote_insn (insn)) != 0
744 && GET_CODE (temp) == INSN
745 && REG_NOTES (temp) == 0
746 && (temp3 = next_nonnote_insn (temp)) != 0
747 && GET_CODE (temp3) == INSN
748 && REG_NOTES (temp3) == 0
749 && (reallabelprev == temp3
750 || ((temp2 = next_active_insn (temp3)) != 0
751 && simplejump_p (temp2)
752 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
753 && (temp1 = single_set (temp)) != 0
754 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
755 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
756 && (! SMALL_REGISTER_CLASSES
757 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
758 && ! side_effects_p (SET_SRC (temp1))
759 && ! may_trap_p (SET_SRC (temp1))
760 && rtx_cost (SET_SRC (temp1), SET) < 10
761 && (temp4 = single_set (temp3)) != 0
762 && rtx_equal_p (SET_DEST (temp4), temp2)
763 && ! side_effects_p (SET_SRC (temp4))
764 && ! may_trap_p (SET_SRC (temp4))
765 && rtx_cost (SET_SRC (temp4), SET) < 10)
766 {
767 rtx new = gen_reg_rtx (GET_MODE (temp2));
768
769 if ((temp5 = find_insert_position (insn, temp))
770 && (temp6 = find_insert_position (insn, temp3))
771 && validate_change (temp, &SET_DEST (temp1), new, 0))
772 {
773 /* Use the earliest of temp5 and temp6. */
774 if (temp5 != insn)
775 temp6 = temp5;
776 next = emit_insn_after (gen_move_insn (temp2, new), insn);
777 emit_insn_after_with_line_notes (PATTERN (temp),
778 PREV_INSN (temp6), temp);
779 emit_insn_after_with_line_notes
780 (replace_rtx (PATTERN (temp3), temp2, new),
781 PREV_INSN (temp6), temp3);
782 delete_insn (temp);
783 delete_insn (temp3);
784 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
785
786 if (after_regscan)
787 {
788 reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
789 old_max_reg = max_reg_num ();
790 }
791 }
792 }
793
794 /* Finally, handle the case where two insns are used to
795 compute EXP but a temporary register is used. Here we must
796 ensure that the temporary register is not used anywhere else. */
797
798 if (! reload_completed
799 && after_regscan
800 && this_is_condjump && ! this_is_simplejump
801 && BRANCH_COST >= 4
802 && (temp = next_nonnote_insn (insn)) != 0
803 && GET_CODE (temp) == INSN
804 && REG_NOTES (temp) == 0
805 && (temp3 = next_nonnote_insn (temp)) != 0
806 && GET_CODE (temp3) == INSN
807 && REG_NOTES (temp3) == 0
808 && (reallabelprev == temp3
809 || ((temp2 = next_active_insn (temp3)) != 0
810 && simplejump_p (temp2)
811 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
812 && (temp1 = single_set (temp)) != 0
813 && (temp5 = SET_DEST (temp1),
814 (GET_CODE (temp5) == REG
815 || (GET_CODE (temp5) == SUBREG
816 && (temp5 = SUBREG_REG (temp5),
817 GET_CODE (temp5) == REG))))
818 && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
819 && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
820 && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
821 && ! side_effects_p (SET_SRC (temp1))
822 && ! may_trap_p (SET_SRC (temp1))
823 && rtx_cost (SET_SRC (temp1), SET) < 10
824 && (temp4 = single_set (temp3)) != 0
825 && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
826 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
827 && (! SMALL_REGISTER_CLASSES
828 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
829 && rtx_equal_p (SET_DEST (temp4), temp2)
830 && ! side_effects_p (SET_SRC (temp4))
831 && ! may_trap_p (SET_SRC (temp4))
832 && rtx_cost (SET_SRC (temp4), SET) < 10)
833 {
834 rtx new = gen_reg_rtx (GET_MODE (temp2));
835
836 if ((temp5 = find_insert_position (insn, temp))
837 && (temp6 = find_insert_position (insn, temp3))
838 && validate_change (temp3, &SET_DEST (temp4), new, 0))
839 {
840 /* Use the earliest of temp5 and temp6. */
841 if (temp5 != insn)
842 temp6 = temp5;
843 next = emit_insn_after (gen_move_insn (temp2, new), insn);
844 emit_insn_after_with_line_notes (PATTERN (temp),
845 PREV_INSN (temp6), temp);
846 emit_insn_after_with_line_notes (PATTERN (temp3),
847 PREV_INSN (temp6), temp3);
848 delete_insn (temp);
849 delete_insn (temp3);
850 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
851
852 if (after_regscan)
853 {
854 reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
855 old_max_reg = max_reg_num ();
856 }
857 }
858 }
859 #endif /* HAVE_cc0 */
860
861 /* Try to use a conditional move (if the target has them), or a
862 store-flag insn. The general case is:
863
864 1) x = a; if (...) x = b; and
865 2) if (...) x = b;
866
867 If the jump would be faster, the machine should not have defined
868	     the movcc or scc insns!  These cases are often made by the
869 previous optimization.
870
871 The second case is treated as x = x; if (...) x = b;.
872
873 INSN here is the jump around the store. We set:
874
875 TEMP to the "x = b;" insn.
876 TEMP1 to X.
877 TEMP2 to B.
878 TEMP3 to A (X in the second case).
879 TEMP4 to the condition being tested.
880 TEMP5 to the earliest insn used to find the condition. */
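	  /* An illustrative source-level sketch (not from the original
	     comments) of case 1 on a conditional-move target:

		x = a; if (cond) x = b;   becomes   x = cond ? b : a;

	     computed entirely without a branch.  */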
881
882 if (/* We can't do this after reload has completed. */
883 ! reload_completed
884 && this_is_condjump && ! this_is_simplejump
885 /* Set TEMP to the "x = b;" insn. */
886 && (temp = next_nonnote_insn (insn)) != 0
887 && GET_CODE (temp) == INSN
888 && GET_CODE (PATTERN (temp)) == SET
889 && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
890 && (! SMALL_REGISTER_CLASSES
891 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
892 && ! side_effects_p (temp2 = SET_SRC (PATTERN (temp)))
893 && ! may_trap_p (temp2)
894 /* Allow either form, but prefer the former if both apply.
895 There is no point in using the old value of TEMP1 if
896 it is a register, since cse will alias them. It can
897		 lose if the old value is a hard register, since CSE
898		 won't replace hard registers.  Avoid using TEMP3 if we
899		 have small register classes and it is a hard register.  */
900 && (((temp3 = reg_set_last (temp1, insn)) != 0
901 && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
902 && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
903 /* Make the latter case look like x = x; if (...) x = b; */
904 || (temp3 = temp1, 1))
905 /* INSN must either branch to the insn after TEMP or the insn
906 after TEMP must branch to the same place as INSN. */
907 && (reallabelprev == temp
908 || ((temp4 = next_active_insn (temp)) != 0
909 && simplejump_p (temp4)
910 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
911 && (temp4 = get_condition (insn, &temp5)) != 0
912 /* We must be comparing objects whose modes imply the size.
913 We could handle BLKmode if (1) emit_store_flag could
914 and (2) we could find the size reliably. */
915 && GET_MODE (XEXP (temp4, 0)) != BLKmode
916 /* Even if branches are cheap, the store_flag optimization
917 can win when the operation to be performed can be
918 expressed directly. */
919 #ifdef HAVE_cc0
920 /* If the previous insn sets CC0 and something else, we can't
921 do this since we are going to delete that insn. */
922
923 && ! ((temp6 = prev_nonnote_insn (insn)) != 0
924 && GET_CODE (temp6) == INSN
925 && (sets_cc0_p (PATTERN (temp6)) == -1
926 || (sets_cc0_p (PATTERN (temp6)) == 1
927 && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
928 #endif
929 )
930 {
931 #ifdef HAVE_conditional_move
932 /* First try a conditional move. */
933 {
934 enum rtx_code code = GET_CODE (temp4);
935 rtx var = temp1;
936 rtx cond0, cond1, aval, bval;
937 rtx target;
938
939 /* Copy the compared variables into cond0 and cond1, so that
940		 any side effects performed in or after the old comparison
941		 will not affect our compare, which will come later.  */
942 /* ??? Is it possible to just use the comparison in the jump
943 insn? After all, we're going to delete it. We'd have
944 to modify emit_conditional_move to take a comparison rtx
945 instead or write a new function. */
946 cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
947 /* We want the target to be able to simplify comparisons with
948 zero (and maybe other constants as well), so don't create
949		 pseudos for them.  There's no need to, either.  */
950 if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
951 || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
952 cond1 = XEXP (temp4, 1);
953 else
954 cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));
955
956 aval = temp3;
957 bval = temp2;
958
959 start_sequence ();
960 target = emit_conditional_move (var, code,
961 cond0, cond1, VOIDmode,
962 aval, bval, GET_MODE (var),
963 (code == LTU || code == GEU
964 || code == LEU || code == GTU));
965
966 if (target)
967 {
968 rtx seq1,seq2,last;
969
970 /* Save the conditional move sequence but don't emit it
971 yet. On some machines, like the alpha, it is possible
972 that temp5 == insn, so next generate the sequence that
973 saves the compared values and then emit both
974 sequences ensuring seq1 occurs before seq2. */
975 seq2 = get_insns ();
976 end_sequence ();
977
978 /* Now that we can't fail, generate the copy insns that
979 preserve the compared values. */
980 start_sequence ();
981 emit_move_insn (cond0, XEXP (temp4, 0));
982 if (cond1 != XEXP (temp4, 1))
983 emit_move_insn (cond1, XEXP (temp4, 1));
984 seq1 = get_insns ();
985 end_sequence ();
986
987 emit_insns_before (seq1, temp5);
988		      /* Insert the conditional move after INSN, to be sure
989			 that the jump and a possible compare won't be
			 separated.  */
990 last = emit_insns_after (seq2, insn);
991
992 /* ??? We can also delete the insn that sets X to A.
993 Flow will do it too though. */
994 delete_insn (temp);
995 next = NEXT_INSN (insn);
996 delete_jump (insn);
997
998 if (after_regscan)
999 {
1000 reg_scan_update (seq1, NEXT_INSN (last), old_max_reg);
1001 old_max_reg = max_reg_num ();
1002 }
1003
1004 changed = 1;
1005 continue;
1006 }
1007 else
1008 end_sequence ();
1009 }
1010 #endif
1011
1012 /* That didn't work, try a store-flag insn.
1013
1014 We further divide the cases into:
1015
1016 1) x = a; if (...) x = b; and either A or B is zero,
1017 2) if (...) x = 0; and jumps are expensive,
1018 3) x = a; if (...) x = b; and A and B are constants where all
1019 the set bits in A are also set in B and jumps are expensive,
1020		 4) x = a; if (...) x = b; and A and B are non-zero, and jumps are
1021 more expensive, and
1022 5) if (...) x = b; if jumps are even more expensive. */
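	      /* E.g. (an illustrative sketch): in case 1 with A == 0 and a
		 store-flag value normalized to -1, the result can be
		 computed without a branch as

		    x = (- (cond != 0)) & b;  */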
1023
1024 if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
1025 && ((GET_CODE (temp3) == CONST_INT)
1026 /* Make the latter case look like
1027 x = x; if (...) x = 0; */
1028 || (temp3 = temp1,
1029 ((BRANCH_COST >= 2
1030 && temp2 == const0_rtx)
1031 || BRANCH_COST >= 3)))
1032	      /* If B is zero, OK; if A is zero, we can only do (1) if we
1033		 can reverse the condition.  See if (3) applies, possibly
1034 by reversing the condition. Prefer reversing to (4) when
1035 branches are very expensive. */
1036 && (((BRANCH_COST >= 2
1037 || STORE_FLAG_VALUE == -1
1038 || (STORE_FLAG_VALUE == 1
1039 /* Check that the mask is a power of two,
1040 so that it can probably be generated
1041 with a shift. */
1042 && GET_CODE (temp3) == CONST_INT
1043 && exact_log2 (INTVAL (temp3)) >= 0))
1044 && (reversep = 0, temp2 == const0_rtx))
1045 || ((BRANCH_COST >= 2
1046 || STORE_FLAG_VALUE == -1
1047 || (STORE_FLAG_VALUE == 1
1048 && GET_CODE (temp2) == CONST_INT
1049 && exact_log2 (INTVAL (temp2)) >= 0))
1050 && temp3 == const0_rtx
1051 && (reversep = can_reverse_comparison_p (temp4, insn)))
1052 || (BRANCH_COST >= 2
1053 && GET_CODE (temp2) == CONST_INT
1054 && GET_CODE (temp3) == CONST_INT
1055 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
1056 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
1057 && (reversep = can_reverse_comparison_p (temp4,
1058 insn)))))
1059 || BRANCH_COST >= 3)
1060 )
1061 {
1062 enum rtx_code code = GET_CODE (temp4);
1063 rtx uval, cval, var = temp1;
1064 int normalizep;
1065 rtx target;
1066
1067 /* If necessary, reverse the condition. */
1068 if (reversep)
1069 code = reverse_condition (code), uval = temp2, cval = temp3;
1070 else
1071 uval = temp3, cval = temp2;
1072
1073 /* If CVAL is non-zero, normalize to -1. Otherwise, if UVAL
1074 is the constant 1, it is best to just compute the result
1075 directly. If UVAL is constant and STORE_FLAG_VALUE
1076 includes all of its bits, it is best to compute the flag
1077 value unnormalized and `and' it with UVAL. Otherwise,
1078 normalize to -1 and `and' with UVAL. */
1079 normalizep = (cval != const0_rtx ? -1
1080 : (uval == const1_rtx ? 1
1081 : (GET_CODE (uval) == CONST_INT
1082 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
1083 ? 0 : -1));
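		  /* Thus (a clarifying note): a non-zero CVAL forces -1;
		     `if (cond) x = 1;' (UVAL == const1_rtx) gets 1 and
		     needs no masking; and a constant UVAL whose bits all
		     lie within STORE_FLAG_VALUE gets 0, needing only the
		     `and' with UVAL below.  */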
1084
1085 /* We will be putting the store-flag insn immediately in
1086 front of the comparison that was originally being done,
1087 so we know all the variables in TEMP4 will be valid.
1088 However, this might be in front of the assignment of
1089 A to VAR. If it is, it would clobber the store-flag
1090 we will be emitting.
1091
1092 Therefore, emit into a temporary which will be copied to
1093 VAR immediately after TEMP. */
1094
1095 start_sequence ();
1096 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
1097 XEXP (temp4, 0), XEXP (temp4, 1),
1098 VOIDmode,
1099 (code == LTU || code == LEU
1100 || code == GEU || code == GTU),
1101 normalizep);
1102 if (target)
1103 {
1104 rtx seq;
1105 rtx before = insn;
1106
1107 seq = get_insns ();
1108 end_sequence ();
1109
1110 /* Put the store-flag insns in front of the first insn
1111 used to compute the condition to ensure that we
1112			 use the same values for them as the current
1113 comparison. However, the remainder of the insns we
1114 generate will be placed directly in front of the
1115 jump insn, in case any of the pseudos we use
1116 are modified earlier. */
1117
1118 emit_insns_before (seq, temp5);
1119
1120 start_sequence ();
1121
1122 /* Both CVAL and UVAL are non-zero. */
1123 if (cval != const0_rtx && uval != const0_rtx)
1124 {
1125 rtx tem1, tem2;
1126
1127 tem1 = expand_and (uval, target, NULL_RTX);
1128 if (GET_CODE (cval) == CONST_INT
1129 && GET_CODE (uval) == CONST_INT
1130 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
1131 tem2 = cval;
1132 else
1133 {
1134 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
1135 target, NULL_RTX, 0);
1136 tem2 = expand_and (cval, tem2,
1137 (GET_CODE (tem2) == REG
1138 ? tem2 : 0));
1139 }
1140
1141 /* If we usually make new pseudos, do so here. This
1142 turns out to help machines that have conditional
1143 move insns. */
1144 /* ??? Conditional moves have already been handled.
1145 This may be obsolete. */
1146
1147 if (flag_expensive_optimizations)
1148 target = 0;
1149
1150 target = expand_binop (GET_MODE (var), ior_optab,
1151 tem1, tem2, target,
1152 1, OPTAB_WIDEN);
1153 }
1154 else if (normalizep != 1)
1155 {
1156 /* We know that either CVAL or UVAL is zero. If
1157 UVAL is zero, negate TARGET and `and' with CVAL.
1158 Otherwise, `and' with UVAL. */
1159 if (uval == const0_rtx)
1160 {
1161 target = expand_unop (GET_MODE (var), one_cmpl_optab,
1162 target, NULL_RTX, 0);
1163 uval = cval;
1164 }
1165
1166 target = expand_and (uval, target,
1167 (GET_CODE (target) == REG
1168 && ! preserve_subexpressions_p ()
1169 ? target : NULL_RTX));
1170 }
1171
1172 emit_move_insn (var, target);
1173 seq = get_insns ();
1174 end_sequence ();
1175 #ifdef HAVE_cc0
1176 /* If INSN uses CC0, we must not separate it from the
1177 insn that sets cc0. */
1178 if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
1179 before = prev_nonnote_insn (before);
1180 #endif
1181 emit_insns_before (seq, before);
1182
1183 delete_insn (temp);
1184 next = NEXT_INSN (insn);
1185 delete_jump (insn);
1186
1187 if (after_regscan)
1188 {
1189 reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
1190 old_max_reg = max_reg_num ();
1191 }
1192
1193 changed = 1;
1194 continue;
1195 }
1196 else
1197 end_sequence ();
1198 }
1199 }
1200
1201 /* If branches are expensive, convert
1202 if (foo) bar++; to bar += (foo != 0);
1203 and similarly for "bar--;"
1204
1205 INSN is the conditional branch around the arithmetic. We set:
1206
1207 TEMP is the arithmetic insn.
1208 TEMP1 is the SET doing the arithmetic.
1209 TEMP2 is the operand being incremented or decremented.
1210	     TEMP3 is the condition being tested.
1211	     TEMP4 is the earliest insn used to find the condition.  */
1212
1213 if ((BRANCH_COST >= 2
1214 #ifdef HAVE_incscc
1215 || HAVE_incscc
1216 #endif
1217 #ifdef HAVE_decscc
1218 || HAVE_decscc
1219 #endif
1220 )
1221 && ! reload_completed
1222 && this_is_condjump && ! this_is_simplejump
1223 && (temp = next_nonnote_insn (insn)) != 0
1224 && (temp1 = single_set (temp)) != 0
1225 && (temp2 = SET_DEST (temp1),
1226 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
1227 && GET_CODE (SET_SRC (temp1)) == PLUS
1228 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
1229 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
1230 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
1231 && ! side_effects_p (temp2)
1232 && ! may_trap_p (temp2)
1233 /* INSN must either branch to the insn after TEMP or the insn
1234 after TEMP must branch to the same place as INSN. */
1235 && (reallabelprev == temp
1236 || ((temp3 = next_active_insn (temp)) != 0
1237 && simplejump_p (temp3)
1238 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
1239 && (temp3 = get_condition (insn, &temp4)) != 0
1240 /* We must be comparing objects whose modes imply the size.
1241 We could handle BLKmode if (1) emit_store_flag could
1242 and (2) we could find the size reliably. */
1243 && GET_MODE (XEXP (temp3, 0)) != BLKmode
1244 && can_reverse_comparison_p (temp3, insn))
1245 {
1246 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
1247 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1248
1249 start_sequence ();
1250
1251 /* It must be the case that TEMP2 is not modified in the range
1252 [TEMP4, INSN). The one exception we make is if the insn
1253 before INSN sets TEMP2 to something which is also unchanged
1254 in that range. In that case, we can move the initialization
1255 into our sequence. */
1256
1257 if ((temp5 = prev_active_insn (insn)) != 0
1258 && no_labels_between_p (temp5, insn)
1259 && GET_CODE (temp5) == INSN
1260 && (temp6 = single_set (temp5)) != 0
1261 && rtx_equal_p (temp2, SET_DEST (temp6))
1262 && (CONSTANT_P (SET_SRC (temp6))
1263 || GET_CODE (SET_SRC (temp6)) == REG
1264 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1265 {
1266 emit_insn (PATTERN (temp5));
1267 init_insn = temp5;
1268 init = SET_SRC (temp6);
1269 }
1270
1271 if (CONSTANT_P (init)
1272 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1273 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1274 XEXP (temp3, 0), XEXP (temp3, 1),
1275 VOIDmode,
1276 (code == LTU || code == LEU
1277 || code == GTU || code == GEU), 1);
1278
1279 /* If we can do the store-flag, do the addition or
1280 subtraction. */
1281
1282 if (target)
1283 target = expand_binop (GET_MODE (temp2),
1284 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1285 ? add_optab : sub_optab),
1286 temp2, target, temp2, 0, OPTAB_WIDEN);
1287
1288 if (target != 0)
1289 {
1290 /* Put the result back in temp2 in case it isn't already.
1291		 Then replace the jump, possibly a CC0-setting insn in
1292 front of the jump, and TEMP, with the sequence we have
1293 made. */
1294
1295 if (target != temp2)
1296 emit_move_insn (temp2, target);
1297
1298 seq = get_insns ();
1299 end_sequence ();
1300
1301 emit_insns_before (seq, temp4);
1302 delete_insn (temp);
1303
1304 if (init_insn)
1305 delete_insn (init_insn);
1306
1307 next = NEXT_INSN (insn);
1308 #ifdef HAVE_cc0
1309 delete_insn (prev_nonnote_insn (insn));
1310 #endif
1311 delete_insn (insn);
1312
1313 if (after_regscan)
1314 {
1315 reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
1316 old_max_reg = max_reg_num ();
1317 }
1318
1319 changed = 1;
1320 continue;
1321 }
1322 else
1323 end_sequence ();
1324 }
1325
1326 /* Simplify if (...) x = 1; else {...} if (x) ...
1327 We recognize this case scanning backwards as well.
1328
1329 TEMP is the assignment to x;
1330 TEMP1 is the label at the head of the second if. */
1331 /* ?? This should call get_condition to find the values being
1332 compared, instead of looking for a COMPARE insn when HAVE_cc0
1333 is not defined. This would allow it to work on the m88k. */
1334 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1335 is not defined and the condition is tested by a separate compare
1336 insn. This is because the code below assumes that the result
1337 of the compare dies in the following branch.
1338
1339 Not only that, but there might be other insns between the
1340 compare and branch whose results are live. Those insns need
1341 to be executed.
1342
1343 A way to fix this is to move the insns at JUMP_LABEL (insn)
1344 to before INSN. If we are running before flow, they will
1345 be deleted if they aren't needed. But this doesn't work
1346 well after flow.
1347
1348 This is really a special-case of jump threading, anyway. The
1349 right thing to do is to replace this and jump threading with
1350 much simpler code in cse.
1351
1352 This code has been turned off in the non-cc0 case in the
1353 meantime. */
1354
1355 #ifdef HAVE_cc0
1356 else if (this_is_simplejump
1357 /* Safe to skip USE and CLOBBER insns here
1358 since they will not be deleted. */
1359 && (temp = prev_active_insn (insn))
1360 && no_labels_between_p (temp, insn)
1361 && GET_CODE (temp) == INSN
1362 && GET_CODE (PATTERN (temp)) == SET
1363 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1364 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1365 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1366 /* If we find that the next value tested is `x'
1367 (TEMP1 is the insn where this happens), win. */
1368 && GET_CODE (temp1) == INSN
1369 && GET_CODE (PATTERN (temp1)) == SET
1370 #ifdef HAVE_cc0
1371 /* Does temp1 `tst' the value of x? */
1372 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1373 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1374 && (temp1 = next_nonnote_insn (temp1))
1375 #else
1376 /* Does temp1 compare the value of x against zero? */
1377 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1378 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1379 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1380 == SET_DEST (PATTERN (temp)))
1381 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1382 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1383 #endif
1384 && condjump_p (temp1))
1385 {
1386 /* Get the if_then_else from the condjump. */
1387 rtx choice = SET_SRC (PATTERN (temp1));
1388 if (GET_CODE (choice) == IF_THEN_ELSE)
1389 {
1390 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1391 rtx val = SET_SRC (PATTERN (temp));
1392 rtx cond
1393 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1394 val, const0_rtx);
1395 rtx ultimate;
1396
1397 if (cond == const_true_rtx)
1398 ultimate = XEXP (choice, 1);
1399 else if (cond == const0_rtx)
1400 ultimate = XEXP (choice, 2);
1401 else
1402 ultimate = 0;
1403
1404 if (ultimate == pc_rtx)
1405 ultimate = get_label_after (temp1);
1406 else if (ultimate && GET_CODE (ultimate) != RETURN)
1407 ultimate = XEXP (ultimate, 0);
1408
1409		  if (ultimate && JUMP_LABEL (insn) != ultimate)
1410 changed |= redirect_jump (insn, ultimate);
1411 }
1412 }
1413 #endif
1414
1415 #if 0
1416 /* @@ This needs a bit of work before it will be right.
1417
1418 Any type of comparison can be accepted for the first and
1419 second compare. When rewriting the first jump, we must
1420	 compute what conditions can reach label3, and use the
1421 appropriate code. We can not simply reverse/swap the code
1422 of the first jump. In some cases, the second jump must be
1423 rewritten also.
1424
1425 For example,
1426 < == converts to > ==
1427 < != converts to == >
1428 etc.
1429
1430 If the code is written to only accept an '==' test for the second
1431 compare, then all that needs to be done is to swap the condition
1432 of the first branch.
1433
1434	 It is questionable whether we want this optimization anyway,
1435 since if the user wrote code like this because he/she knew that
1436 the jump to label1 is taken most of the time, then rewriting
1437 this gives slower code. */
1438 /* @@ This should call get_condition to find the values being
1439 compared, instead of looking for a COMPARE insn when HAVE_cc0
1440 is not defined. This would allow it to work on the m88k. */
1441 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1442 is not defined and the condition is tested by a separate compare
1443 insn. This is because the code below assumes that the result
1444 of the compare dies in the following branch. */
1445
1446 /* Simplify test a ~= b
1447 condjump label1;
1448 test a == b
1449 condjump label2;
1450 jump label3;
1451 label1:
1452
1453 rewriting as
1454 test a ~~= b
1455 condjump label3
1456 test a == b
1457 condjump label2
1458 label1:
1459
1460 where ~= is an inequality, e.g. >, and ~~= is the swapped
1461 inequality, e.g. <.
1462
1463 We recognize this case scanning backwards.
1464
1465 TEMP is the conditional jump to `label2';
1466 TEMP1 is the test for `a == b';
1467 TEMP2 is the conditional jump to `label1';
1468 TEMP3 is the test for `a ~= b'. */
1469 else if (this_is_simplejump
1470 && (temp = prev_active_insn (insn))
1471 && no_labels_between_p (temp, insn)
1472 && condjump_p (temp)
1473 && (temp1 = prev_active_insn (temp))
1474 && no_labels_between_p (temp1, temp)
1475 && GET_CODE (temp1) == INSN
1476 && GET_CODE (PATTERN (temp1)) == SET
1477 #ifdef HAVE_cc0
1478 && sets_cc0_p (PATTERN (temp1)) == 1
1479 #else
1480 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1481 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1482 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1483 #endif
1484 && (temp2 = prev_active_insn (temp1))
1485 && no_labels_between_p (temp2, temp1)
1486 && condjump_p (temp2)
1487 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1488 && (temp3 = prev_active_insn (temp2))
1489 && no_labels_between_p (temp3, temp2)
1490 && GET_CODE (PATTERN (temp3)) == SET
1491 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1492 SET_DEST (PATTERN (temp1)))
1493 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1494 SET_SRC (PATTERN (temp3)))
1495 && ! inequality_comparisons_p (PATTERN (temp))
1496 && inequality_comparisons_p (PATTERN (temp2)))
1497 {
1498 rtx fallthrough_label = JUMP_LABEL (temp2);
1499
1500 ++LABEL_NUSES (fallthrough_label);
1501 if (swap_jump (temp2, JUMP_LABEL (insn)))
1502 {
1503 delete_insn (insn);
1504 changed = 1;
1505 }
1506
1507 if (--LABEL_NUSES (fallthrough_label) == 0)
1508 delete_insn (fallthrough_label);
1509 }
1510 #endif
1511 /* Simplify if (...) {... x = 1;} if (x) ...
1512
1513 We recognize this case backwards.
1514
1515 TEMP is the test of `x';
1516 TEMP1 is the assignment to `x' at the end of the
1517 previous statement. */
1518 /* @@ This should call get_condition to find the values being
1519 compared, instead of looking for a COMPARE insn when HAVE_cc0
1520 is not defined. This would allow it to work on the m88k. */
1521 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1522 is not defined and the condition is tested by a separate compare
1523 insn. This is because the code below assumes that the result
1524 of the compare dies in the following branch. */
1525
1526 /* ??? This has to be turned off. The problem is that the
1527 unconditional jump might indirectly end up branching to the
1528 label between TEMP1 and TEMP. We can't detect this, in general,
1529 since it may become a jump to there after further optimizations.
1530 If that jump is done, it will be deleted, so we will retry
1531	 this optimization in the next pass, causing an infinite loop.
1532
1533 The present code prevents this by putting the jump after the
1534 label, but this is not logically correct. */
1535 #if 0
1536 else if (this_is_condjump
1537 /* Safe to skip USE and CLOBBER insns here
1538 since they will not be deleted. */
1539 && (temp = prev_active_insn (insn))
1540 && no_labels_between_p (temp, insn)
1541 && GET_CODE (temp) == INSN
1542 && GET_CODE (PATTERN (temp)) == SET
1543 #ifdef HAVE_cc0
1544 && sets_cc0_p (PATTERN (temp)) == 1
1545 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1546 #else
1547		     /* Temp must be a compare insn; we cannot accept a register-
1548			to-register move here, since it may not be simply a
1549 tst insn. */
1550 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1551 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1552 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1553 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1554 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1555 #endif
1556 /* May skip USE or CLOBBER insns here
1557			when checking for the opportunity, since we
1558 take care of them later. */
1559 && (temp1 = prev_active_insn (temp))
1560 && GET_CODE (temp1) == INSN
1561 && GET_CODE (PATTERN (temp1)) == SET
1562 #ifdef HAVE_cc0
1563 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1564 #else
1565 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1566 == SET_DEST (PATTERN (temp1)))
1567 #endif
1568 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1569 /* If this isn't true, cse will do the job. */
1570 && ! no_labels_between_p (temp1, temp))
1571 {
1572 /* Get the if_then_else from the condjump. */
1573 rtx choice = SET_SRC (PATTERN (insn));
1574 if (GET_CODE (choice) == IF_THEN_ELSE
1575 && (GET_CODE (XEXP (choice, 0)) == EQ
1576 || GET_CODE (XEXP (choice, 0)) == NE))
1577 {
1578 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1579 rtx last_insn;
1580 rtx ultimate;
1581 rtx p;
1582
1583 /* Get the place that condjump will jump to
1584 if it is reached from here. */
1585 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1586 == want_nonzero)
1587 ultimate = XEXP (choice, 1);
1588 else
1589 ultimate = XEXP (choice, 2);
1590 /* Get it as a CODE_LABEL. */
1591 if (ultimate == pc_rtx)
1592 ultimate = get_label_after (insn);
1593 else
1594 /* Get the label out of the LABEL_REF. */
1595 ultimate = XEXP (ultimate, 0);
1596
1597 /* Insert the jump immediately before TEMP, specifically
1598 after the label that is between TEMP1 and TEMP. */
1599 last_insn = PREV_INSN (temp);
1600
1601 /* If we would be branching to the next insn, the jump
1602		     would immediately be deleted and then re-inserted in
1603 a subsequent pass over the code. So don't do anything
1604 in that case. */
1605 if (next_active_insn (last_insn)
1606 != next_active_insn (ultimate))
1607 {
1608 emit_barrier_after (last_insn);
1609 p = emit_jump_insn_after (gen_jump (ultimate),
1610 last_insn);
1611 JUMP_LABEL (p) = ultimate;
1612 ++LABEL_NUSES (ultimate);
1613 if (INSN_UID (ultimate) < max_jump_chain
1614 && INSN_CODE (p) < max_jump_chain)
1615 {
1616 jump_chain[INSN_UID (p)]
1617 = jump_chain[INSN_UID (ultimate)];
1618 jump_chain[INSN_UID (ultimate)] = p;
1619 }
1620 changed = 1;
1621 continue;
1622 }
1623 }
1624 }
1625 #endif
1626 /* Detect a conditional jump going to the same place
1627 as an immediately following unconditional jump. */
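/* For illustration (schematic RTL, not from any particular target),
   the situation handled here looks roughly like:

	(jump_insn (set (pc) (if_then_else COND (label_ref L1) (pc))))
	(jump_insn (set (pc) (label_ref L2)))

   where next_active_insn (L1) == next_active_insn (L2).  Control
   reaches the same insn whichever way the condition goes, so the
   conditional jump can simply be deleted.  */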
1628 else if (this_is_condjump
1629 && (temp = next_active_insn (insn)) != 0
1630 && simplejump_p (temp)
1631 && (next_active_insn (JUMP_LABEL (insn))
1632 == next_active_insn (JUMP_LABEL (temp))))
1633 {
1634 rtx tem = temp;
1635
1636 /* ??? Optional. Disables some optimizations, but makes
1637 gcov output more accurate with -O. */
1638 if (flag_test_coverage && !reload_completed)
1639 for (tem = insn; tem != temp; tem = NEXT_INSN (tem))
1640 if (GET_CODE (tem) == NOTE && NOTE_LINE_NUMBER (tem) > 0)
1641 break;
1642
1643 if (tem == temp)
1644 {
1645 delete_jump (insn);
1646 changed = 1;
1647 continue;
1648 }
1649 }
1650 #ifdef HAVE_trap
1651 /* Detect a conditional jump jumping over an unconditional trap. */
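/* Schematically (for illustration only), this turns

	insn:	if (COND) goto L;
		trap;
	L:	...

   into

		if (! COND) trap;
	L:	...

   assuming the comparison can be reversed and the target provides a
   matching conditional trap pattern.  */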
1652 else if (HAVE_trap
1653 && this_is_condjump && ! this_is_simplejump
1654 && reallabelprev != 0
1655 && GET_CODE (reallabelprev) == INSN
1656 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1657 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1658 && prev_active_insn (reallabelprev) == insn
1659 && no_labels_between_p (insn, reallabelprev)
1660 && (temp2 = get_condition (insn, &temp4))
1661 && can_reverse_comparison_p (temp2, insn))
1662 {
1663 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1664 XEXP (temp2, 0), XEXP (temp2, 1),
1665 TRAP_CODE (PATTERN (reallabelprev)));
1666
1667 if (new)
1668 {
1669 emit_insn_before (new, temp4);
1670 delete_insn (reallabelprev);
1671 delete_jump (insn);
1672 changed = 1;
1673 continue;
1674 }
1675 }
1676 /* Detect a jump jumping to an unconditional trap. */
1677 else if (HAVE_trap && this_is_condjump
1678 && (temp = next_active_insn (JUMP_LABEL (insn)))
1679 && GET_CODE (temp) == INSN
1680 && GET_CODE (PATTERN (temp)) == TRAP_IF
1681 && (this_is_simplejump
1682 || (temp2 = get_condition (insn, &temp4))))
1683 {
1684 rtx tc = TRAP_CONDITION (PATTERN (temp));
1685
1686 if (tc == const_true_rtx
1687 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1688 {
1689 rtx new;
1690 /* Replace an unconditional jump to a trap with a trap. */
1691 if (this_is_simplejump)
1692 {
1693 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1694 delete_jump (insn);
1695 changed = 1;
1696 continue;
1697 }
1698 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1699 XEXP (temp2, 1),
1700 TRAP_CODE (PATTERN (temp)));
1701 if (new)
1702 {
1703 emit_insn_before (new, temp4);
1704 delete_jump (insn);
1705 changed = 1;
1706 continue;
1707 }
1708 }
1709 /* If the trap condition and jump condition are mutually
1710 exclusive, redirect the jump to the following insn. */
1711 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1712 && ! this_is_simplejump
1713 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
1714 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
1715 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
1716 && redirect_jump (insn, get_label_after (temp)))
1717 {
1718 changed = 1;
1719 continue;
1720 }
1721 }
1722 #endif
1723
1724 /* Detect a conditional jump jumping over an unconditional jump. */
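/* Schematically (illustration only):

	insn:	if (COND) goto L;
		goto L2;
	L:	...

   becomes

		if (! COND) goto L2;
	L:	...

   by inverting the condition, retargeting INSN at L2, and deleting
   the unconditional jump.  */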
1725
1726 else if ((this_is_condjump || this_is_condjump_in_parallel)
1727 && ! this_is_simplejump
1728 && reallabelprev != 0
1729 && GET_CODE (reallabelprev) == JUMP_INSN
1730 && prev_active_insn (reallabelprev) == insn
1731 && no_labels_between_p (insn, reallabelprev)
1732 && simplejump_p (reallabelprev))
1733 {
1734 /* When we invert the conditional jump, we will be decrementing
1735 the usage count of its old label.
1736 Make sure that we don't delete it now because that
1737 might cause the following code to be deleted. */
1738 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1739 rtx prev_label = JUMP_LABEL (insn);
1740
1741 if (prev_label)
1742 ++LABEL_NUSES (prev_label);
1743
1744 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1745 {
1746 /* It is very likely that if there are USE insns before
1747 this jump, they hold REG_DEAD notes. These REG_DEAD
1748 notes are no longer valid due to this optimization,
1749 and will cause the life analysis of following passes
1750 (notably delayed-branch scheduling) to think that
1751 these registers are dead when they are not.
1752
1753 To prevent this trouble, we just remove the USE insns
1754 from the insn chain. */
1755
1756 while (prev_uses && GET_CODE (prev_uses) == INSN
1757 && GET_CODE (PATTERN (prev_uses)) == USE)
1758 {
1759 rtx useless = prev_uses;
1760 prev_uses = prev_nonnote_insn (prev_uses);
1761 delete_insn (useless);
1762 }
1763
1764 delete_insn (reallabelprev);
1765 next = insn;
1766 changed = 1;
1767 }
1768
1769 /* We can now safely delete the label if it is unreferenced
1770 since the delete_insn above has deleted the BARRIER. */
1771 if (prev_label && --LABEL_NUSES (prev_label) == 0)
1772 delete_insn (prev_label);
1773 continue;
1774 }
1775 else
1776 {
1777 /* Detect a jump to a jump. */
1778
1779 nlabel = follow_jumps (JUMP_LABEL (insn));
1780 if (nlabel != JUMP_LABEL (insn)
1781 && redirect_jump (insn, nlabel))
1782 {
1783 changed = 1;
1784 next = insn;
1785 }
1786
1787 /* Look for if (foo) bar; else break; */
1788 /* The insns look like this:
1789 insn = condjump label1;
1790 ...range1 (some insns)...
1791 jump label2;
1792 label1:
1793 ...range2 (some insns)...
1794 jump somewhere unconditionally
1795 label2: */
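/* After the transformation (for illustration), the ranges have been
   exchanged and the condition inverted:
   insn = inverted condjump label1;
   ...range2 (some insns)...
   jump somewhere unconditionally
   label1:
   ...range1 (some insns)...
   jump label2;
   label2:
   so the same insns still execute on each path.  */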
1796 {
1797 rtx label1 = next_label (insn);
1798 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1799 /* Don't do this optimization on the first round, so that
1800 jump-around-a-jump gets simplified before we ask here
1801 whether a jump is unconditional.
1802
1803 Also don't do it when we are called after reload since
1804 it will confuse reorg. */
1805 if (! first
1806 && (reload_completed ? ! flag_delayed_branch : 1)
1807 /* Make sure INSN is something we can invert. */
1808 && condjump_p (insn)
1809 && label1 != 0
1810 && JUMP_LABEL (insn) == label1
1811 && LABEL_NUSES (label1) == 1
1812 && GET_CODE (range1end) == JUMP_INSN
1813 && simplejump_p (range1end))
1814 {
1815 rtx label2 = next_label (label1);
1816 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1817 if (range1end != range2end
1818 && JUMP_LABEL (range1end) == label2
1819 && GET_CODE (range2end) == JUMP_INSN
1820 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1821 /* Invert the jump condition, so we
1822 still execute the same insns in each case. */
1823 && invert_jump (insn, label1))
1824 {
1825 rtx range1beg = next_active_insn (insn);
1826 rtx range2beg = next_active_insn (label1);
1827 rtx range1after, range2after;
1828 rtx range1before, range2before;
1829 rtx rangenext;
1830
1831 /* Include in each range any notes before it, to be
1832 sure that we get the line number note if any, even
1833 if there are other notes here. */
1834 while (PREV_INSN (range1beg)
1835 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
1836 range1beg = PREV_INSN (range1beg);
1837
1838 while (PREV_INSN (range2beg)
1839 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
1840 range2beg = PREV_INSN (range2beg);
1841
1842 /* Don't move NOTEs for blocks or loops; shift them
1843 outside the ranges, where they'll stay put. */
1844 range1beg = squeeze_notes (range1beg, range1end);
1845 range2beg = squeeze_notes (range2beg, range2end);
1846
1847 /* Get current surrounds of the 2 ranges. */
1848 range1before = PREV_INSN (range1beg);
1849 range2before = PREV_INSN (range2beg);
1850 range1after = NEXT_INSN (range1end);
1851 range2after = NEXT_INSN (range2end);
1852
1853 /* Splice range2 where range1 was. */
1854 NEXT_INSN (range1before) = range2beg;
1855 PREV_INSN (range2beg) = range1before;
1856 NEXT_INSN (range2end) = range1after;
1857 PREV_INSN (range1after) = range2end;
1858 /* Splice range1 where range2 was. */
1859 NEXT_INSN (range2before) = range1beg;
1860 PREV_INSN (range1beg) = range2before;
1861 NEXT_INSN (range1end) = range2after;
1862 PREV_INSN (range2after) = range1end;
1863
1864 /* Check for a loop end note between the end of
1865 range2, and the next code label. If there is one,
1866 then what we have really seen is
1867 if (foo) break; end_of_loop;
1868 and moved the break sequence outside the loop.
1869 We must move the LOOP_END note to where the
1870 loop really ends now, or we will confuse loop
1871 optimization. Stop if we find a LOOP_BEG note
1872 first, since we don't want to move the LOOP_END
1873 note in that case. */
1874 for (;range2after != label2; range2after = rangenext)
1875 {
1876 rangenext = NEXT_INSN (range2after);
1877 if (GET_CODE (range2after) == NOTE)
1878 {
1879 if (NOTE_LINE_NUMBER (range2after)
1880 == NOTE_INSN_LOOP_END)
1881 {
1882 NEXT_INSN (PREV_INSN (range2after))
1883 = rangenext;
1884 PREV_INSN (rangenext)
1885 = PREV_INSN (range2after);
1886 PREV_INSN (range2after)
1887 = PREV_INSN (range1beg);
1888 NEXT_INSN (range2after) = range1beg;
1889 NEXT_INSN (PREV_INSN (range1beg))
1890 = range2after;
1891 PREV_INSN (range1beg) = range2after;
1892 }
1893 else if (NOTE_LINE_NUMBER (range2after)
1894 == NOTE_INSN_LOOP_BEG)
1895 break;
1896 }
1897 }
1898 changed = 1;
1899 continue;
1900 }
1901 }
1902 }
1903
1904 /* Now that the jump has been tensioned,
1905 try cross jumping: check for identical code
1906 before the jump and before its target label. */
1907
1908 /* First, cross jumping of conditional jumps: */
1909
1910 if (cross_jump && condjump_p (insn))
1911 {
1912 rtx newjpos, newlpos;
1913 rtx x = prev_real_insn (JUMP_LABEL (insn));
1914
1915 /* A conditional jump may be crossjumped
1916 only if the place it jumps to follows
1917 an opposing jump that comes back here. */
1918
1919 if (x != 0 && ! jump_back_p (x, insn))
1920 /* We have no opposing jump;
1921 cannot cross jump this insn. */
1922 x = 0;
1923
1924 newjpos = 0;
1925 /* X is nonzero if it is ok to cross jump
1926 to code before X. If so, see if the insns match. */
1927 if (x != 0)
1928 find_cross_jump (insn, x, 2,
1929 &newjpos, &newlpos);
1930
1931 if (newjpos != 0)
1932 {
1933 do_cross_jump (insn, newjpos, newlpos);
1934 /* Make the old conditional jump
1935 into an unconditional one. */
1936 SET_SRC (PATTERN (insn))
1937 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
1938 INSN_CODE (insn) = -1;
1939 emit_barrier_after (insn);
1940 /* Add to jump_chain unless this is a new label
1941 whose UID is too large. */
1942 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
1943 {
1944 jump_chain[INSN_UID (insn)]
1945 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1946 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
1947 }
1948 changed = 1;
1949 next = insn;
1950 }
1951 }
1952
1953 /* Cross jumping of unconditional jumps:
1954 a few differences. */
1955
1956 if (cross_jump && simplejump_p (insn))
1957 {
1958 rtx newjpos, newlpos;
1959 rtx target;
1960
1961 newjpos = 0;
1962
1963 /* See if it is ok to cross jump to code before this
1964 jump's target label, and whether the insns match. */
1965 find_cross_jump (insn, JUMP_LABEL (insn), 1,
1966 &newjpos, &newlpos);
1967
1968 /* If we cannot cross jump to code before the label,
1969 see if we can cross jump to another jump to
1970 the same label. */
1971 /* Try each other jump to this label. */
1972 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
1973 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1974 target != 0 && newjpos == 0;
1975 target = jump_chain[INSN_UID (target)])
1976 if (target != insn
1977 && JUMP_LABEL (target) == JUMP_LABEL (insn)
1978 /* Ignore TARGET if it's deleted. */
1979 && ! INSN_DELETED_P (target))
1980 find_cross_jump (insn, target, 2,
1981 &newjpos, &newlpos);
1982
1983 if (newjpos != 0)
1984 {
1985 do_cross_jump (insn, newjpos, newlpos);
1986 changed = 1;
1987 next = insn;
1988 }
1989 }
1990
1991 /* This code was dead in the previous jump.c! */
1992 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
1993 {
1994 /* Return insns all "jump to the same place"
1995 so we can cross-jump between any two of them. */
1996
1997 rtx newjpos, newlpos, target;
1998
1999 newjpos = 0;
2000
2001 /* There is no label here; instead, see if we can
2002 cross jump to another RETURN insn, since all
2003 returns jump to the same place. */
2004 /* Try each other RETURN, chained through jump_chain[0]. */
2005 for (target = jump_chain[0];
2006 target != 0 && newjpos == 0;
2007 target = jump_chain[INSN_UID (target)])
2008 if (target != insn
2009 && ! INSN_DELETED_P (target)
2010 && GET_CODE (PATTERN (target)) == RETURN)
2011 find_cross_jump (insn, target, 2,
2012 &newjpos, &newlpos);
2013
2014 if (newjpos != 0)
2015 {
2016 do_cross_jump (insn, newjpos, newlpos);
2017 changed = 1;
2018 next = insn;
2019 }
2020 }
2021 }
2022 }
2023
2024 first = 0;
2025 }
2026
2027 /* Delete extraneous line number notes.
2028 Note that two consecutive notes for different lines are not really
2029 extraneous. There should be some indication where that line belonged,
2030 even if it became empty. */
2031
2032 {
2033 rtx last_note = 0;
2034
2035 for (insn = f; insn; insn = NEXT_INSN (insn))
2036 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2037 {
2038 /* Delete this note if it is identical to previous note. */
2039 if (last_note
2040 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2041 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2042 {
2043 delete_insn (insn);
2044 continue;
2045 }
2046
2047 last_note = insn;
2048 }
2049 }
2050
2051 #ifdef HAVE_return
2052 if (HAVE_return)
2053 {
2054 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2055 in front of it. If the machine allows it at this point (we might be
2056 after reload for a leaf routine), it will improve optimization for it
2057 to be there. We do this both here and at the start of this pass since
2058 the RETURN might have been deleted by some of our optimizations. */
2059 insn = get_last_insn ();
2060 while (insn && GET_CODE (insn) == NOTE)
2061 insn = PREV_INSN (insn);
2062
2063 if (insn && GET_CODE (insn) != BARRIER)
2064 {
2065 emit_jump_insn (gen_return ());
2066 emit_barrier ();
2067 }
2068 }
2069 #endif
2070
2071 can_reach_end = calculate_can_reach_end (last_insn, 0, 1);
2072
2073 /* Show JUMP_CHAIN no longer valid. */
2074 jump_chain = 0;
2075 }
2076 \f
2077 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2078 notes whose labels don't occur in the insn any more. Returns the
2079 largest INSN_UID found. */
2080 static int
2081 init_label_info (f)
2082 rtx f;
2083 {
2084 int largest_uid = 0;
2085 rtx insn;
2086
2087 for (insn = f; insn; insn = NEXT_INSN (insn))
2088 {
2089 if (GET_CODE (insn) == CODE_LABEL)
2090 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2091 else if (GET_CODE (insn) == JUMP_INSN)
2092 JUMP_LABEL (insn) = 0;
2093 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2094 {
2095 rtx note, next;
2096
2097 for (note = REG_NOTES (insn); note; note = next)
2098 {
2099 next = XEXP (note, 1);
2100 if (REG_NOTE_KIND (note) == REG_LABEL
2101 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2102 remove_note (insn, note);
2103 }
2104 }
2105 if (INSN_UID (insn) > largest_uid)
2106 largest_uid = INSN_UID (insn);
2107 }
2108
2109 return largest_uid;
2110 }
2111
2112 /* Delete insns following barriers, up to next label.
2113
2114 Also delete no-op jumps created by gcse. */
2115 static void
2116 delete_barrier_successors (f)
2117 rtx f;
2118 {
2119 rtx insn;
2120
2121 for (insn = f; insn;)
2122 {
2123 if (GET_CODE (insn) == BARRIER)
2124 {
2125 insn = NEXT_INSN (insn);
2126 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2127 {
2128 if (GET_CODE (insn) == NOTE
2129 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2130 insn = NEXT_INSN (insn);
2131 else
2132 insn = delete_insn (insn);
2133 }
2134 /* INSN is now the code_label. */
2135 }
2136 /* Also remove (set (pc) (pc)) insns which can be created by
2137 gcse. We eliminate such insns now to avoid having them
2138 cause problems later. */
2139 else if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2140 && SET_SRC (PATTERN (insn)) == pc_rtx
2141 && SET_DEST (PATTERN (insn)) == pc_rtx)
2142 insn = delete_insn (insn);
2143
2144 else
2145 insn = NEXT_INSN (insn);
2146 }
2147 }
2148
2149 /* Mark the label each jump jumps to.
2150 Combine consecutive labels, and count uses of labels.
2151
2152 For each label, make a chain (using `jump_chain')
2153 of all the *unconditional* jumps that jump to it;
2154 also make a chain of all returns.
2155
2156 CROSS_JUMP indicates whether we are doing cross jumping
2157 and, if we are, whether we will be paying attention to
2158 death notes. */
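/* Illustration of the chain structure (with hypothetical insns): if
   unconditional jumps J1 and then J2 are seen jumping to label L,
   afterwards jump_chain[INSN_UID (L)] == J2,
   jump_chain[INSN_UID (J2)] == J1 and jump_chain[INSN_UID (J1)] == 0.
   RETURN insns are chained the same way through jump_chain[0].  */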
2159
2160 static void
2161 mark_all_labels (f, cross_jump)
2162 rtx f;
2163 int cross_jump;
2164 {
2165 rtx insn;
2166
2167 for (insn = f; insn; insn = NEXT_INSN (insn))
2168 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2169 {
2170 mark_jump_label (PATTERN (insn), insn, cross_jump);
2171 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2172 {
2173 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2174 {
2175 jump_chain[INSN_UID (insn)]
2176 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2177 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2178 }
2179 if (GET_CODE (PATTERN (insn)) == RETURN)
2180 {
2181 jump_chain[INSN_UID (insn)] = jump_chain[0];
2182 jump_chain[0] = insn;
2183 }
2184 }
2185 }
2186 }
2187
2188 /* Delete all labels that are no longer referenced.
2189 Also find and return the last insn. */
2190
2191 static rtx
2192 delete_unreferenced_labels (f)
2193 rtx f;
2194 {
2195 rtx final = NULL_RTX;
2196 rtx insn;
2197
2198 for (insn = f; insn; )
2199 {
2200 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
2201 insn = delete_insn (insn);
2202 else
2203 {
2204 final = insn;
2205 insn = NEXT_INSN (insn);
2206 }
2207 }
2208
2209 return final;
2210 }
2211
2212 /* Delete various simple forms of moves which have no necessary
2213 side effect. */
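/* E.g. (illustration): (set (reg 3) (reg 3)) performs no work, as does
   a move whose source and destination were allocated to the same hard
   register; both can be removed, with some care taken for REG_DEAD
   notes that later passes still rely upon.  */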
2214
2215 static void
2216 delete_noop_moves (f)
2217 rtx f;
2218 {
2219 rtx insn, next;
2220
2221 for (insn = f; insn; )
2222 {
2223 next = NEXT_INSN (insn);
2224
2225 if (GET_CODE (insn) == INSN)
2226 {
2227 register rtx body = PATTERN (insn);
2228
2229 /* Combine stack_adjusts with following push_insns. */
2230 #ifdef PUSH_ROUNDING
2231 if (GET_CODE (body) == SET
2232 && SET_DEST (body) == stack_pointer_rtx
2233 && GET_CODE (SET_SRC (body)) == PLUS
2234 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
2235 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
2236 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
2237 {
2238 rtx p;
2239 rtx stack_adjust_insn = insn;
2240 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
2241 int total_pushed = 0;
2242 int pushes = 0;
2243
2244 /* Find all successive push insns. */
2245 p = insn;
2246 /* Don't convert more than three pushes;
2247 that starts adding too many displaced addresses
2248 and the whole thing starts becoming a losing
2249 proposition. */
2250 while (pushes < 3)
2251 {
2252 rtx pbody, dest;
2253 p = next_nonnote_insn (p);
2254 if (p == 0 || GET_CODE (p) != INSN)
2255 break;
2256 pbody = PATTERN (p);
2257 if (GET_CODE (pbody) != SET)
2258 break;
2259 dest = SET_DEST (pbody);
2260 /* Allow a no-op move between the adjust and the push. */
2261 if (GET_CODE (dest) == REG
2262 && GET_CODE (SET_SRC (pbody)) == REG
2263 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2264 continue;
2265 if (! (GET_CODE (dest) == MEM
2266 && GET_CODE (XEXP (dest, 0)) == POST_INC
2267 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2268 break;
2269 pushes++;
2270 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
2271 > stack_adjust_amount)
2272 break;
2273 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2274 }
2275
2276 /* Discard the amount pushed from the stack adjust;
2277 maybe eliminate it entirely. */
2278 if (total_pushed >= stack_adjust_amount)
2279 {
2280 delete_computation (stack_adjust_insn);
2281 total_pushed = stack_adjust_amount;
2282 }
2283 else
2284 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
2285 = GEN_INT (stack_adjust_amount - total_pushed);
2286
2287 /* Change the appropriate push insns to ordinary stores. */
2288 p = insn;
2289 while (total_pushed > 0)
2290 {
2291 rtx pbody, dest;
2292 p = next_nonnote_insn (p);
2293 if (GET_CODE (p) != INSN)
2294 break;
2295 pbody = PATTERN (p);
2296 if (GET_CODE (pbody) != SET)
2297 break;
2298 dest = SET_DEST (pbody);
2299 /* Allow a no-op move between the adjust and the push. */
2300 if (GET_CODE (dest) == REG
2301 && GET_CODE (SET_SRC (pbody)) == REG
2302 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2303 continue;
2304 if (! (GET_CODE (dest) == MEM
2305 && GET_CODE (XEXP (dest, 0)) == POST_INC
2306 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2307 break;
2308 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2309 /* If this push doesn't fully fit in the space
2310 of the stack adjust that we deleted,
2311 make another stack adjust here for what we
2312 didn't use up. There should be peepholes
2313 to recognize the resulting sequence of insns. */
2314 if (total_pushed < 0)
2315 {
2316 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
2317 GEN_INT (- total_pushed)),
2318 p);
2319 break;
2320 }
2321 XEXP (dest, 0)
2322 = plus_constant (stack_pointer_rtx, total_pushed);
2323 }
2324 }
2325 #endif
2326
2327 /* Detect and delete no-op move instructions
2328 resulting from not allocating a parameter in a register. */
2329
2330 if (GET_CODE (body) == SET
2331 && (SET_DEST (body) == SET_SRC (body)
2332 || (GET_CODE (SET_DEST (body)) == MEM
2333 && GET_CODE (SET_SRC (body)) == MEM
2334 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2335 && ! (GET_CODE (SET_DEST (body)) == MEM
2336 && MEM_VOLATILE_P (SET_DEST (body)))
2337 && ! (GET_CODE (SET_SRC (body)) == MEM
2338 && MEM_VOLATILE_P (SET_SRC (body))))
2339 delete_computation (insn);
2340
2341 /* Detect and ignore no-op move instructions
2342 resulting from smart or fortuitous register allocation. */
2343
2344 else if (GET_CODE (body) == SET)
2345 {
2346 int sreg = true_regnum (SET_SRC (body));
2347 int dreg = true_regnum (SET_DEST (body));
2348
2349 if (sreg == dreg && sreg >= 0)
2350 delete_insn (insn);
2351 else if (sreg >= 0 && dreg >= 0)
2352 {
2353 rtx trial;
2354 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2355 sreg, NULL_PTR, dreg,
2356 GET_MODE (SET_SRC (body)));
2357
2358 if (tem != 0
2359 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2360 {
2361 /* DREG may have been the target of a REG_DEAD note in
2362 the insn which makes INSN redundant. If so, reorg
2363 would still think it is dead. So search for such a
2364 note and delete it if we find it. */
2365 if (! find_regno_note (insn, REG_UNUSED, dreg))
2366 for (trial = prev_nonnote_insn (insn);
2367 trial && GET_CODE (trial) != CODE_LABEL;
2368 trial = prev_nonnote_insn (trial))
2369 if (find_regno_note (trial, REG_DEAD, dreg))
2370 {
2371 remove_death (dreg, trial);
2372 break;
2373 }
2374
2375 /* Deleting insn could lose a death-note for SREG. */
2376 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2377 {
2378 /* Change this into a USE so that we won't emit
2379 code for it, but still can keep the note. */
2380 PATTERN (insn)
2381 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2382 INSN_CODE (insn) = -1;
2383 /* Remove all reg notes but the REG_DEAD one. */
2384 REG_NOTES (insn) = trial;
2385 XEXP (trial, 1) = NULL_RTX;
2386 }
2387 else
2388 delete_insn (insn);
2389 }
2390 }
2391 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2392 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2393 NULL_PTR, 0,
2394 GET_MODE (SET_DEST (body))))
2395 {
2396 /* This handles the case where we have two consecutive
2397 assignments of the same constant to pseudos that didn't
2398 get a hard reg. Each SET from the constant will be
2399 converted into a SET of the spill register and an
2400 output reload will be made following it. This produces
2401 two loads of the same constant into the same spill
2402 register. */
2403
2404 rtx in_insn = insn;
2405
2406 /* Look back for a death note for the first reg.
2407 If there is one, it is no longer accurate. */
2408 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2409 {
2410 if ((GET_CODE (in_insn) == INSN
2411 || GET_CODE (in_insn) == JUMP_INSN)
2412 && find_regno_note (in_insn, REG_DEAD, dreg))
2413 {
2414 remove_death (dreg, in_insn);
2415 break;
2416 }
2417 in_insn = PREV_INSN (in_insn);
2418 }
2419
2420 /* Delete the second load of the value. */
2421 delete_insn (insn);
2422 }
2423 }
2424 else if (GET_CODE (body) == PARALLEL)
2425 {
2426 /* If each part is a set between two identical registers or
2427 a USE or CLOBBER, delete the insn. */
2428 int i, sreg, dreg;
2429 rtx tem;
2430
2431 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2432 {
2433 tem = XVECEXP (body, 0, i);
2434 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2435 continue;
2436
2437 if (GET_CODE (tem) != SET
2438 || (sreg = true_regnum (SET_SRC (tem))) < 0
2439 || (dreg = true_regnum (SET_DEST (tem))) < 0
2440 || dreg != sreg)
2441 break;
2442 }
2443
2444 if (i < 0)
2445 delete_insn (insn);
2446 }
2447 /* Also delete insns to store bit fields if they are no-ops. */
2448 /* Not worth the hair to detect this in the big-endian case. */
2449 else if (! BYTES_BIG_ENDIAN
2450 && GET_CODE (body) == SET
2451 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2452 && XEXP (SET_DEST (body), 2) == const0_rtx
2453 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2454 && ! (GET_CODE (SET_SRC (body)) == MEM
2455 && MEM_VOLATILE_P (SET_SRC (body))))
2456 delete_insn (insn);
2457 }
2458 insn = next;
2459 }
2460 }
2461
2462 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2463 If so, indicate that this function can drop off the end by returning
2464 1, else return 0.
2465
2466 CHECK_DELETED indicates whether we must check if the note being
2467 searched for has the deleted flag set.
2468
2469 DELETE_FINAL_NOTE indicates whether we should delete the note
2470 if we find it. */
2471
2472 static int
2473 calculate_can_reach_end (last, check_deleted, delete_final_note)
2474 rtx last;
2475 int check_deleted;
2476 int delete_final_note;
2477 {
2478 rtx insn = last;
2479 int n_labels = 1;
2480
2481 while (insn != NULL_RTX)
2482 {
2483 int ok = 0;
2484
2485 /* One label can follow the end-note: the return label. */
2486 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2487 ok = 1;
2488 /* Ordinary insns can follow it if returning a structure. */
2489 else if (GET_CODE (insn) == INSN)
2490 ok = 1;
2491 /* If the machine uses explicit RETURN insns and has no epilogue,
2492 then one of them follows the note. */
2493 else if (GET_CODE (insn) == JUMP_INSN
2494 && GET_CODE (PATTERN (insn)) == RETURN)
2495 ok = 1;
2496 /* A barrier can follow the return insn. */
2497 else if (GET_CODE (insn) == BARRIER)
2498 ok = 1;
2499 /* Other kinds of notes can follow also. */
2500 else if (GET_CODE (insn) == NOTE
2501 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2502 ok = 1;
2503
2504 if (ok != 1)
2505 break;
2506
2507 insn = PREV_INSN (insn);
2508 }
2509
2510 /* See if we backed up to the appropriate type of note. */
2511 if (insn != NULL_RTX
2512 && GET_CODE (insn) == NOTE
2513 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
2514 && (check_deleted == 0
2515 || ! INSN_DELETED_P (insn)))
2516 {
2517 if (delete_final_note)
2518 delete_insn (insn);
2519 return 1;
2520 }
2521
2522 return 0;
2523 }
2524
2525 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2526 jump. Assume that this unconditional jump is to the exit test code. If
2527 the code is sufficiently simple, make a copy of it before LOOP_START,
2528 followed by a jump to the exit of the loop. Then delete the unconditional
2529 jump after LOOP_START.
2530
2531 Return 1 if we made the change, else 0.
2532
2533 This is only safe immediately after a regscan pass because it uses the
2534 values of regno_first_uid and regno_last_uid. */
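/* Schematically (illustration only), a loop laid out as

	NOTE_INSN_LOOP_BEG
	jump Ltest
   Lbody:
	...body...
   Ltest:
	...exit test; conditional jump to Lbody...
	NOTE_INSN_LOOP_END

   becomes

	...copy of exit test; conditional jump to Lbody...
	jump past NOTE_INSN_LOOP_END
	NOTE_INSN_LOOP_BEG
   Lbody:
	...body...
   Ltest:
	NOTE_INSN_LOOP_VTOP
	...exit test; conditional jump to Lbody...
	NOTE_INSN_LOOP_END

   so that each iteration executes only one jump.  */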
2535
2536 static int
2537 duplicate_loop_exit_test (loop_start)
2538 rtx loop_start;
2539 {
2540 rtx insn, set, reg, p, link;
2541 rtx copy = 0;
2542 int num_insns = 0;
2543 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2544 rtx lastexit;
2545 int max_reg = max_reg_num ();
2546 rtx *reg_map = 0;
2547
2548 /* Scan the exit code. We do not perform this optimization if any insn:
2549
2550 is a CALL_INSN
2551 is a CODE_LABEL
2552 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2553 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2554 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2555 is not valid.
2556
2557 We also do not do this if we find an insn with ASM_OPERANDS. While
2558 this restriction should not be necessary, copying an insn with
2559 ASM_OPERANDS can confuse asm_noperands in some cases.
2560
2561 Also, don't do this if the exit code is more than 20 insns. */
2562
2563 for (insn = exitcode;
2564 insn
2565 && ! (GET_CODE (insn) == NOTE
2566 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2567 insn = NEXT_INSN (insn))
2568 {
2569 switch (GET_CODE (insn))
2570 {
2571 case CODE_LABEL:
2572 case CALL_INSN:
2573 return 0;
2574 case NOTE:
2575 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2576 a jump immediately after the loop start that branches outside
2577 the loop but within an outer loop, near the exit test.
2578 If we copied this exit test and created a phony
2579 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2580 before the exit test look as if they could safely be moved
2581 out of the loop even though they may never actually be executed.
2582 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2583
2584 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2585 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2586 return 0;
2587
2588 if (optimize < 2
2589 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2590 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2591 /* If we were to duplicate this code, we would not move
2592 the BLOCK notes, and so debugging the moved code would
2593 be difficult. Thus, we only move the code with -O2 or
2594 higher. */
2595 return 0;
2596
2597 break;
2598 case JUMP_INSN:
2599 case INSN:
2600 /* The code below would grossly mishandle REG_WAS_0 notes,
2601 so get rid of them here. */
2602 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2603 remove_note (insn, p);
2604 if (++num_insns > 20
2605 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2606 || find_reg_note (insn, REG_LIBCALL, NULL_RTX)
2607 || asm_noperands (PATTERN (insn)) > 0)
2608 return 0;
2609 break;
2610 default:
2611 break;
2612 }
2613 }
2614
2615 /* Unless INSN is zero, we can do the optimization. */
2616 if (insn == 0)
2617 return 0;
2618
2619 lastexit = insn;
2620
2621 /* See if any insn sets a register only used in the loop exit code and
2622 not a user variable. If so, replace it with a new register. */
2623 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2624 if (GET_CODE (insn) == INSN
2625 && (set = single_set (insn)) != 0
2626 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2627 || (GET_CODE (reg) == SUBREG
2628 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2629 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2630 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
2631 {
2632 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2633 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2634 break;
2635
2636 if (p != lastexit)
2637 {
2638 /* We can do the replacement. Allocate reg_map if this is the
2639 first replacement we found. */
2640 if (reg_map == 0)
2641 {
2642 reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
2643 bzero ((char *) reg_map, max_reg * sizeof (rtx));
2644 }
2645
2646 REG_LOOP_TEST_P (reg) = 1;
2647
2648 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2649 }
2650 }
2651
2652 /* Now copy each insn. */
2653 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2654 switch (GET_CODE (insn))
2655 {
2656 case BARRIER:
2657 copy = emit_barrier_before (loop_start);
2658 break;
2659 case NOTE:
2660 /* Only copy line-number notes. */
2661 if (NOTE_LINE_NUMBER (insn) >= 0)
2662 {
2663 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2664 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2665 }
2666 break;
2667
2668 case INSN:
2669 copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2670 if (reg_map)
2671 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2672
2673 mark_jump_label (PATTERN (copy), copy, 0);
2674
2675 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2676 make them. */
2677 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2678 if (REG_NOTE_KIND (link) != REG_LABEL)
2679 REG_NOTES (copy)
2680 = copy_rtx (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
2681 XEXP (link, 0),
2682 REG_NOTES (copy)));
2683 if (reg_map && REG_NOTES (copy))
2684 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2685 break;
2686
2687 case JUMP_INSN:
2688 copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2689 if (reg_map)
2690 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2691 mark_jump_label (PATTERN (copy), copy, 0);
2692 if (REG_NOTES (insn))
2693 {
2694 REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
2695 if (reg_map)
2696 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2697 }
2698
2699 /* If this is a simple jump, add it to the jump chain. */
2700
2701 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2702 && simplejump_p (copy))
2703 {
2704 jump_chain[INSN_UID (copy)]
2705 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2706 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2707 }
2708 break;
2709
2710 default:
2711 abort ();
2712 }
2713
2714 /* Now clean up by emitting a jump to the end label and deleting the jump
2715 at the start of the loop. */
2716 if (! copy || GET_CODE (copy) != BARRIER)
2717 {
2718 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2719 loop_start);
2720 mark_jump_label (PATTERN (copy), copy, 0);
2721 if (INSN_UID (copy) < max_jump_chain
2722 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2723 {
2724 jump_chain[INSN_UID (copy)]
2725 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2726 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2727 }
2728 emit_barrier_before (loop_start);
2729 }
2730
2731 /* Mark the exit code as the virtual top of the converted loop. */
2732 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2733
2734 delete_insn (next_nonnote_insn (loop_start));
2735
2736 return 1;
2737 }
2738 \f
2739 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2740 loop-end notes between START and END out before START. Assume that
2741 END is not such a note. START may be such a note. Returns the value
2742 of the new starting insn, which may be different if the original start
2743 was such a note. */
2744
2745 rtx
2746 squeeze_notes (start, end)
2747 rtx start, end;
2748 {
2749 rtx insn;
2750 rtx next;
2751
2752 for (insn = start; insn != end; insn = next)
2753 {
2754 next = NEXT_INSN (insn);
2755 if (GET_CODE (insn) == NOTE
2756 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2757 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2758 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2759 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2760 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2761 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2762 {
2763 if (insn == start)
2764 start = next;
2765 else
2766 {
2767 rtx prev = PREV_INSN (insn);
2768 PREV_INSN (insn) = PREV_INSN (start);
2769 NEXT_INSN (insn) = start;
2770 NEXT_INSN (PREV_INSN (insn)) = insn;
2771 PREV_INSN (NEXT_INSN (insn)) = insn;
2772 NEXT_INSN (prev) = next;
2773 PREV_INSN (next) = prev;
2774 }
2775 }
2776 }
2777
2778 return start;
2779 }
2780 \f
2781 /* Compare the instructions before insn E1 with those before E2
2782 to find an opportunity for cross jumping.
2783 (This means detecting identical sequences of insns followed by
2784 jumps to the same place, or followed by a label and a jump
2785 to that label, and replacing one with a jump to the other.)
2786
2787 Assume E1 is a jump that jumps to label E2
2788 (that is not always true but it might as well be).
2789 Find the longest possible equivalent sequences
2790 and store the first insns of those sequences into *F1 and *F2.
2791 Store zero there if no equivalent preceding instructions are found.
2792
2793 We give up if we find a label in stream 1.
2794 Actually we could transfer that label into stream 2. */
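/* For example (illustration only): if the insns before jump E1 match
   the insns before its target label E2,

	...  insn A; insn B; goto L	=>   ...  goto M
	...  insn A; insn B; L: ...	     ...  M: insn A; insn B; L: ...

   then the first copies of A and B can be deleted and the jump
   redirected to a new label M before the surviving copies; the actual
   deletion and redirection are done by do_cross_jump, below.  */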
2795
2796 static void
2797 find_cross_jump (e1, e2, minimum, f1, f2)
2798 rtx e1, e2;
2799 int minimum;
2800 rtx *f1, *f2;
2801 {
2802 register rtx i1 = e1, i2 = e2;
2803 register rtx p1, p2;
2804 int lose = 0;
2805
2806 rtx last1 = 0, last2 = 0;
2807 rtx afterlast1 = 0, afterlast2 = 0;
2808
2809 *f1 = 0;
2810 *f2 = 0;
2811
2812 while (1)
2813 {
2814 i1 = prev_nonnote_insn (i1);
2815
2816 i2 = PREV_INSN (i2);
2817 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2818 i2 = PREV_INSN (i2);
2819
2820 if (i1 == 0)
2821 break;
2822
2823 /* Don't allow the range of insns preceding E1 or E2
2824 to include the other (E2 or E1). */
2825 if (i2 == e1 || i1 == e2)
2826 break;
2827
2828 /* If we will get to this code by jumping, those jumps will be
2829 tensioned to go directly to the new label (before I2),
2830 so this cross-jumping won't cost extra. So reduce the minimum. */
2831 if (GET_CODE (i1) == CODE_LABEL)
2832 {
2833 --minimum;
2834 break;
2835 }
2836
2837 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2838 break;
2839
2840 /* Avoid moving insns across EH regions if either of the insns
2841 can throw. */
2842 if (flag_exceptions
2843 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
2844 && !in_same_eh_region (i1, i2))
2845 break;
2846
2847 p1 = PATTERN (i1);
2848 p2 = PATTERN (i2);
2849
2850 /* If this is a CALL_INSN, compare register usage information.
2851 If we don't check this on stack register machines, the two
2852 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2853 numbers of stack registers in the same basic block.
2854 If we don't check this on machines with delay slots, a delay slot may
2855 be filled that clobbers a parameter expected by the subroutine.
2856
2857 ??? We take the simple route for now and assume that if they're
2858 equal, they were constructed identically. */
2859
2860 if (GET_CODE (i1) == CALL_INSN
2861 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2862 CALL_INSN_FUNCTION_USAGE (i2)))
2863 lose = 1;
2864
2865 #ifdef STACK_REGS
2866 /* If cross_jump_death_matters is not 0, the insn's mode
2867 indicates whether or not the insn contains any stack-like
2868 regs. */
2869
2870 if (!lose && cross_jump_death_matters && GET_MODE (i1) == QImode)
2871 {
2872 /* If register stack conversion has already been done, then
2873 death notes must also be compared before it is certain that
2874 the two instruction streams match. */
2875
2876 rtx note;
2877 HARD_REG_SET i1_regset, i2_regset;
2878
2879 CLEAR_HARD_REG_SET (i1_regset);
2880 CLEAR_HARD_REG_SET (i2_regset);
2881
2882 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2883 if (REG_NOTE_KIND (note) == REG_DEAD
2884 && STACK_REG_P (XEXP (note, 0)))
2885 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2886
2887 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2888 if (REG_NOTE_KIND (note) == REG_DEAD
2889 && STACK_REG_P (XEXP (note, 0)))
2890 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2891
2892 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2893
2894 lose = 1;
2895
2896 done:
2897 ;
2898 }
2899 #endif
2900
2901 /* Don't allow old-style asm or volatile extended asms to be accepted
2902 for cross jumping purposes. It is conceptually correct to allow
2903 them, since cross-jumping preserves the dynamic instruction order
2904 even though it is changing the static instruction order. However,
2905 if an asm is being used to emit an assembler pseudo-op, such as
2906 the MIPS `.set reorder' pseudo-op, then the static instruction order
2907 matters and it must be preserved. */
2908 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
2909 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
2910 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
2911 lose = 1;
2912
2913 if (lose || GET_CODE (p1) != GET_CODE (p2)
2914 || ! rtx_renumbered_equal_p (p1, p2))
2915 {
2916 /* The following code helps take care of G++ cleanups. */
2917 rtx equiv1;
2918 rtx equiv2;
2919
2920 if (!lose && GET_CODE (p1) == GET_CODE (p2)
2921 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2922 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2923 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2924 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
2925 /* If the equivalences are not to a constant, they may
2926 reference pseudos that no longer exist, so we can't
2927 use them. */
2928 && CONSTANT_P (XEXP (equiv1, 0))
2929 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2930 {
2931 rtx s1 = single_set (i1);
2932 rtx s2 = single_set (i2);
2933 if (s1 != 0 && s2 != 0
2934 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
2935 {
2936 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
2937 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
2938 if (! rtx_renumbered_equal_p (p1, p2))
2939 cancel_changes (0);
2940 else if (apply_change_group ())
2941 goto win;
2942 }
2943 }
2944
2945 /* Insns fail to match; cross jumping is limited to the following
2946 insns. */
2947
2948 #ifdef HAVE_cc0
2949 /* Don't allow the insn after a compare to be shared by
2950 cross-jumping unless the compare is also shared.
2951 Here, if either of these non-matching insns is a compare,
2952 exclude the following insn from possible cross-jumping. */
2953 if (sets_cc0_p (p1) || sets_cc0_p (p2))
2954 last1 = afterlast1, last2 = afterlast2, ++minimum;
2955 #endif
2956
2957 /* If cross-jumping here will feed a jump-around-jump
2958 optimization, this jump won't cost extra, so reduce
2959 the minimum. */
2960 if (GET_CODE (i1) == JUMP_INSN
2961 && JUMP_LABEL (i1)
2962 && prev_real_insn (JUMP_LABEL (i1)) == e1)
2963 --minimum;
2964 break;
2965 }
2966
2967 win:
2968 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
2969 {
2970 /* Ok, this insn is potentially includable in a cross-jump here. */
2971 afterlast1 = last1, afterlast2 = last2;
2972 last1 = i1, last2 = i2, --minimum;
2973 }
2974 }
2975
2976 if (minimum <= 0 && last1 != 0 && last1 != e1)
2977 *f1 = last1, *f2 = last2;
2978 }
2979
2980 static void
2981 do_cross_jump (insn, newjpos, newlpos)
2982 rtx insn, newjpos, newlpos;
2983 {
2984 /* Find an existing label at this point
2985 or make a new one if there is none. */
2986 register rtx label = get_label_before (newlpos);
2987
2988 /* Make the same jump insn jump to the new point. */
2989 if (GET_CODE (PATTERN (insn)) == RETURN)
2990 {
2991 /* Remove from jump chain of returns. */
2992 delete_from_jump_chain (insn);
2993 /* Change the insn. */
2994 PATTERN (insn) = gen_jump (label);
2995 INSN_CODE (insn) = -1;
2996 JUMP_LABEL (insn) = label;
2997 LABEL_NUSES (label)++;
2998 /* Add it to the new label's jump chain. */
2999 if (INSN_UID (label) < max_jump_chain
3000 && INSN_UID (insn) < max_jump_chain)
3001 {
3002 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
3003 jump_chain[INSN_UID (label)] = insn;
3004 }
3005 }
3006 else
3007 redirect_jump (insn, label);
3008
3009 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3010 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3011 the NEWJPOS stream. */
3012
3013 while (newjpos != insn)
3014 {
3015 rtx lnote;
3016
3017 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
3018 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
3019 || REG_NOTE_KIND (lnote) == REG_EQUIV)
3020 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
3021 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
3022 remove_note (newlpos, lnote);
3023
3024 delete_insn (newjpos);
3025 newjpos = next_real_insn (newjpos);
3026 newlpos = next_real_insn (newlpos);
3027 }
3028 }
3029 \f
3030 /* Return the label before INSN, or put a new label there. */
3031
3032 rtx
3033 get_label_before (insn)
3034 rtx insn;
3035 {
3036 rtx label;
3037
3038 /* Find an existing label at this point
3039 or make a new one if there is none. */
3040 label = prev_nonnote_insn (insn);
3041
3042 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3043 {
3044 rtx prev = PREV_INSN (insn);
3045
3046 label = gen_label_rtx ();
3047 emit_label_after (label, prev);
3048 LABEL_NUSES (label) = 0;
3049 }
3050 return label;
3051 }
3052
3053 /* Return the label after INSN, or put a new label there. */
3054
3055 rtx
3056 get_label_after (insn)
3057 rtx insn;
3058 {
3059 rtx label;
3060
3061 /* Find an existing label at this point
3062 or make a new one if there is none. */
3063 label = next_nonnote_insn (insn);
3064
3065 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3066 {
3067 label = gen_label_rtx ();
3068 emit_label_after (label, insn);
3069 LABEL_NUSES (label) = 0;
3070 }
3071 return label;
3072 }
3073 \f
3074 /* Return 1 if INSN is a jump that jumps to right after TARGET
3075 only on the condition that TARGET itself would drop through.
3076 Assumes that TARGET is a conditional jump. */
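/* E.g. (illustration): if TARGET is "if (x >= y) goto Lx" and INSN is
   "if (x < y) goto L", where L is the label just after TARGET, then
   INSN reaches the code after TARGET exactly when TARGET itself would
   have dropped through, and jump_back_p returns 1.  */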
3077
3078 static int
3079 jump_back_p (insn, target)
3080 rtx insn, target;
3081 {
3082 rtx cinsn, ctarget;
3083 enum rtx_code codei, codet;
3084
3085 if (simplejump_p (insn) || ! condjump_p (insn)
3086 || simplejump_p (target)
3087 || target != prev_real_insn (JUMP_LABEL (insn)))
3088 return 0;
3089
3090 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3091 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3092
3093 codei = GET_CODE (cinsn);
3094 codet = GET_CODE (ctarget);
3095
3096 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3097 {
3098 if (! can_reverse_comparison_p (cinsn, insn))
3099 return 0;
3100 codei = reverse_condition (codei);
3101 }
3102
3103 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3104 {
3105 if (! can_reverse_comparison_p (ctarget, target))
3106 return 0;
3107 codet = reverse_condition (codet);
3108 }
3109
3110 return (codei == codet
3111 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3112 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3113 }
3114 \f
3115 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3116 return non-zero if it is safe to reverse this comparison. It is safe if our
3117 floating-point is not IEEE, if -ffast-math is in effect, if this is an NE
3118 or EQ comparison, or if this is known to be an integer comparison. */
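/* For example, under IEEE arithmetic (GT x y) must not be reversed to
   (LE x y): if either operand is a NaN, both comparisons are false, so
   the "reversed" test is not the logical negation of the original.  */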
3119
3120 int
3121 can_reverse_comparison_p (comparison, insn)
3122 rtx comparison;
3123 rtx insn;
3124 {
3125 rtx arg0;
3126
3127 /* If this is not actually a comparison, we can't reverse it. */
3128 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3129 return 0;
3130
3131 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3132 /* If this is an NE comparison, it is safe to reverse it to an EQ
3133 comparison and vice versa, even for floating point. If no operands
3134 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3135 always false and NE is always true, so the reversal is also valid. */
3136 || flag_fast_math
3137 || GET_CODE (comparison) == NE
3138 || GET_CODE (comparison) == EQ)
3139 return 1;
3140
3141 arg0 = XEXP (comparison, 0);
3142
3143 /* Make sure ARG0 is one of the actual objects being compared. If we
3144 can't do this, we can't be sure the comparison can be reversed.
3145
3146 Handle cc0 and a MODE_CC register. */
3147 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3148 #ifdef HAVE_cc0
3149 || arg0 == cc0_rtx
3150 #endif
3151 )
3152 {
3153 rtx prev = prev_nonnote_insn (insn);
3154 rtx set = single_set (prev);
3155
3156 if (set == 0 || SET_DEST (set) != arg0)
3157 return 0;
3158
3159 arg0 = SET_SRC (set);
3160
3161 if (GET_CODE (arg0) == COMPARE)
3162 arg0 = XEXP (arg0, 0);
3163 }
3164
3165 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3166 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3167 return (GET_CODE (arg0) == CONST_INT
3168 || (GET_MODE (arg0) != VOIDmode
3169 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3170 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3171 }
3172
3173 /* Given an rtx-code for a comparison, return the code
3174 for the negated comparison.
3175 WATCH OUT! reverse_condition is not safe to use on a jump
3176 that might be acting on the results of an IEEE floating point comparison,
3177 because of the special treatment of non-signaling NaNs in comparisons.
3178 Use can_reverse_comparison_p to be sure. */
3179
3180 enum rtx_code
3181 reverse_condition (code)
3182 enum rtx_code code;
3183 {
3184 switch (code)
3185 {
3186 case EQ:
3187 return NE;
3188
3189 case NE:
3190 return EQ;
3191
3192 case GT:
3193 return LE;
3194
3195 case GE:
3196 return LT;
3197
3198 case LT:
3199 return GE;
3200
3201 case LE:
3202 return GT;
3203
3204 case GTU:
3205 return LEU;
3206
3207 case GEU:
3208 return LTU;
3209
3210 case LTU:
3211 return GEU;
3212
3213 case LEU:
3214 return GTU;
3215
3216 default:
3217 abort ();
3218 return UNKNOWN;
3219 }
3220 }
3221
3222 /* Similar, but return the code when two operands of a comparison are swapped.
3223 This IS safe for IEEE floating-point. */
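/* E.g. swap_condition (LT) == GT, since (x < y) and (y > x) are the
   same test.  This holds even when an operand is a NaN, which is why
   swapping is safe for IEEE floating-point while reversing is not.  */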
3224
3225 enum rtx_code
3226 swap_condition (code)
3227 enum rtx_code code;
3228 {
3229 switch (code)
3230 {
3231 case EQ:
3232 case NE:
3233 return code;
3234
3235 case GT:
3236 return LT;
3237
3238 case GE:
3239 return LE;
3240
3241 case LT:
3242 return GT;
3243
3244 case LE:
3245 return GE;
3246
3247 case GTU:
3248 return LTU;
3249
3250 case GEU:
3251 return LEU;
3252
3253 case LTU:
3254 return GTU;
3255
3256 case LEU:
3257 return GEU;
3258
3259 default:
3260 abort ();
3261 return UNKNOWN;
3262 }
3263 }
3264
3265 /* Given a comparison CODE, return the corresponding unsigned comparison.
3266 If CODE is an equality comparison or already an unsigned comparison,
3267 CODE is returned. */
3268
3269 enum rtx_code
3270 unsigned_condition (code)
3271 enum rtx_code code;
3272 {
3273 switch (code)
3274 {
3275 case EQ:
3276 case NE:
3277 case GTU:
3278 case GEU:
3279 case LTU:
3280 case LEU:
3281 return code;
3282
3283 case GT:
3284 return GTU;
3285
3286 case GE:
3287 return GEU;
3288
3289 case LT:
3290 return LTU;
3291
3292 case LE:
3293 return LEU;
3294
3295 default:
3296 abort ();
3297 }
3298 }
3299
3300 /* Similarly, return the signed version of a comparison. */
3301
3302 enum rtx_code
3303 signed_condition (code)
3304 enum rtx_code code;
3305 {
3306 switch (code)
3307 {
3308 case EQ:
3309 case NE:
3310 case GT:
3311 case GE:
3312 case LT:
3313 case LE:
3314 return code;
3315
3316 case GTU:
3317 return GT;
3318
3319 case GEU:
3320 return GE;
3321
3322 case LTU:
3323 return LT;
3324
3325 case LEU:
3326 return LE;
3327
3328 default:
3329 abort ();
3330 }
3331 }
3332 \f
3333 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3334 truth of CODE1 implies the truth of CODE2. */
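/* E.g. comparison_dominates_p (EQ, GE) is 1, since x == y implies
   x >= y; comparison_dominates_p (GE, EQ) is 0.  */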
3335
3336 int
3337 comparison_dominates_p (code1, code2)
3338 enum rtx_code code1, code2;
3339 {
3340 if (code1 == code2)
3341 return 1;
3342
3343 switch (code1)
3344 {
3345 case EQ:
3346 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
3347 return 1;
3348 break;
3349
3350 case LT:
3351 if (code2 == LE || code2 == NE)
3352 return 1;
3353 break;
3354
3355 case GT:
3356 if (code2 == GE || code2 == NE)
3357 return 1;
3358 break;
3359
3360 case LTU:
3361 if (code2 == LEU || code2 == NE)
3362 return 1;
3363 break;
3364
3365 case GTU:
3366 if (code2 == GEU || code2 == NE)
3367 return 1;
3368 break;
3369
3370 default:
3371 break;
3372 }
3373
3374 return 0;
3375 }
3376 \f
3377 /* Return 1 if INSN is an unconditional jump and nothing else. */
3378
3379 int
3380 simplejump_p (insn)
3381 rtx insn;
3382 {
3383 return (GET_CODE (insn) == JUMP_INSN
3384 && GET_CODE (PATTERN (insn)) == SET
3385 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3386 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3387 }
3388
3389 /* Return nonzero if INSN is a (possibly) conditional jump
3390 and nothing more. */
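/* Concretely, PATTERN (insn) must be exactly one of

	(set (pc) (label_ref L))
	(set (pc) (if_then_else COND (label_ref L) (pc)))
	(set (pc) (if_then_else COND (pc) (label_ref L)))

   where in the last two forms the label_ref may also be a RETURN.  */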
3391
3392 int
3393 condjump_p (insn)
3394 rtx insn;
3395 {
3396 register rtx x = PATTERN (insn);
3397 if (GET_CODE (x) != SET)
3398 return 0;
3399 if (GET_CODE (SET_DEST (x)) != PC)
3400 return 0;
3401 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3402 return 1;
3403 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3404 return 0;
3405 if (XEXP (SET_SRC (x), 2) == pc_rtx
3406 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3407 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3408 return 1;
3409 if (XEXP (SET_SRC (x), 1) == pc_rtx
3410 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3411 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3412 return 1;
3413 return 0;
3414 }
3415
3416 /* Return nonzero if INSN is a (possibly) conditional jump
3417 inside a PARALLEL and nothing more. */
3418
3419 int
3420 condjump_in_parallel_p (insn)
3421 rtx insn;
3422 {
3423 register rtx x = PATTERN (insn);
3424
3425 if (GET_CODE (x) != PARALLEL)
3426 return 0;
3427 else
3428 x = XVECEXP (x, 0, 0);
3429
3430 if (GET_CODE (x) != SET)
3431 return 0;
3432 if (GET_CODE (SET_DEST (x)) != PC)
3433 return 0;
3434 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3435 return 1;
3436 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3437 return 0;
3438 if (XEXP (SET_SRC (x), 2) == pc_rtx
3439 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3440 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3441 return 1;
3442 if (XEXP (SET_SRC (x), 1) == pc_rtx
3443 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3444 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3445 return 1;
3446 return 0;
3447 }
3448
3449 /* Return the label of a conditional jump. */
3450
3451 rtx
3452 condjump_label (insn)
3453 rtx insn;
3454 {
3455 register rtx x = PATTERN (insn);
3456
3457 if (GET_CODE (x) == PARALLEL)
3458 x = XVECEXP (x, 0, 0);
3459 if (GET_CODE (x) != SET)
3460 return NULL_RTX;
3461 if (GET_CODE (SET_DEST (x)) != PC)
3462 return NULL_RTX;
3463 x = SET_SRC (x);
3464 if (GET_CODE (x) == LABEL_REF)
3465 return x;
3466 if (GET_CODE (x) != IF_THEN_ELSE)
3467 return NULL_RTX;
3468 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
3469 return XEXP (x, 1);
3470 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
3471 return XEXP (x, 2);
3472 return NULL_RTX;
3473 }
3474
3475 /* Return true if INSN is a (possibly conditional) return insn. */
3476
3477 static int
3478 returnjump_p_1 (loc, data)
3479 rtx *loc;
3480 void *data ATTRIBUTE_UNUSED;
3481 {
3482 rtx x = *loc;
3483 return GET_CODE (x) == RETURN;
3484 }
3485
3486 int
3487 returnjump_p (insn)
3488 rtx insn;
3489 {
3490 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
3491 }
3492
3493 #ifdef HAVE_cc0
3494
3495 /* Return 1 if X is an RTX that does nothing but set the condition codes
3496 and CLOBBER or USE registers.
3497 Return -1 if X does explicitly set the condition codes,
3498 but also does other things. */
3499
3500 int
3501 sets_cc0_p (x)
3502 rtx x ATTRIBUTE_UNUSED;
3503 {
3504 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3505 return 1;
3506 if (GET_CODE (x) == PARALLEL)
3507 {
3508 int i;
3509 int sets_cc0 = 0;
3510 int other_things = 0;
3511 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3512 {
3513 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3514 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3515 sets_cc0 = 1;
3516 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3517 other_things = 1;
3518 }
3519 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3520 }
3521 return 0;
3522 }
3523 #endif
3524 \f
3525 /* Follow any unconditional jump at LABEL;
3526 return the ultimate label reached by any such chain of jumps.
3527 If LABEL is not followed by a jump, return LABEL.
3528 If the chain loops or we can't find the end, return LABEL,
3529 since that tells caller to avoid changing the insn.
3530
3531 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3532 a USE or CLOBBER. */
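/* E.g. (illustration): given the chain

	L1: jump L2 ... L2: jump L3 ... L3: <real insn>

   follow_jumps (L1) returns L3.  At most 10 links are followed; a
   longer chain or a cycle makes us return LABEL unchanged.  */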
3533
3534 rtx
3535 follow_jumps (label)
3536 rtx label;
3537 {
3538 register rtx insn;
3539 register rtx next;
3540 register rtx value = label;
3541 register int depth;
3542
3543 for (depth = 0;
3544 (depth < 10
3545 && (insn = next_active_insn (value)) != 0
3546 && GET_CODE (insn) == JUMP_INSN
3547 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3548 || GET_CODE (PATTERN (insn)) == RETURN)
3549 && (next = NEXT_INSN (insn))
3550 && GET_CODE (next) == BARRIER);
3551 depth++)
3552 {
3553 /* Don't chain through the insn that jumps into a loop
3554 from outside the loop,
3555 since that would create multiple loop entry jumps
3556 and prevent loop optimization. */
3557 rtx tem;
3558 if (!reload_completed)
3559 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3560 if (GET_CODE (tem) == NOTE
3561 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
3562 /* ??? Optional. Disables some optimizations, but makes
3563 gcov output more accurate with -O. */
3564 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
3565 return value;
3566
3567 /* If we have found a cycle, make the insn jump to itself. */
3568 if (JUMP_LABEL (insn) == label)
3569 return label;
3570
3571 tem = next_active_insn (JUMP_LABEL (insn));
3572 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3573 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3574 break;
3575
3576 value = JUMP_LABEL (insn);
3577 }
3578 if (depth == 10)
3579 return label;
3580 return value;
3581 }
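
/* As an illustrative example: given unconditional jumps, each followed
   by a barrier,

     L1: jump L2
     ...
     L2: jump L3
     ...
     L3: (code)

   follow_jumps (L1) returns L3.  If the chain instead cycles back to the
   starting label, or is ten or more jumps deep, the original label is
   returned and the caller leaves the insn alone.  */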
3582
3583 /* Assuming that field IDX of X is a vector of label_refs,
3584 replace each of them by the ultimate label reached by it.
3585 Return nonzero if a change is made.
3586 Before reload has completed, follow_jumps does not chain across a NOTE_INSN_LOOP_BEG. */
3587
3588 static int
3589 tension_vector_labels (x, idx)
3590 register rtx x;
3591 register int idx;
3592 {
3593 int changed = 0;
3594 register int i;
3595 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3596 {
3597 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3598 register rtx nlabel = follow_jumps (olabel);
3599 if (nlabel && nlabel != olabel)
3600 {
3601 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3602 ++LABEL_NUSES (nlabel);
3603 if (--LABEL_NUSES (olabel) == 0)
3604 delete_insn (olabel);
3605 changed = 1;
3606 }
3607 }
3608 return changed;
3609 }
3610 \f
3611 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3612 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3613 in INSN, then store one of them in JUMP_LABEL (INSN).
3614 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3615 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3616 Also, when there are consecutive labels, canonicalize on the last of them.
3617
3618 Note that two labels separated by a loop-beginning note
3619 must be kept distinct if we have not yet done loop-optimization,
3620 because the gap between them is where loop-optimize
3621 will want to move invariant code to.  A nonzero CROSS_JUMP tells us
3622 that loop optimization has already been done.
3623
3624 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3625 two labels distinct if they are separated by only USE or CLOBBER insns. */
3626
3627 static void
3628 mark_jump_label (x, insn, cross_jump)
3629 register rtx x;
3630 rtx insn;
3631 int cross_jump;
3632 {
3633 register RTX_CODE code = GET_CODE (x);
3634 register int i;
3635 register char *fmt;
3636
3637 switch (code)
3638 {
3639 case PC:
3640 case CC0:
3641 case REG:
3642 case SUBREG:
3643 case CONST_INT:
3644 case SYMBOL_REF:
3645 case CONST_DOUBLE:
3646 case CLOBBER:
3647 case CALL:
3648 return;
3649
3650 case MEM:
3651 /* If this is a constant-pool reference, see if it is a label. */
3652 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3653 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3654 mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
3655 break;
3656
3657 case LABEL_REF:
3658 {
3659 rtx label = XEXP (x, 0);
3660 rtx olabel = label;
3661 rtx note;
3662 rtx next;
3663
3664 if (GET_CODE (label) != CODE_LABEL)
3665 abort ();
3666
3667 /* Ignore references to labels of containing functions. */
3668 if (LABEL_REF_NONLOCAL_P (x))
3669 break;
3670
3671 /* If there are other labels following this one,
3672 replace it with the last of the consecutive labels. */
3673 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3674 {
3675 if (GET_CODE (next) == CODE_LABEL)
3676 label = next;
3677 else if (cross_jump && GET_CODE (next) == INSN
3678 && (GET_CODE (PATTERN (next)) == USE
3679 || GET_CODE (PATTERN (next)) == CLOBBER))
3680 continue;
3681 else if (GET_CODE (next) != NOTE)
3682 break;
3683 else if (! cross_jump
3684 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3685 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
3686 /* ??? Optional. Disables some optimizations, but
3687 makes gcov output more accurate with -O. */
3688 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
3689 break;
3690 }
3691
3692 XEXP (x, 0) = label;
3693 if (! insn || ! INSN_DELETED_P (insn))
3694 ++LABEL_NUSES (label);
3695
3696 if (insn)
3697 {
3698 if (GET_CODE (insn) == JUMP_INSN)
3699 JUMP_LABEL (insn) = label;
3700
3701 /* If we've changed OLABEL and we had a REG_LABEL note
3702 for it, update it as well. */
3703 else if (label != olabel
3704 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3705 XEXP (note, 0) = label;
3706
3707 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3708 is one. */
3709 else if (! find_reg_note (insn, REG_LABEL, label))
3710 {
3711 /* This code used to ignore labels which referred to dispatch
3712 tables to avoid flow.c generating worse code.
3713 
3714 However, in the presence of global optimizations like
3715 gcse which call find_basic_blocks without calling
3716 life_analysis, not recording such labels will lead
3717 to compiler aborts because of inconsistencies in the
3718 flow graph. So we go ahead and record the label.
3719
3720 It may also be the case that the optimization argument
3721 is no longer valid because of the more accurate cfg
3722 we build in find_basic_blocks -- it no longer pessimizes
3723 code when it finds a REG_LABEL note. */
3724 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
3725 REG_NOTES (insn));
3726 }
3727 }
3728 return;
3729 }
3730
3731 /* Do walk the labels in a vector, but not the first operand of an
3732 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3733 case ADDR_VEC:
3734 case ADDR_DIFF_VEC:
3735 if (! INSN_DELETED_P (insn))
3736 {
3737 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
3738
3739 for (i = 0; i < XVECLEN (x, eltnum); i++)
3740 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
3741 }
3742 return;
3743
3744 default:
3745 break;
3746 }
3747
3748 fmt = GET_RTX_FORMAT (code);
3749 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3750 {
3751 if (fmt[i] == 'e')
3752 mark_jump_label (XEXP (x, i), insn, cross_jump);
3753 else if (fmt[i] == 'E')
3754 {
3755 register int j;
3756 for (j = 0; j < XVECLEN (x, i); j++)
3757 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
3758 }
3759 }
3760 }
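
/* For example (sketch): after mark_jump_label processes

     (jump_insn (set (pc) (label_ref L5)))

   JUMP_LABEL of the insn points to L5 and LABEL_NUSES (L5) has been
   incremented, whereas for a non-jump insn such as

     (insn (set (reg:SI 100) (label_ref L5)))

   a REG_LABEL note naming L5 is attached instead.  */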
3761
3762 /* If all INSN does is set the pc, delete it,
3763 and also delete the insn that set the condition codes for it,
3764 if that is all the previous insn did.  */
3765
3766 void
3767 delete_jump (insn)
3768 rtx insn;
3769 {
3770 register rtx set = single_set (insn);
3771
3772 if (set && GET_CODE (SET_DEST (set)) == PC)
3773 delete_computation (insn);
3774 }
3775
3776 /* Delete INSN and recursively delete insns that compute values used only
3777 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3778 If we are running before flow.c, we need do nothing since flow.c will
3779 delete dead code. We also can't know if the registers being used are
3780 dead or not at this point.
3781
3782 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3783 nothing other than set a register that dies in this insn, we can delete
3784 that insn as well.
3785
3786 On machines with CC0, if CC0 is used in this insn, we may be able to
3787 delete the insn that set it. */
3788
3789 static void
3790 delete_computation (insn)
3791 rtx insn;
3792 {
3793 rtx note, next;
3794
3795 #ifdef HAVE_cc0
3796 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3797 {
3798 rtx prev = prev_nonnote_insn (insn);
3799 /* We assume that at this stage
3800 CC's are always set explicitly
3801 and always immediately before the jump that
3802 will use them. So if the previous insn
3803 exists to set the CC's, delete it
3804 (unless it performs auto-increments, etc.). */
3805 if (prev && GET_CODE (prev) == INSN
3806 && sets_cc0_p (PATTERN (prev)))
3807 {
3808 if (sets_cc0_p (PATTERN (prev)) > 0
3809 && !FIND_REG_INC_NOTE (prev, NULL_RTX))
3810 delete_computation (prev);
3811 else
3812 /* Otherwise, show that cc0 won't be used. */
3813 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
3814 cc0_rtx, REG_NOTES (prev));
3815 }
3816 }
3817 #endif
3818
3819 #ifdef INSN_SCHEDULING
3820 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
3821 reload has completed. The schedulers need to be fixed. Until
3822 they are, we must not rely on the death notes here. */
3823 if (reload_completed && flag_schedule_insns_after_reload)
3824 {
3825 delete_insn (insn);
3826 return;
3827 }
3828 #endif
3829
3830 for (note = REG_NOTES (insn); note; note = next)
3831 {
3832 rtx our_prev;
3833
3834 next = XEXP (note, 1);
3835
3836 if (REG_NOTE_KIND (note) != REG_DEAD
3837 /* Verify that the REG_NOTE is legitimate. */
3838 || GET_CODE (XEXP (note, 0)) != REG)
3839 continue;
3840
3841 for (our_prev = prev_nonnote_insn (insn);
3842 our_prev && GET_CODE (our_prev) == INSN;
3843 our_prev = prev_nonnote_insn (our_prev))
3844 {
3845 /* If we reach a SEQUENCE, it is too complex to try to
3846 do anything with it, so give up. */
3847 if (GET_CODE (PATTERN (our_prev)) == SEQUENCE)
3848 break;
3849
3850 if (GET_CODE (PATTERN (our_prev)) == USE
3851 && GET_CODE (XEXP (PATTERN (our_prev), 0)) == INSN)
3852 /* reorg creates USEs that look like this. We leave them
3853 alone because reorg needs them for its own purposes. */
3854 break;
3855
3856 if (reg_set_p (XEXP (note, 0), PATTERN (our_prev)))
3857 {
3858 if (FIND_REG_INC_NOTE (our_prev, NULL_RTX))
3859 break;
3860
3861 if (GET_CODE (PATTERN (our_prev)) == PARALLEL)
3862 {
3863 /* If we find a SET of something else, we can't
3864 delete the insn. */
3865
3866 int i;
3867
3868 for (i = 0; i < XVECLEN (PATTERN (our_prev), 0); i++)
3869 {
3870 rtx part = XVECEXP (PATTERN (our_prev), 0, i);
3871
3872 if (GET_CODE (part) == SET
3873 && SET_DEST (part) != XEXP (note, 0))
3874 break;
3875 }
3876
3877 if (i == XVECLEN (PATTERN (our_prev), 0))
3878 delete_computation (our_prev);
3879 }
3880 else if (GET_CODE (PATTERN (our_prev)) == SET
3881 && SET_DEST (PATTERN (our_prev)) == XEXP (note, 0))
3882 delete_computation (our_prev);
3883
3884 break;
3885 }
3886
3887 /* If OUR_PREV references the register that dies here, it is an
3888 additional use. Hence any prior SET isn't dead. However, this
3889 insn becomes the new place for the REG_DEAD note. */
3890 if (reg_overlap_mentioned_p (XEXP (note, 0),
3891 PATTERN (our_prev)))
3892 {
3893 XEXP (note, 1) = REG_NOTES (our_prev);
3894 REG_NOTES (our_prev) = note;
3895 break;
3896 }
3897 }
3898 }
3899
3900 delete_insn (insn);
3901 }
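
/* Illustrative example: if the insn being deleted carries the note
   (expr_list:REG_DEAD (reg:SI 100)), and the nearest prior insn setting
   that register is

     (insn 11 (set (reg:SI 100) (plus:SI ...)))

   with no intervening uses, then deleting the original insn also deletes
   insn 11, and the process repeats for whatever fed only insn 11.  */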
3902 \f
3903 /* Delete insn INSN from the chain of insns and update label ref counts.
3904 May delete some following insns as a consequence; may even delete
3905 a label elsewhere and insns that follow it.
3906
3907 Returns the first insn after INSN that was not deleted. */
3908
3909 rtx
3910 delete_insn (insn)
3911 register rtx insn;
3912 {
3913 register rtx next = NEXT_INSN (insn);
3914 register rtx prev = PREV_INSN (insn);
3915 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
3916 register int dont_really_delete = 0;
3917
3918 while (next && INSN_DELETED_P (next))
3919 next = NEXT_INSN (next);
3920
3921 /* This insn is already deleted => return first following nondeleted. */
3922 if (INSN_DELETED_P (insn))
3923 return next;
3924
3925 /* Don't delete user-declared labels. Convert them to special NOTEs
3926 instead. */
3927 if (was_code_label && LABEL_NAME (insn) != 0
3928 && optimize && ! dont_really_delete)
3929 {
3930 PUT_CODE (insn, NOTE);
3931 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
3932 NOTE_SOURCE_FILE (insn) = 0;
3933 dont_really_delete = 1;
3934 }
3935 else
3936 /* Mark this insn as deleted. */
3937 INSN_DELETED_P (insn) = 1;
3938
3939 /* If this is an unconditional jump, delete it from the jump chain. */
3940 if (simplejump_p (insn))
3941 delete_from_jump_chain (insn);
3942
3943 /* If instruction is followed by a barrier,
3944 delete the barrier too. */
3945
3946 if (next != 0 && GET_CODE (next) == BARRIER)
3947 {
3948 INSN_DELETED_P (next) = 1;
3949 next = NEXT_INSN (next);
3950 }
3951
3952 /* Patch out INSN (and the barrier, if any).  */
3953
3954 if (optimize && ! dont_really_delete)
3955 {
3956 if (prev)
3957 {
3958 NEXT_INSN (prev) = next;
3959 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3960 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
3961 XVECLEN (PATTERN (prev), 0) - 1)) = next;
3962 }
3963
3964 if (next)
3965 {
3966 PREV_INSN (next) = prev;
3967 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3968 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3969 }
3970
3971 if (prev && NEXT_INSN (prev) == 0)
3972 set_last_insn (prev);
3973 }
3974
3975 /* If deleting a jump, decrement the count of the label,
3976 and delete the label if it is now unused. */
3977
3978 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
3979 if (--LABEL_NUSES (JUMP_LABEL (insn)) == 0)
3980 {
3981 /* This can delete NEXT or PREV,
3982 either directly if NEXT is JUMP_LABEL (INSN),
3983 or indirectly through more levels of jumps. */
3984 delete_insn (JUMP_LABEL (insn));
3985 /* I feel a little doubtful about this loop,
3986 but I see no clean and sure alternative way
3987 to find the first insn after INSN that is not now deleted.
3988 I hope this works. */
3989 while (next && INSN_DELETED_P (next))
3990 next = NEXT_INSN (next);
3991 return next;
3992 }
3993
3994 /* Likewise if we're deleting a dispatch table. */
3995
3996 if (GET_CODE (insn) == JUMP_INSN
3997 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
3998 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
3999 {
4000 rtx pat = PATTERN (insn);
4001 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4002 int len = XVECLEN (pat, diff_vec_p);
4003
4004 for (i = 0; i < len; i++)
4005 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
4006 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
4007 while (next && INSN_DELETED_P (next))
4008 next = NEXT_INSN (next);
4009 return next;
4010 }
4011
4012 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
4013 prev = PREV_INSN (prev);
4014
4015 /* If INSN was a label and a dispatch table follows it,
4016 delete the dispatch table. The tablejump must have gone already.
4017 It isn't useful to fall through into a table. */
4018
4019 if (was_code_label
4020 && NEXT_INSN (insn) != 0
4021 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4022 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
4023 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
4024 next = delete_insn (NEXT_INSN (insn));
4025
4026 /* If INSN was a label, delete insns following it if now unreachable. */
4027
4028 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
4029 {
4030 register RTX_CODE code;
4031 while (next != 0
4032 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
4033 || code == NOTE || code == BARRIER
4034 || (code == CODE_LABEL && INSN_DELETED_P (next))))
4035 {
4036 if (code == NOTE
4037 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
4038 next = NEXT_INSN (next);
4039 /* Keep going past other deleted labels to delete what follows. */
4040 else if (code == CODE_LABEL && INSN_DELETED_P (next))
4041 next = NEXT_INSN (next);
4042 else
4043 /* Note: if this deletes a jump, it can cause more
4044 deletion of unreachable code, after a different label.
4045 As long as the value from this recursive call is correct,
4046 this invocation functions correctly. */
4047 next = delete_insn (next);
4048 }
4049 }
4050
4051 return next;
4052 }
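
/* Typical usage sketch: because delete_insn may delete following insns
   too, a scan that deletes as it goes should advance via the return
   value rather than NEXT_INSN.  Here should_delete_p stands for any
   hypothetical caller-supplied test:

     while (insn)
       {
         if (should_delete_p (insn))
           insn = delete_insn (insn);
         else
           insn = NEXT_INSN (insn);
       }
 */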
4053
4054 /* Advance from INSN until reaching something not deleted,
4055 then return that.  May return INSN itself.  */
4056
4057 rtx
4058 next_nondeleted_insn (insn)
4059 rtx insn;
4060 {
4061 while (INSN_DELETED_P (insn))
4062 insn = NEXT_INSN (insn);
4063 return insn;
4064 }
4065 \f
4066 /* Delete a range of insns from FROM to TO, inclusive.
4067 This is for the sake of peephole optimization, so assume
4068 that whatever these insns do will still be done by a new
4069 peephole insn that will replace them. */
4070
4071 void
4072 delete_for_peephole (from, to)
4073 register rtx from, to;
4074 {
4075 register rtx insn = from;
4076
4077 while (1)
4078 {
4079 register rtx next = NEXT_INSN (insn);
4080 register rtx prev = PREV_INSN (insn);
4081
4082 if (GET_CODE (insn) != NOTE)
4083 {
4084 INSN_DELETED_P (insn) = 1;
4085
4086 /* Patch this insn out of the chain. */
4087 /* We don't do this all at once, because we
4088 must preserve all NOTEs. */
4089 if (prev)
4090 NEXT_INSN (prev) = next;
4091
4092 if (next)
4093 PREV_INSN (next) = prev;
4094 }
4095
4096 if (insn == to)
4097 break;
4098 insn = next;
4099 }
4100
4101 /* Note that if TO is an unconditional jump
4102 we *do not* delete the BARRIER that follows,
4103 since the peephole that replaces this sequence
4104 is also an unconditional jump in that case. */
4105 }
4106 \f
4107 /* Invert the condition of the jump JUMP, and make it jump to label
4108 NLABEL instead of where it jumps now.  Return 1 if successful, 0 if not.  */
4109
4110 int
4111 invert_jump (jump, nlabel)
4112 rtx jump, nlabel;
4113 {
4114 /* We have to either invert the condition and change the label or
4115 do neither. Either operation could fail. We first try to invert
4116 the jump. If that succeeds, we try changing the label. If that fails,
4117 we invert the jump back to what it was. */
4118
4119 if (! invert_exp (PATTERN (jump), jump))
4120 return 0;
4121
4122 if (redirect_jump (jump, nlabel))
4123 {
4124 if (flag_branch_probabilities)
4125 {
4126 rtx note = find_reg_note (jump, REG_BR_PROB, 0);
4127
4128 /* An inverted jump means that a probability taken becomes a
4129 probability not taken. Subtract the branch probability from the
4130 probability base to convert it back to a taken probability.
4131 (We don't flip the probability on a branch that's never taken.)  */
4132 if (note && XINT (XEXP (note, 0), 0) >= 0)
4133 XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
4134 }
4135
4136 return 1;
4137 }
4138
4139 if (! invert_exp (PATTERN (jump), jump))
4140 /* This should just be putting it back the way it was. */
4141 abort ();
4142
4143 return 0;
4144 }
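
/* Worked example of the probability update above: with the usual
   REG_BR_PROB_BASE of 10000, a branch taken with probability 3000
   becomes, after inversion, a branch taken with probability
   10000 - 3000 = 7000.  */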
4145
4146 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4147
4148 Return 1 if we can do so, 0 if we cannot find a way to do so that
4149 matches a pattern. */
4150
4151 int
4152 invert_exp (x, insn)
4153 rtx x;
4154 rtx insn;
4155 {
4156 register RTX_CODE code;
4157 register int i;
4158 register char *fmt;
4159
4160 code = GET_CODE (x);
4161
4162 if (code == IF_THEN_ELSE)
4163 {
4164 register rtx comp = XEXP (x, 0);
4165 register rtx tem;
4166
4167 /* We can do this in two ways: The preferable way, which can only
4168 be done if this is not an integer comparison, is to reverse
4169 the comparison code. Otherwise, swap the THEN-part and ELSE-part
4170 of the IF_THEN_ELSE. If we can't do either, fail. */
4171
4172 if (can_reverse_comparison_p (comp, insn)
4173 && validate_change (insn, &XEXP (x, 0),
4174 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
4175 GET_MODE (comp), XEXP (comp, 0),
4176 XEXP (comp, 1)), 0))
4177 return 1;
4178
4179 tem = XEXP (x, 1);
4180 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
4181 validate_change (insn, &XEXP (x, 2), tem, 1);
4182 return apply_change_group ();
4183 }
4184
4185 fmt = GET_RTX_FORMAT (code);
4186 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4187 {
4188 if (fmt[i] == 'e')
4189 if (! invert_exp (XEXP (x, i), insn))
4190 return 0;
4191 if (fmt[i] == 'E')
4192 {
4193 register int j;
4194 for (j = 0; j < XVECLEN (x, i); j++)
4195 if (!invert_exp (XVECEXP (x, i, j), insn))
4196 return 0;
4197 }
4198 }
4199
4200 return 1;
4201 }
4202 \f
4203 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4204 If the old jump target label is unused as a result,
4205 it and the code following it may be deleted.
4206
4207 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4208 RETURN insn.
4209
4210 The return value will be 1 if the change was made, 0 if it wasn't (this
4211 can only occur for NLABEL == 0). */
4212
4213 int
4214 redirect_jump (jump, nlabel)
4215 rtx jump, nlabel;
4216 {
4217 register rtx olabel = JUMP_LABEL (jump);
4218
4219 if (nlabel == olabel)
4220 return 1;
4221
4222 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
4223 return 0;
4224
4225 /* If this is an unconditional branch, delete it from the jump_chain of
4226 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4227 have UID's in range and JUMP_CHAIN is valid). */
4228 if (jump_chain && (simplejump_p (jump)
4229 || GET_CODE (PATTERN (jump)) == RETURN))
4230 {
4231 int label_index = nlabel ? INSN_UID (nlabel) : 0;
4232
4233 delete_from_jump_chain (jump);
4234 if (label_index < max_jump_chain
4235 && INSN_UID (jump) < max_jump_chain)
4236 {
4237 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
4238 jump_chain[label_index] = jump;
4239 }
4240 }
4241
4242 JUMP_LABEL (jump) = nlabel;
4243 if (nlabel)
4244 ++LABEL_NUSES (nlabel);
4245
4246 if (olabel && --LABEL_NUSES (olabel) == 0)
4247 delete_insn (olabel);
4248
4249 return 1;
4250 }
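
/* Usage sketch: a zero NLABEL asks redirect_jump to turn the jump into
   a (possibly conditional) RETURN.  validate_change may reject that
   rewrite, so the return value must be checked; for example

     if (! redirect_jump (jump, NULL_RTX))
       continue;  -- the target has no suitable return pattern

   in a hypothetical caller's scan loop.  */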
4251
4252 /* Delete the instruction JUMP from any jump chain it might be on. */
4253
4254 static void
4255 delete_from_jump_chain (jump)
4256 rtx jump;
4257 {
4258 int index;
4259 rtx olabel = JUMP_LABEL (jump);
4260
4261 /* Handle unconditional jumps. */
4262 if (jump_chain && olabel != 0
4263 && INSN_UID (olabel) < max_jump_chain
4264 && simplejump_p (jump))
4265 index = INSN_UID (olabel);
4266 /* Handle return insns. */
4267 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4268 index = 0;
4269 else return;
4270
4271 if (jump_chain[index] == jump)
4272 jump_chain[index] = jump_chain[INSN_UID (jump)];
4273 else
4274 {
4275 rtx insn;
4276
4277 for (insn = jump_chain[index];
4278 insn != 0;
4279 insn = jump_chain[INSN_UID (insn)])
4280 if (jump_chain[INSN_UID (insn)] == jump)
4281 {
4282 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4283 break;
4284 }
4285 }
4286 }
4287
4288 /* If NLABEL is nonzero, throughout the rtx at LOC,
4289 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
4290 zero, alter (RETURN) to (LABEL_REF NLABEL).
4291
4292 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN), checking
4293 validity with validate_change; in particular, a whole pattern
4294 (set (pc) (label_ref olabel)) becomes (return).
4295
4296 Return 0 if we found a change we would like to make but it is invalid.
4297 Otherwise, return 1. */
4298
4299 int
4300 redirect_exp (loc, olabel, nlabel, insn)
4301 rtx *loc;
4302 rtx olabel, nlabel;
4303 rtx insn;
4304 {
4305 register rtx x = *loc;
4306 register RTX_CODE code = GET_CODE (x);
4307 register int i;
4308 register char *fmt;
4309
4310 if (code == LABEL_REF)
4311 {
4312 if (XEXP (x, 0) == olabel)
4313 {
4314 if (nlabel)
4315 XEXP (x, 0) = nlabel;
4316 else
4317 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4318 return 1;
4319 }
4320 }
4321 else if (code == RETURN && olabel == 0)
4322 {
4323 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
4324 if (loc == &PATTERN (insn))
4325 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
4326 return validate_change (insn, loc, x, 0);
4327 }
4328
4329 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
4330 && GET_CODE (SET_SRC (x)) == LABEL_REF
4331 && XEXP (SET_SRC (x), 0) == olabel)
4332 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4333
4334 fmt = GET_RTX_FORMAT (code);
4335 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4336 {
4337 if (fmt[i] == 'e')
4338 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
4339 return 0;
4340 if (fmt[i] == 'E')
4341 {
4342 register int j;
4343 for (j = 0; j < XVECLEN (x, i); j++)
4344 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
4345 return 0;
4346 }
4347 }
4348
4349 return 1;
4350 }
4351 \f
4352 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4353
4354 If the old jump target label (before the dispatch table) becomes unused,
4355 it and the dispatch table may be deleted.  In that case, find the insn
4356 before the jump that references that label, and delete it together with
4357 its logical successors.  */
4358
4359 static void
4360 redirect_tablejump (jump, nlabel)
4361 rtx jump, nlabel;
4362 {
4363 register rtx olabel = JUMP_LABEL (jump);
4364
4365 /* Add this jump to the jump_chain of NLABEL. */
4366 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4367 && INSN_UID (jump) < max_jump_chain)
4368 {
4369 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4370 jump_chain[INSN_UID (nlabel)] = jump;
4371 }
4372
4373 PATTERN (jump) = gen_jump (nlabel);
4374 JUMP_LABEL (jump) = nlabel;
4375 ++LABEL_NUSES (nlabel);
4376 INSN_CODE (jump) = -1;
4377
4378 if (--LABEL_NUSES (olabel) == 0)
4379 {
4380 delete_labelref_insn (jump, olabel, 0);
4381 delete_insn (olabel);
4382 }
4383 }
4384
4385 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4386 If we found one, delete it and then delete this insn if DELETE_THIS is
4387 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4388
4389 static int
4390 delete_labelref_insn (insn, label, delete_this)
4391 rtx insn, label;
4392 int delete_this;
4393 {
4394 int deleted = 0;
4395 rtx link;
4396
4397 if (GET_CODE (insn) != NOTE
4398 && reg_mentioned_p (label, PATTERN (insn)))
4399 {
4400 if (delete_this)
4401 {
4402 delete_insn (insn);
4403 deleted = 1;
4404 }
4405 else
4406 return 1;
4407 }
4408
4409 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4410 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4411 {
4412 if (delete_this)
4413 {
4414 delete_insn (insn);
4415 deleted = 1;
4416 }
4417 else
4418 return 1;
4419 }
4420
4421 return deleted;
4422 }
4423 \f
4424 /* Like rtx_equal_p except that it considers two REGs as equal
4425 if they renumber to the same value and considers two commutative
4426 operations to be the same if the order of the operands has been
4427 reversed.
4428
4429 ??? Addition is not commutative on the PA due to the weird implicit
4430 space register selection rules for memory addresses. Therefore, we
4431 don't consider a + b == b + a.
4432
4433 We could/should make this test a little tighter. Possibly only
4434 disabling it on the PA via some backend macro or only disabling this
4435 case when the PLUS is inside a MEM. */
4436
4437 int
4438 rtx_renumbered_equal_p (x, y)
4439 rtx x, y;
4440 {
4441 register int i;
4442 register RTX_CODE code = GET_CODE (x);
4443 register char *fmt;
4444
4445 if (x == y)
4446 return 1;
4447
4448 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4449 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4450 && GET_CODE (SUBREG_REG (y)) == REG)))
4451 {
4452 int reg_x = -1, reg_y = -1;
4453 int word_x = 0, word_y = 0;
4454
4455 if (GET_MODE (x) != GET_MODE (y))
4456 return 0;
4457
4458 /* If we haven't done any renumbering, don't
4459 make any assumptions. */
4460 if (reg_renumber == 0)
4461 return rtx_equal_p (x, y);
4462
4463 if (code == SUBREG)
4464 {
4465 reg_x = REGNO (SUBREG_REG (x));
4466 word_x = SUBREG_WORD (x);
4467
4468 if (reg_renumber[reg_x] >= 0)
4469 {
4470 reg_x = reg_renumber[reg_x] + word_x;
4471 word_x = 0;
4472 }
4473 }
4474
4475 else
4476 {
4477 reg_x = REGNO (x);
4478 if (reg_renumber[reg_x] >= 0)
4479 reg_x = reg_renumber[reg_x];
4480 }
4481
4482 if (GET_CODE (y) == SUBREG)
4483 {
4484 reg_y = REGNO (SUBREG_REG (y));
4485 word_y = SUBREG_WORD (y);
4486
4487 if (reg_renumber[reg_y] >= 0)
4488 {
4489 reg_y = reg_renumber[reg_y];
4490 word_y = 0;
4491 }
4492 }
4493
4494 else
4495 {
4496 reg_y = REGNO (y);
4497 if (reg_renumber[reg_y] >= 0)
4498 reg_y = reg_renumber[reg_y];
4499 }
4500
4501 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4502 }
4503
4504 /* Now we have disposed of all the cases
4505 in which different rtx codes can match. */
4506 if (code != GET_CODE (y))
4507 return 0;
4508
4509 switch (code)
4510 {
4511 case PC:
4512 case CC0:
4513 case ADDR_VEC:
4514 case ADDR_DIFF_VEC:
4515 return 0;
4516
4517 case CONST_INT:
4518 return INTVAL (x) == INTVAL (y);
4519
4520 case LABEL_REF:
4521 /* We can't assume nonlocal labels have their following insns yet. */
4522 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4523 return XEXP (x, 0) == XEXP (y, 0);
4524
4525 /* Two label-refs are equivalent if they point at labels
4526 in the same position in the instruction stream. */
4527 return (next_real_insn (XEXP (x, 0))
4528 == next_real_insn (XEXP (y, 0)));
4529
4530 case SYMBOL_REF:
4531 return XSTR (x, 0) == XSTR (y, 0);
4532
4533 case CODE_LABEL:
4534 /* If we didn't match EQ equality above, they aren't the same. */
4535 return 0;
4536
4537 default:
4538 break;
4539 }
4540
4541 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4542
4543 if (GET_MODE (x) != GET_MODE (y))
4544 return 0;
4545
4546 /* For commutative operations, the RTXs match if the operands match in
4547 either order.  Also handle the simple binary and unary cases without a loop.
4548
4549 ??? Don't consider PLUS a commutative operator; see comments above. */
4550 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4551 && code != PLUS)
4552 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4553 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4554 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4555 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4556 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4557 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4558 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4559 else if (GET_RTX_CLASS (code) == '1')
4560 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4561
4562 /* Compare the elements. If any pair of corresponding elements
4563 fail to match, return 0 for the whole thing.  */
4564
4565 fmt = GET_RTX_FORMAT (code);
4566 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4567 {
4568 register int j;
4569 switch (fmt[i])
4570 {
4571 case 'w':
4572 if (XWINT (x, i) != XWINT (y, i))
4573 return 0;
4574 break;
4575
4576 case 'i':
4577 if (XINT (x, i) != XINT (y, i))
4578 return 0;
4579 break;
4580
4581 case 's':
4582 if (strcmp (XSTR (x, i), XSTR (y, i)))
4583 return 0;
4584 break;
4585
4586 case 'e':
4587 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4588 return 0;
4589 break;
4590
4591 case 'u':
4592 if (XEXP (x, i) != XEXP (y, i))
4593 return 0;
4594 /* fall through. */
4595 case '0':
4596 break;
4597
4598 case 'E':
4599 if (XVECLEN (x, i) != XVECLEN (y, i))
4600 return 0;
4601 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4602 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4603 return 0;
4604 break;
4605
4606 default:
4607 abort ();
4608 }
4609 }
4610 return 1;
4611 }
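
/* For example (sketch): if register allocation assigned pseudo 100 to
   hard register 3, then

     rtx_renumbered_equal_p (gen_rtx_REG (SImode, 100),
                             gen_rtx_REG (SImode, 3))

   returns 1, since both renumber to hard register 3.  Before any
   renumbering has happened (reg_renumber == 0) the comparison falls
   back to plain rtx_equal_p.  */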
4612 \f
4613 /* If X is a hard register or equivalent to one or a subregister of one,
4614 return the hard register number. If X is a pseudo register that was not
4615 assigned a hard register, return the pseudo register number. Otherwise,
4616 return -1. Any rtx is valid for X. */
4617
4618 int
4619 true_regnum (x)
4620 rtx x;
4621 {
4622 if (GET_CODE (x) == REG)
4623 {
4624 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4625 return reg_renumber[REGNO (x)];
4626 return REGNO (x);
4627 }
4628 if (GET_CODE (x) == SUBREG)
4629 {
4630 int base = true_regnum (SUBREG_REG (x));
4631 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
4632 return SUBREG_WORD (x) + base;
4633 }
4634 return -1;
4635 }
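
/* Example use (sketch): a caller can test whether X and Y occupy the
   same hard register, even through renumbered pseudos or subregs:

     int rx = true_regnum (x), ry = true_regnum (y);
     if (rx >= 0 && rx < FIRST_PSEUDO_REGISTER && rx == ry)
       ...  -- X and Y denote the same hard register
 */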
4636 \f
4637 /* Optimize code of the form:
4638
4639 for (x = a[i]; x; ...)
4640 ...
4641 for (x = a[i]; x; ...)
4642 ...
4643 foo:
4644
4645 Loop optimize will change the above code into
4646
4647 if (x = a[i])
4648 for (;;)
4649 { ...; if (! (x = ...)) break; }
4650 if (x = a[i])
4651 for (;;)
4652 { ...; if (! (x = ...)) break; }
4653 foo:
4654
4655 In general, if the first test fails, the program can branch
4656 directly to `foo' and skip the second try which is doomed to fail.
4657 We run this after loop optimization and before flow analysis. */
4658
4659 /* When comparing the insn patterns, we track the fact that different
4660 pseudo-register numbers may have been used in each computation.
4661 The following array stores an equivalence -- same_regs[I] == J means
4662 that pseudo register I was used in the first set of tests in a context
4663 where J was used in the second set. We also count the number of such
4664 pending equivalences. If nonzero, the expressions really aren't the
4665 same. */
4666
4667 static int *same_regs;
4668
4669 static int num_same_regs;
4670
4671 /* Track any registers modified between the target of the first jump and
4672 the second jump. They never compare equal. */
4673
4674 static char *modified_regs;
4675
4676 /* Record if memory was modified. */
4677
4678 static int modified_mem;
4679
4680 /* Called via note_stores on each insn between the target of the first
4681 branch and the second branch. It marks any changed registers. */
4682
4683 static void
4684 mark_modified_reg (dest, x)
4685 rtx dest;
4686 rtx x ATTRIBUTE_UNUSED;
4687 {
4688 int regno, i;
4689
4690 if (GET_CODE (dest) == SUBREG)
4691 dest = SUBREG_REG (dest);
4692
4693 if (GET_CODE (dest) == MEM)
4694 modified_mem = 1;
4695
4696 if (GET_CODE (dest) != REG)
4697 return;
4698
4699 regno = REGNO (dest);
4700 if (regno >= FIRST_PSEUDO_REGISTER)
4701 modified_regs[regno] = 1;
4702 else
4703 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
4704 modified_regs[regno + i] = 1;
4705 }
4706
4707 /* Thread jumps over the whole insn chain.  F is the first insn; MAX_REG is used to size the register tables below; FLAG_BEFORE_LOOP is nonzero when this runs before loop optimization.  */
4708
4709 void
4710 thread_jumps (f, max_reg, flag_before_loop)
4711 rtx f;
4712 int max_reg;
4713 int flag_before_loop;
4714 {
4715 /* Basic algorithm is to find a conditional branch,
4716 the label it may branch to, and the branch after
4717 that label. If the two branches test the same condition,
4718 walk back from both branch paths until the insn patterns
4719 differ, or code labels are hit. If we make it back to
4720 the target of the first branch, then we know that the first branch
4721 will either always succeed or always fail depending on the relative
4722 senses of the two branches. So adjust the first branch accordingly
4723 in this case. */
4724
4725 rtx label, b1, b2, t1, t2;
4726 enum rtx_code code1, code2;
4727 rtx b1op0, b1op1, b2op0, b2op1;
4728 int changed = 1;
4729 int i;
4730 int *all_reset;
4731
4732 /* Allocate register tables and quick-reset table. */
4733 modified_regs = (char *) alloca (max_reg * sizeof (char));
4734 same_regs = (int *) alloca (max_reg * sizeof (int));
4735 all_reset = (int *) alloca (max_reg * sizeof (int));
4736 for (i = 0; i < max_reg; i++)
4737 all_reset[i] = -1;
4738
4739 while (changed)
4740 {
4741 changed = 0;
4742
4743 for (b1 = f; b1; b1 = NEXT_INSN (b1))
4744 {
4745 /* Get to a candidate branch insn. */
4746 if (GET_CODE (b1) != JUMP_INSN
4747 || ! condjump_p (b1) || simplejump_p (b1)
4748 || JUMP_LABEL (b1) == 0)
4749 continue;
4750
4751 bzero (modified_regs, max_reg * sizeof (char));
4752 modified_mem = 0;
4753
4754 bcopy ((char *) all_reset, (char *) same_regs,
4755 max_reg * sizeof (int));
4756 num_same_regs = 0;
4757
4758 label = JUMP_LABEL (b1);
4759
4760 /* Look for a branch after the target. Record any registers and
4761 memory modified between the target and the branch. Stop when we
4762 get to a label since we can't know what was changed there. */
4763 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
4764 {
4765 if (GET_CODE (b2) == CODE_LABEL)
4766 break;
4767
4768 else if (GET_CODE (b2) == JUMP_INSN)
4769 {
4770 /* If this is an unconditional jump and is the only use of
4771 its target label, we can follow it. */
4772 if (simplejump_p (b2)
4773 && JUMP_LABEL (b2) != 0
4774 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
4775 {
4776 b2 = JUMP_LABEL (b2);
4777 continue;
4778 }
4779 else
4780 break;
4781 }
4782
4783 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
4784 continue;
4785
4786 if (GET_CODE (b2) == CALL_INSN)
4787 {
4788 modified_mem = 1;
4789 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4790 if (call_used_regs[i] && ! fixed_regs[i]
4791 && i != STACK_POINTER_REGNUM
4792 && i != FRAME_POINTER_REGNUM
4793 && i != HARD_FRAME_POINTER_REGNUM
4794 && i != ARG_POINTER_REGNUM)
4795 modified_regs[i] = 1;
4796 }
4797
4798 note_stores (PATTERN (b2), mark_modified_reg);
4799 }
4800
4801 /* Check the next candidate branch insn from the label
4802 of the first. */
4803 if (b2 == 0
4804 || GET_CODE (b2) != JUMP_INSN
4805 || b2 == b1
4806 || ! condjump_p (b2)
4807 || simplejump_p (b2))
4808 continue;
4809
4810 /* Get the comparison codes and operands, reversing the
4811 codes if appropriate. If we don't have comparison codes,
4812 we can't do anything. */
4813 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
4814 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
4815 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
4816 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
4817 code1 = reverse_condition (code1);
4818
4819 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
4820 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
4821 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
4822 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
4823 code2 = reverse_condition (code2);
4824
4825 /* If they test the same things and knowing that B1 branches
4826 tells us whether or not B2 branches, check if we
4827 can thread the branch. */
4828 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
4829 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
4830 && (comparison_dominates_p (code1, code2)
4831 || (comparison_dominates_p (code1, reverse_condition (code2))
4832 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
4833 0),
4834 b1))))
4835 {
4836 t1 = prev_nonnote_insn (b1);
4837 t2 = prev_nonnote_insn (b2);
4838
4839 while (t1 != 0 && t2 != 0)
4840 {
4841 if (t2 == label)
4842 {
4843 /* We have reached the target of the first branch.
4844 If there are no pending register equivalents,
4845 we know that this branch will either always
4846 succeed (if the senses of the two branches are
4847 the same) or always fail (if not). */
4848 rtx new_label;
4849
4850 if (num_same_regs != 0)
4851 break;
4852
4853 if (comparison_dominates_p (code1, code2))
4854 new_label = JUMP_LABEL (b2);
4855 else
4856 new_label = get_label_after (b2);
4857
4858 if (JUMP_LABEL (b1) != new_label)
4859 {
4860 rtx prev = PREV_INSN (new_label);
4861
4862 if (flag_before_loop
4863 && GET_CODE (prev) == NOTE
4864 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
4865 {
4866 /* Don't thread to the loop label. If a loop
4867 label is reused, loop optimization will
4868 be disabled for that loop. */
4869 new_label = gen_label_rtx ();
4870 emit_label_after (new_label, PREV_INSN (prev));
4871 }
4872 changed |= redirect_jump (b1, new_label);
4873 }
4874 break;
4875 }
4876
4877 /* If either of these is not a normal insn (it might be
4878 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4879 have already been skipped above.) Similarly, fail
4880 if the insns are different. */
4881 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
4882 || recog_memoized (t1) != recog_memoized (t2)
4883 || ! rtx_equal_for_thread_p (PATTERN (t1),
4884 PATTERN (t2), t2))
4885 break;
4886
4887 t1 = prev_nonnote_insn (t1);
4888 t2 = prev_nonnote_insn (t2);
4889 }
4890 }
4891 }
4892 }
4893 }
4894 \f
4895 /* This is like RTX_EQUAL_P except that it knows about our handling of
4896 possibly equivalent registers and knows to consider volatile and
4897 modified objects as not equal.
4898
4899 YINSN is the insn containing Y. */
4900
4901 int
4902 rtx_equal_for_thread_p (x, y, yinsn)
4903 rtx x, y;
4904 rtx yinsn;
4905 {
4906 register int i;
4907 register int j;
4908 register enum rtx_code code;
4909 register char *fmt;
4910
4911 code = GET_CODE (x);
4912 /* Rtx's of different codes cannot be equal. */
4913 if (code != GET_CODE (y))
4914 return 0;
4915
4916 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
4917 (REG:SI x) and (REG:HI x) are NOT equivalent. */
4918
4919 if (GET_MODE (x) != GET_MODE (y))
4920 return 0;
4921
4922 /* For floating-point, consider everything unequal. This is a bit
4923 pessimistic, but this pass would only rarely do anything for FP
4924 anyway. */
4925 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
4926 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
4927 return 0;
4928
4929 /* For commutative operations, the RTXs match if the operands match in
4930 either order.  Also handle the simple binary and unary cases without a loop. */
4931 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4932 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4933 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
4934 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
4935 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
4936 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4937 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4938 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
4939 else if (GET_RTX_CLASS (code) == '1')
4940 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4941
4942 /* Handle special-cases first. */
4943 switch (code)
4944 {
4945 case REG:
4946 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
4947 return 1;
4948
4949 /* If neither is a user variable or a hard register, check for possible
4950 equivalence. */
4951 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
4952 || REGNO (x) < FIRST_PSEUDO_REGISTER
4953 || REGNO (y) < FIRST_PSEUDO_REGISTER)
4954 return 0;
4955
4956 if (same_regs[REGNO (x)] == -1)
4957 {
4958 same_regs[REGNO (x)] = REGNO (y);
4959 num_same_regs++;
4960
4961 /* If this is the first time we are seeing a register on the `Y'
4962 side, see if it is the last use. If not, we can't thread the
4963 jump, so mark it as not equivalent. */
4964 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
4965 return 0;
4966
4967 return 1;
4968 }
4969 else
4970 return (same_regs[REGNO (x)] == REGNO (y));
4971
4972 break;
4973
4974 case MEM:
4975 /* If memory modified or either volatile, not equivalent.
4976 Else, check address. */
4977 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4978 return 0;
4979
4980 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4981
4982 case ASM_INPUT:
4983 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4984 return 0;
4985
4986 break;
4987
4988 case SET:
4989 /* Cancel a pending `same_regs' if setting equivalenced registers.
4990 Then process source. */
4991 if (GET_CODE (SET_DEST (x)) == REG
4992 && GET_CODE (SET_DEST (y)) == REG)
4993 {
4994 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
4995 {
4996 same_regs[REGNO (SET_DEST (x))] = -1;
4997 num_same_regs--;
4998 }
4999 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
5000 return 0;
5001 }
5002 else
5003 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
5004 return 0;
5005
5006 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
5007
5008 case LABEL_REF:
5009 return XEXP (x, 0) == XEXP (y, 0);
5010
5011 case SYMBOL_REF:
5012 return XSTR (x, 0) == XSTR (y, 0);
5013
5014 default:
5015 break;
5016 }
5017
5018 if (x == y)
5019 return 1;
5020
5021 fmt = GET_RTX_FORMAT (code);
5022 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5023 {
5024 switch (fmt[i])
5025 {
5026 case 'w':
5027 if (XWINT (x, i) != XWINT (y, i))
5028 return 0;
5029 break;
5030
5031 case 'n':
5032 case 'i':
5033 if (XINT (x, i) != XINT (y, i))
5034 return 0;
5035 break;
5036
5037 case 'V':
5038 case 'E':
5039 /* Two vectors must have the same length. */
5040 if (XVECLEN (x, i) != XVECLEN (y, i))
5041 return 0;
5042
5043 /* And the corresponding elements must match. */
5044 for (j = 0; j < XVECLEN (x, i); j++)
5045 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
5046 XVECEXP (y, i, j), yinsn) == 0)
5047 return 0;
5048 break;
5049
5050 case 'e':
5051 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
5052 return 0;
5053 break;
5054
5055 case 'S':
5056 case 's':
5057 if (strcmp (XSTR (x, i), XSTR (y, i)))
5058 return 0;
5059 break;
5060
5061 case 'u':
5062 /* These are just backpointers, so they don't matter. */
5063 break;
5064
5065 case '0':
5066 break;
5067
5068 /* It is believed that rtx's at this level will never
5069 contain anything but integers and other rtx's,
5070 except for within LABEL_REFs and SYMBOL_REFs. */
5071 default:
5072 abort ();
5073 }
5074 }
5075 return 1;
5076 }
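
/* Illustrative sketch of the pseudo-register equivalence: comparing the
   branch conditions (eq (reg:SI 101) (const_int 0)) and
   (eq (reg:SI 103) (const_int 0)) records same_regs[101] = 103.  When
   the backward walk in thread_jumps then reaches the matching setters

     (set (reg:SI 101) (mem:SI (reg:SI 100)))
     (set (reg:SI 103) (mem:SI (reg:SI 102)))

   the SET case above cancels that pending equivalence, and comparing the
   sources records same_regs[100] = 102 in its place.  Threading is
   abandoned if any equivalences are still pending when the walk reaches
   the target of the first branch.  */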
5077 \f
5078
5079 #ifndef HAVE_cc0
5080 /* Return the insn in front of which NEW can safely be inserted, starting
5081 the search at the jump insn INSN.  Return 0 if it is not safe to do this
5082 jump optimization.  Note that NEW must contain a single set.  */
5083
5084 static rtx
5085 find_insert_position (insn, new)
5086 rtx insn;
5087 rtx new;
5088 {
5089 int i;
5090 rtx prev;
5091
5092 /* If NEW has no clobbers (it is not a PARALLEL), it is safe to insert it before INSN. */
5093 if (GET_CODE (PATTERN (new)) != PARALLEL)
5094 return insn;
5095
5096 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5097 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5098 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5099 insn))
5100 break;
5101
5102 if (i < 0)
5103 return insn;
5104
5105 /* There is a good chance that the previous insn PREV sets the thing
5106 being clobbered (often the CC in a hard reg). If PREV does not
5107 use what NEW sets, we can insert NEW before PREV. */
5108
5109 prev = prev_active_insn (insn);
5110 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5111 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5112 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5113 insn)
5114 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5115 prev))
5116 return 0;
5117
5118 return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
5119 }
5120 #endif /* !HAVE_cc0 */