/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This is the jump-optimization pass of the compiler.
   It is run two or three times: once before cse, sometimes once after cse,
   and once after reload (before final).

   jump_optimize deletes unreachable code and labels that are not used.
   It also deletes jumps that jump to the following insn,
   and simplifies jumps around unconditional jumps and jumps
   to unconditional jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   Optionally, cross-jumping can be done.  Currently it is done
   only the last time (when after reload and before final).
   In fact, the code for cross-jumping now assumes that register
   allocation has been done, since it uses `rtx_renumbered_equal_p'.

   Jump optimization is done after cse when cse's constant-propagation
   causes jumps to become unconditional or to be deleted.

   Unreachable loops are not detected here, because the labels
   have references and the insns appear reachable from the labels.
   find_basic_blocks in flow.c finds and deletes such loops.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "recog.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"

/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */

static rtx *jump_chain;
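
/* For illustration, all the unconditional jumps to a label L can be
   visited by following the chain, as the routines below do:

     rtx x;
     for (x = jump_chain[INSN_UID (L)]; x; x = jump_chain[INSN_UID (x)])
       ... process unconditional jump X ...

   New entries are pushed on the front of a label's chain, so the most
   recently recorded jump is visited first.  */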

/* List of labels referred to from initializers.
   These can never be deleted.  */
rtx forced_labels;

/* Maximum index in jump_chain.  */

static int max_jump_chain;

/* Set nonzero by jump_optimize if control can fall through
   to the end of the function.  */
int can_reach_end;

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C,
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;

static int init_label_info PROTO((rtx));
static void delete_barrier_successors PROTO((rtx));
static void mark_all_labels PROTO((rtx, int));
static rtx delete_unreferenced_labels PROTO((rtx));
static void delete_noop_moves PROTO((rtx));
static int calculate_can_reach_end PROTO((rtx, int, int));
static int duplicate_loop_exit_test PROTO((rtx));
static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump PROTO((rtx, rtx, rtx));
static int jump_back_p PROTO((rtx, rtx));
static int tension_vector_labels PROTO((rtx, int));
static void mark_jump_label PROTO((rtx, rtx, int));
static void delete_computation PROTO((rtx));
static void delete_from_jump_chain PROTO((rtx));
static int delete_labelref_insn PROTO((rtx, rtx, int));
static void mark_modified_reg PROTO((rtx, rtx));
static void redirect_tablejump PROTO((rtx, rtx));
#ifndef HAVE_cc0
static rtx find_insert_position PROTO((rtx, rtx));
#endif

/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is 1, detect matching code
   before a jump and its destination and unify them.
   If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

   If NOOP_MOVES is nonzero, delete no-op move insns.

   If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
   after regscan, and it is safe to use regno_first_uid and regno_last_uid.

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.  */
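
/* A hypothetical example call, for orientation only (the real callers
   live in toplev.c and are not shown in this file): running this pass
   right after regscan, with no cross-jumping and no no-op move removal,
   might look like

     jump_optimize (get_insns (), 0, 0, 1);

   The actual arguments used by the compiler driver may differ.  */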

void
jump_optimize (f, cross_jump, noop_moves, after_regscan)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
{
  register rtx insn, next;
  int changed;
  int old_max_reg;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;

  cross_jump_death_matters = (cross_jump == 2);
  max_uid = init_label_info (f) + 1;

  /* If we are performing cross jump optimizations, then initialize
     tables mapping UIDs to EH regions to avoid incorrect movement
     of insns from one EH region to another.  */
  if (flag_exceptions && cross_jump)
    init_insn_eh_region (f, max_uid);

  delete_barrier_successors (f);

  /* Leave some extra room for labels and duplicate exit test insns
     we make.  */
  max_jump_chain = max_uid * 14 / 10;
  jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
  bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));

  mark_all_labels (f, cross_jump);

  /* Keep track of labels used from static data;
     they cannot ever be deleted.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  check_exception_handler_labels ();

  /* Keep track of labels used for marking handlers for exception
     regions; they cannot usually be deleted.  */

  for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  exception_optimize ();

  last_insn = delete_unreferenced_labels (f);

  if (!optimize)
    {
      can_reach_end = calculate_can_reach_end (last_insn, 1, 0);

      /* Zero the "deleted" flag of all the "deleted" insns.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
        INSN_DELETED_P (insn) = 0;

      /* Show that the jump chain is not valid.  */
      jump_chain = 0;
      return;
    }

#ifdef HAVE_return
  if (HAVE_return)
    {
      /* If we fall through to the epilogue, see if we can insert a RETURN insn
         in front of it.  If the machine allows it at this point (we might be
         after reload for a leaf routine), it will improve optimization for it
         to be there.  */
      insn = get_last_insn ();
      while (insn && GET_CODE (insn) == NOTE)
        insn = PREV_INSN (insn);

      if (insn && GET_CODE (insn) != BARRIER)
        {
          emit_jump_insn (gen_return ());
          emit_barrier ();
        }
    }
#endif

  if (noop_moves)
    delete_noop_moves (f);

  /* If we haven't yet gotten to reload and we have just run regscan,
     delete any insn that sets a register that isn't used elsewhere.
     This helps some of the optimizations below by having fewer insns
     being jumped around.  */

  if (! reload_completed && after_regscan)
    for (insn = f; insn; insn = next)
      {
        rtx set = single_set (insn);

        next = NEXT_INSN (insn);

        if (set && GET_CODE (SET_DEST (set)) == REG
            && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
            && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
            /* We use regno_last_note_uid so as not to delete the setting
               of a reg that's used in notes.  A subsequent optimization
               might arrange to use that reg for real.  */
            && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
            && ! side_effects_p (SET_SRC (set))
            && ! find_reg_note (insn, REG_RETVAL, 0))
          delete_insn (insn);
      }

  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  changed = 1;
  old_max_reg = max_reg_num ();
  while (changed)
    {
      changed = 0;

      for (insn = f; insn; insn = next)
        {
          rtx reallabelprev;
          rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
          rtx nlabel;
          int this_is_simplejump, this_is_condjump, reversep = 0;
          int this_is_condjump_in_parallel;

#if 0
          /* If NOT the first iteration, if this is the last jump pass
             (just before final), do the special peephole optimizations.
             Avoiding the first iteration gives ordinary jump opts
             a chance to work before peephole opts.  */

          if (reload_completed && !first && !flag_no_peephole)
            if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
              peephole (insn);
#endif

          /* That could have deleted some insns after INSN, so check now
             what the following insn is.  */

          next = NEXT_INSN (insn);

          /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
             jump.  Try to optimize by duplicating the loop exit test if so.
             This is only safe immediately after regscan, because it uses
             the values of regno_first_uid and regno_last_uid.  */
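          /* For instance (an illustrative shape; the real work is done in
             duplicate_loop_exit_test): a loop emitted as

                 NOTE_INSN_LOOP_BEG
                 jump TEST
               TOP:  ...body...
               TEST: if (cond) jump TOP

             gets a copy of the exit test placed in front of the initial
             jump, so the body is bypassed entirely when the loop would
             iterate zero times.  */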
          if (after_regscan && GET_CODE (insn) == NOTE
              && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
              && (temp1 = next_nonnote_insn (insn)) != 0
              && simplejump_p (temp1))
            {
              temp = PREV_INSN (insn);
              if (duplicate_loop_exit_test (insn))
                {
                  changed = 1;
                  next = NEXT_INSN (temp);
                  continue;
                }
            }

          if (GET_CODE (insn) != JUMP_INSN)
            continue;

          this_is_simplejump = simplejump_p (insn);
          this_is_condjump = condjump_p (insn);
          this_is_condjump_in_parallel = condjump_in_parallel_p (insn);

          /* Tension the labels in dispatch tables.  */

          if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
            changed |= tension_vector_labels (PATTERN (insn), 0);
          if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            changed |= tension_vector_labels (PATTERN (insn), 1);

          /* If a dispatch table always goes to the same place,
             get rid of it and replace the insn that uses it.  */

          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            {
              int i;
              rtx pat = PATTERN (insn);
              int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
              int len = XVECLEN (pat, diff_vec_p);
              rtx dispatch = prev_real_insn (insn);

              for (i = 0; i < len; i++)
                if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
                    != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
                  break;
              if (i == len
                  && dispatch != 0
                  && GET_CODE (dispatch) == JUMP_INSN
                  && JUMP_LABEL (dispatch) != 0
                  /* Don't mess with a casesi insn.  */
                  && !(GET_CODE (PATTERN (dispatch)) == SET
                       && (GET_CODE (SET_SRC (PATTERN (dispatch)))
                           == IF_THEN_ELSE))
                  && next_real_insn (JUMP_LABEL (dispatch)) == insn)
                {
                  redirect_tablejump (dispatch,
                                      XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
                  changed = 1;
                }
            }

          reallabelprev = prev_active_insn (JUMP_LABEL (insn));

          /* If a jump references the end of the function, try to turn
             it into a RETURN insn, possibly a conditional one.  */
          if (JUMP_LABEL (insn)
              && (next_active_insn (JUMP_LABEL (insn)) == 0
                  || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
                     == RETURN))
            changed |= redirect_jump (insn, NULL_RTX);

          /* Detect jump to following insn.  */
          if (reallabelprev == insn && condjump_p (insn))
            {
              next = next_real_insn (JUMP_LABEL (insn));
              delete_jump (insn);
              changed = 1;
              continue;
            }

          /* If we have an unconditional jump preceded by a USE, try to put
             the USE before the target and jump there.  This simplifies many
             of the optimizations below since we don't have to worry about
             dealing with these USE insns.  We only do this if the label
             being branched to already has the identical USE or if code
             never falls through to that label.  */

          if (this_is_simplejump
              && (temp = prev_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
              && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
              && (GET_CODE (temp1) == BARRIER
                  || (GET_CODE (temp1) == INSN
                      && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
              /* Don't do this optimization if we have a loop containing only
                 the USE instruction, and the loop start label has a usage
                 count of 1.  This is because we will redo this optimization
                 every time through the outer loop, and jump opt will never
                 exit.  */
              && ! ((temp2 = prev_nonnote_insn (temp)) != 0
                    && temp2 == JUMP_LABEL (insn)
                    && LABEL_NUSES (temp2) == 1))
            {
              if (GET_CODE (temp1) == BARRIER)
                {
                  emit_insn_after (PATTERN (temp), temp1);
                  temp1 = NEXT_INSN (temp1);
                }

              delete_insn (temp);
              redirect_jump (insn, get_label_before (temp1));
              reallabelprev = prev_real_insn (temp1);
              changed = 1;
            }

          /* Simplify if (...) x = a; else x = b; by converting it
             to x = b; if (...) x = a;
             if B is sufficiently simple, the test doesn't involve X,
             and nothing in the test modifies B or X.

             If we have small register classes, we also can't do this if X
             is a hard register.

             If the "x = b;" insn has any REG_NOTES, we don't do this because
             of the possibility that we are running after CSE and there is a
             REG_EQUAL note that is only valid if the branch has already been
             taken.  If we move the insn with the REG_EQUAL note, we may
             fold the comparison to always be false in a later CSE pass.
             (We could also delete the REG_NOTES when moving the insn, but it
             seems simpler to not move it.)  An exception is that we can move
             the insn if the only note is a REG_EQUAL or REG_EQUIV whose
             value is the same as "b".

             INSN is the branch over the `else' part.

             We set:

             TEMP to the jump insn preceding "x = a;"
             TEMP1 to X
             TEMP2 to the insn that sets "x = b;"
             TEMP3 to the insn that sets "x = a;"
             TEMP4 to the set of "x = b";  */
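
          /* An illustrative shape of the insn stream this recognizes
             (the labels are hypothetical):

                 TEMP:   if (!cond) goto L1   ; skips over "x = a;"
                 TEMP3:  x = a
                 INSN:   goto L2              ; jumps over "x = b;"
               L1:
                 TEMP2:  x = b
               L2:

             which becomes "x = b; if (!cond) goto L1; x = a; L1:" with
             the jump around the else arm deleted.  */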

          if (this_is_simplejump
              && (temp3 = prev_active_insn (insn)) != 0
              && GET_CODE (temp3) == INSN
              && (temp4 = single_set (temp3)) != 0
              && GET_CODE (temp1 = SET_DEST (temp4)) == REG
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
              && (temp2 = next_active_insn (insn)) != 0
              && GET_CODE (temp2) == INSN
              && (temp4 = single_set (temp2)) != 0
              && rtx_equal_p (SET_DEST (temp4), temp1)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && (REG_NOTES (temp2) == 0
                  || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
                       || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
                      && XEXP (REG_NOTES (temp2), 1) == 0
                      && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
                                      SET_SRC (temp4))))
              && (temp = prev_active_insn (temp3)) != 0
              && condjump_p (temp) && ! simplejump_p (temp)
              /* TEMP must skip over the "x = a;" insn */
              && prev_real_insn (JUMP_LABEL (temp)) == insn
              && no_labels_between_p (insn, JUMP_LABEL (temp))
              /* There must be no other entries to the "x = b;" insn.  */
              && no_labels_between_p (JUMP_LABEL (temp), temp2)
              /* INSN must either branch to the insn after TEMP2 or the insn
                 after TEMP2 must branch to the same place as INSN.  */
              && (reallabelprev == temp2
                  || ((temp5 = next_active_insn (temp2)) != 0
                      && simplejump_p (temp5)
                      && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
            {
              /* The test expression, X, may be a complicated test with
                 multiple branches.  See if we can find all the uses of
                 the label that TEMP branches to without hitting a CALL_INSN
                 or a jump to somewhere else.  */
              rtx target = JUMP_LABEL (temp);
              int nuses = LABEL_NUSES (target);
              rtx p;
#ifdef HAVE_cc0
              rtx q;
#endif

              /* Set P to the first jump insn that goes around "x = a;".  */
              for (p = temp; nuses && p; p = prev_nonnote_insn (p))
                {
                  if (GET_CODE (p) == JUMP_INSN)
                    {
                      if (condjump_p (p) && ! simplejump_p (p)
                          && JUMP_LABEL (p) == target)
                        {
                          nuses--;
                          if (nuses == 0)
                            break;
                        }
                      else
                        break;
                    }
                  else if (GET_CODE (p) == CALL_INSN)
                    break;
                }

#ifdef HAVE_cc0
              /* We cannot insert anything between a set of cc and its use
                 so if P uses cc0, we must back up to the previous insn.  */
              q = prev_nonnote_insn (p);
              if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
                  && sets_cc0_p (PATTERN (q)))
                p = q;
#endif

              if (p)
                p = PREV_INSN (p);

              /* If we found all the uses and there was no data conflict, we
                 can move the assignment unless we can branch into the middle
                 from somewhere.  */
              if (nuses == 0 && p
                  && no_labels_between_p (p, insn)
                  && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
                  && ! reg_set_between_p (temp1, p, temp3)
                  && (GET_CODE (SET_SRC (temp4)) == CONST_INT
                      || ! modified_between_p (SET_SRC (temp4), p, temp2))
                  /* Verify that registers used by the jump are not clobbered
                     by the instruction being moved.  */
                  && ! regs_set_between_p (PATTERN (temp),
                                           PREV_INSN (temp2),
                                           NEXT_INSN (temp2)))
                {
                  emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
                  delete_insn (temp2);

                  /* Set NEXT to an insn that we know won't go away.  */
                  next = next_active_insn (insn);

                  /* Delete the jump around the set.  Note that we must do
                     this before we redirect the test jumps so that it won't
                     delete the code immediately following the assignment
                     we moved (which might be a jump).  */

                  delete_insn (insn);

                  /* We either have two consecutive labels or a jump to
                     a jump, so adjust all the JUMP_INSNs to branch to where
                     INSN branches to.  */
                  for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
                    if (GET_CODE (p) == JUMP_INSN)
                      redirect_jump (p, target);

                  changed = 1;
                  continue;
                }
            }

          /* Simplify if (...) { x = a; goto l; } x = b; by converting it
             to x = a; if (...) goto l; x = b;
             if A is sufficiently simple, the test doesn't involve X,
             and nothing in the test modifies A or X.

             If we have small register classes, we also can't do this if X
             is a hard register.

             If the "x = a;" insn has any REG_NOTES, we don't do this because
             of the possibility that we are running after CSE and there is a
             REG_EQUAL note that is only valid if the branch has already been
             taken.  If we move the insn with the REG_EQUAL note, we may
             fold the comparison to always be false in a later CSE pass.
             (We could also delete the REG_NOTES when moving the insn, but it
             seems simpler to not move it.)  An exception is that we can move
             the insn if the only note is a REG_EQUAL or REG_EQUIV whose
             value is the same as "a".

             INSN is the goto.

             We set:

             TEMP to the jump insn preceding "x = a;"
             TEMP1 to X
             TEMP2 to the insn that sets "x = b;"
             TEMP3 to the insn that sets "x = a;"
             TEMP4 to the set of "x = a";  */
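
          /* Illustrative shape (labels hypothetical):

                 TEMP:   if (!cond) goto L1   ; skips over "x = a;"
                 TEMP3:  x = a
                 INSN:   goto L2              ; the "goto l;"
               L1:
                 TEMP2:  x = b

             which becomes "x = a; if (cond) goto L2; x = b;", i.e. the
             assignment is hoisted and the conditional jump inverted.  */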

          if (this_is_simplejump
              && (temp2 = next_active_insn (insn)) != 0
              && GET_CODE (temp2) == INSN
              && (temp4 = single_set (temp2)) != 0
              && GET_CODE (temp1 = SET_DEST (temp4)) == REG
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
              && (temp3 = prev_active_insn (insn)) != 0
              && GET_CODE (temp3) == INSN
              && (temp4 = single_set (temp3)) != 0
              && rtx_equal_p (SET_DEST (temp4), temp1)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && (REG_NOTES (temp3) == 0
                  || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
                       || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
                      && XEXP (REG_NOTES (temp3), 1) == 0
                      && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
                                      SET_SRC (temp4))))
              && (temp = prev_active_insn (temp3)) != 0
              && condjump_p (temp) && ! simplejump_p (temp)
              /* TEMP must skip over the "x = a;" insn */
              && prev_real_insn (JUMP_LABEL (temp)) == insn
              && no_labels_between_p (temp, insn))
            {
              rtx prev_label = JUMP_LABEL (temp);
              rtx insert_after = prev_nonnote_insn (temp);

#ifdef HAVE_cc0
              /* We cannot insert anything between a set of cc and its use.  */
              if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
                  && sets_cc0_p (PATTERN (insert_after)))
                insert_after = prev_nonnote_insn (insert_after);
#endif
              ++LABEL_NUSES (prev_label);

              if (insert_after
                  && no_labels_between_p (insert_after, temp)
                  && ! reg_referenced_between_p (temp1, insert_after, temp3)
                  && ! reg_referenced_between_p (temp1, temp3,
                                                 NEXT_INSN (temp2))
                  && ! reg_set_between_p (temp1, insert_after, temp)
                  && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
                  /* Verify that registers used by the jump are not clobbered
                     by the instruction being moved.  */
                  && ! regs_set_between_p (PATTERN (temp),
                                           PREV_INSN (temp3),
                                           NEXT_INSN (temp3))
                  && invert_jump (temp, JUMP_LABEL (insn)))
                {
                  emit_insn_after_with_line_notes (PATTERN (temp3),
                                                   insert_after, temp3);
                  delete_insn (temp3);
                  delete_insn (insn);
                  /* Set NEXT to an insn that we know won't go away.  */
                  next = temp2;
                  changed = 1;
                }
              if (prev_label && --LABEL_NUSES (prev_label) == 0)
                delete_insn (prev_label);
              if (changed)
                continue;
            }

#ifndef HAVE_cc0
          /* If we have if (...) x = exp; and branches are expensive,
             EXP is a single insn, does not have any side effects, cannot
             trap, and is not too costly, convert this to
             t = exp; if (...) x = t;

             Don't do this when we have CC0 because it is unlikely to help
             and we'd need to worry about where to place the new insn and
             the potential for conflicts.  We also can't do this when we have
             notes on the insn for the same reason as above.

             We set:

             TEMP to the "x = exp;" insn.
             TEMP1 to the single set in the "x = exp;" insn.
             TEMP2 to "x".  */

          if (! reload_completed
              && this_is_condjump && ! this_is_simplejump
              && BRANCH_COST >= 3
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && REG_NOTES (temp) == 0
              && (reallabelprev == temp
                  || ((temp2 = next_active_insn (temp)) != 0
                      && simplejump_p (temp2)
                      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
              && (temp1 = single_set (temp)) != 0
              && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
              && GET_CODE (SET_SRC (temp1)) != REG
              && GET_CODE (SET_SRC (temp1)) != SUBREG
              && GET_CODE (SET_SRC (temp1)) != CONST_INT
              && ! side_effects_p (SET_SRC (temp1))
              && ! may_trap_p (SET_SRC (temp1))
              && rtx_cost (SET_SRC (temp1), SET) < 10)
            {
              rtx new = gen_reg_rtx (GET_MODE (temp2));

              if ((temp3 = find_insert_position (insn, temp))
                  && validate_change (temp, &SET_DEST (temp1), new, 0))
                {
                  next = emit_insn_after (gen_move_insn (temp2, new), insn);
                  emit_insn_after_with_line_notes (PATTERN (temp),
                                                   PREV_INSN (temp3), temp);
                  delete_insn (temp);
                  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

                  if (after_regscan)
                    {
                      reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
                      old_max_reg = max_reg_num ();
                    }
                }
            }

          /* Similarly, if it takes two insns to compute EXP but they
             have the same destination.  Here TEMP3 will be the second
             insn and TEMP4 the SET from that insn.  */

          if (! reload_completed
              && this_is_condjump && ! this_is_simplejump
              && BRANCH_COST >= 4
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && REG_NOTES (temp) == 0
              && (temp3 = next_nonnote_insn (temp)) != 0
              && GET_CODE (temp3) == INSN
              && REG_NOTES (temp3) == 0
              && (reallabelprev == temp3
                  || ((temp2 = next_active_insn (temp3)) != 0
                      && simplejump_p (temp2)
                      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
              && (temp1 = single_set (temp)) != 0
              && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
              && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
              && ! side_effects_p (SET_SRC (temp1))
              && ! may_trap_p (SET_SRC (temp1))
              && rtx_cost (SET_SRC (temp1), SET) < 10
              && (temp4 = single_set (temp3)) != 0
              && rtx_equal_p (SET_DEST (temp4), temp2)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && rtx_cost (SET_SRC (temp4), SET) < 10)
            {
              rtx new = gen_reg_rtx (GET_MODE (temp2));

              if ((temp5 = find_insert_position (insn, temp))
                  && (temp6 = find_insert_position (insn, temp3))
                  && validate_change (temp, &SET_DEST (temp1), new, 0))
                {
                  /* Use the earliest of temp5 and temp6.  */
                  if (temp5 != insn)
                    temp6 = temp5;
                  next = emit_insn_after (gen_move_insn (temp2, new), insn);
                  emit_insn_after_with_line_notes (PATTERN (temp),
                                                   PREV_INSN (temp6), temp);
                  emit_insn_after_with_line_notes
                    (replace_rtx (PATTERN (temp3), temp2, new),
                     PREV_INSN (temp6), temp3);
                  delete_insn (temp);
                  delete_insn (temp3);
                  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

                  if (after_regscan)
                    {
                      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
                      old_max_reg = max_reg_num ();
                    }
                }
            }

          /* Finally, handle the case where two insns are used to
             compute EXP but a temporary register is used.  Here we must
             ensure that the temporary register is not used anywhere else.  */

          if (! reload_completed
              && after_regscan
              && this_is_condjump && ! this_is_simplejump
              && BRANCH_COST >= 4
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && REG_NOTES (temp) == 0
              && (temp3 = next_nonnote_insn (temp)) != 0
              && GET_CODE (temp3) == INSN
              && REG_NOTES (temp3) == 0
              && (reallabelprev == temp3
                  || ((temp2 = next_active_insn (temp3)) != 0
                      && simplejump_p (temp2)
                      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
              && (temp1 = single_set (temp)) != 0
              && (temp5 = SET_DEST (temp1),
                  (GET_CODE (temp5) == REG
                   || (GET_CODE (temp5) == SUBREG
                       && (temp5 = SUBREG_REG (temp5),
                           GET_CODE (temp5) == REG))))
              && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
              && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
              && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
              && ! side_effects_p (SET_SRC (temp1))
              && ! may_trap_p (SET_SRC (temp1))
              && rtx_cost (SET_SRC (temp1), SET) < 10
              && (temp4 = single_set (temp3)) != 0
              && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
              && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
              && rtx_equal_p (SET_DEST (temp4), temp2)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && rtx_cost (SET_SRC (temp4), SET) < 10)
            {
              rtx new = gen_reg_rtx (GET_MODE (temp2));

              if ((temp5 = find_insert_position (insn, temp))
                  && (temp6 = find_insert_position (insn, temp3))
                  && validate_change (temp3, &SET_DEST (temp4), new, 0))
                {
                  /* Use the earliest of temp5 and temp6.  */
                  if (temp5 != insn)
                    temp6 = temp5;
                  next = emit_insn_after (gen_move_insn (temp2, new), insn);
                  emit_insn_after_with_line_notes (PATTERN (temp),
                                                   PREV_INSN (temp6), temp);
                  emit_insn_after_with_line_notes (PATTERN (temp3),
                                                   PREV_INSN (temp6), temp3);
                  delete_insn (temp);
                  delete_insn (temp3);
                  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

                  if (after_regscan)
                    {
                      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
                      old_max_reg = max_reg_num ();
                    }
                }
            }
#endif /* HAVE_cc0 */

          /* Try to use a conditional move (if the target has them), or a
             store-flag insn.  The general case is:

             1) x = a; if (...) x = b; and
             2) if (...) x = b;

             If the jump would be faster, the machine should not have defined
             the movcc or scc insns!  These cases are often made by the
             previous optimization.

             The second case is treated as x = x; if (...) x = b;.

             INSN here is the jump around the store.  We set:

             TEMP to the "x = b;" insn.
             TEMP1 to X.
             TEMP2 to B.
             TEMP3 to A (X in the second case).
             TEMP4 to the condition being tested.
             TEMP5 to the earliest insn used to find the condition.  */
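
          /* In source terms the transformation is, roughly,

                 x = a; if (cond) x = b;   ==>   x = cond ? b : a;

             done with a conditional move when the target has one, else with
             a store-flag sequence; e.g. when STORE_FLAG_VALUE is -1 and
             both A and B are nonzero, something like

                 t = -(cond);  x = (t & b) | (~t & a);

             (an illustrative sketch; the exact sequence comes from
             emit_store_flag and the expand_* routines used below).  */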

          if (/* We can't do this after reload has completed.  */
              ! reload_completed
              && this_is_condjump && ! this_is_simplejump
              /* Set TEMP to the "x = b;" insn.  */
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && GET_CODE (PATTERN (temp)) == SET
              && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
              && ! side_effects_p (temp2 = SET_SRC (PATTERN (temp)))
              && ! may_trap_p (temp2)
              /* Allow either form, but prefer the former if both apply.
                 There is no point in using the old value of TEMP1 if
                 it is a register, since cse will alias them.  It can
                 lose if the old value were a hard register since CSE
                 won't replace hard registers.  Avoid using TEMP3 if
                 small register classes and it is a hard register.  */
              && (((temp3 = reg_set_last (temp1, insn)) != 0
                   && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
                         && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
                  /* Make the latter case look like x = x; if (...) x = b;  */
                  || (temp3 = temp1, 1))
              /* INSN must either branch to the insn after TEMP or the insn
                 after TEMP must branch to the same place as INSN.  */
              && (reallabelprev == temp
                  || ((temp4 = next_active_insn (temp)) != 0
                      && simplejump_p (temp4)
                      && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
              && (temp4 = get_condition (insn, &temp5)) != 0
              /* We must be comparing objects whose modes imply the size.
                 We could handle BLKmode if (1) emit_store_flag could
                 and (2) we could find the size reliably.  */
              && GET_MODE (XEXP (temp4, 0)) != BLKmode
              /* Even if branches are cheap, the store_flag optimization
                 can win when the operation to be performed can be
                 expressed directly.  */
#ifdef HAVE_cc0
              /* If the previous insn sets CC0 and something else, we can't
                 do this since we are going to delete that insn.  */

              && ! ((temp6 = prev_nonnote_insn (insn)) != 0
                    && GET_CODE (temp6) == INSN
                    && (sets_cc0_p (PATTERN (temp6)) == -1
                        || (sets_cc0_p (PATTERN (temp6)) == 1
                            && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
#endif
              )
            {
#ifdef HAVE_conditional_move
              /* First try a conditional move.  */
              {
                enum rtx_code code = GET_CODE (temp4);
                rtx var = temp1;
                rtx cond0, cond1, aval, bval;
                rtx target;

                /* Copy the compared variables into cond0 and cond1, so that
                   any side effects performed in or after the old comparison,
                   will not affect our compare which will come later.  */
                /* ??? Is it possible to just use the comparison in the jump
                   insn?  After all, we're going to delete it.  We'd have
                   to modify emit_conditional_move to take a comparison rtx
                   instead or write a new function.  */
                cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
                /* We want the target to be able to simplify comparisons with
                   zero (and maybe other constants as well), so don't create
                   pseudos for them.  There's no need to either.  */
                if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
                    || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
                  cond1 = XEXP (temp4, 1);
                else
                  cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));

                aval = temp3;
                bval = temp2;

                start_sequence ();
                target = emit_conditional_move (var, code,
                                                cond0, cond1, VOIDmode,
                                                aval, bval, GET_MODE (var),
                                                (code == LTU || code == GEU
                                                 || code == LEU || code == GTU));

                if (target)
                  {
                    rtx seq1, seq2, last;

                    /* Save the conditional move sequence but don't emit it
                       yet.  On some machines, like the alpha, it is possible
                       that temp5 == insn, so next generate the sequence that
                       saves the compared values and then emit both
                       sequences ensuring seq1 occurs before seq2.  */
                    seq2 = get_insns ();
                    end_sequence ();

                    /* Now that we can't fail, generate the copy insns that
                       preserve the compared values.  */
                    start_sequence ();
                    emit_move_insn (cond0, XEXP (temp4, 0));
                    if (cond1 != XEXP (temp4, 1))
                      emit_move_insn (cond1, XEXP (temp4, 1));
                    seq1 = get_insns ();
                    end_sequence ();

                    emit_insns_before (seq1, temp5);
                    /* Insert conditional move after insn, to be sure that
                       the jump and a possible compare won't be separated.  */
                    last = emit_insns_after (seq2, insn);

                    /* ??? We can also delete the insn that sets X to A.
                       Flow will do it too though.  */
                    delete_insn (temp);
                    next = NEXT_INSN (insn);
                    delete_jump (insn);

                    if (after_regscan)
                      {
                        reg_scan_update (seq1, NEXT_INSN (last), old_max_reg);
                        old_max_reg = max_reg_num ();
                      }

                    changed = 1;
                    continue;
                  }
                else
                  end_sequence ();
              }
#endif

              /* That didn't work, try a store-flag insn.

                 We further divide the cases into:

                 1) x = a; if (...) x = b; and either A or B is zero,
                 2) if (...) x = 0; and jumps are expensive,
                 3) x = a; if (...) x = b; and A and B are constants where all
                    the set bits in A are also set in B and jumps are expensive,
                 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
                    more expensive, and
                 5) if (...) x = b; if jumps are even more expensive.  */

              if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
                  && ((GET_CODE (temp3) == CONST_INT)
                      /* Make the latter case look like
                         x = x; if (...) x = 0;  */
                      || (temp3 = temp1,
                          ((BRANCH_COST >= 2
                            && temp2 == const0_rtx)
                           || BRANCH_COST >= 3)))
                  /* If B is zero, OK; if A is zero, can only do (1) if we
                     can reverse the condition.  See if (3) applies possibly
                     by reversing the condition.  Prefer reversing to (4) when
                     branches are very expensive.  */
                  && (((BRANCH_COST >= 2
                        || STORE_FLAG_VALUE == -1
                        || (STORE_FLAG_VALUE == 1
                            /* Check that the mask is a power of two,
                               so that it can probably be generated
                               with a shift.  */
                            && GET_CODE (temp3) == CONST_INT
                            && exact_log2 (INTVAL (temp3)) >= 0))
                       && (reversep = 0, temp2 == const0_rtx))
                      || ((BRANCH_COST >= 2
                           || STORE_FLAG_VALUE == -1
                           || (STORE_FLAG_VALUE == 1
                               && GET_CODE (temp2) == CONST_INT
                               && exact_log2 (INTVAL (temp2)) >= 0))
                          && temp3 == const0_rtx
                          && (reversep = can_reverse_comparison_p (temp4, insn)))
                      || (BRANCH_COST >= 2
                          && GET_CODE (temp2) == CONST_INT
                          && GET_CODE (temp3) == CONST_INT
                          && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
                              || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
                                  && (reversep = can_reverse_comparison_p (temp4,
                                                                           insn)))))
                      || BRANCH_COST >= 3)
                  )
                {
                  enum rtx_code code = GET_CODE (temp4);
                  rtx uval, cval, var = temp1;
                  int normalizep;
                  rtx target;

                  /* If necessary, reverse the condition.  */
                  if (reversep)
                    code = reverse_condition (code), uval = temp2, cval = temp3;
                  else
                    uval = temp3, cval = temp2;

                  /* If CVAL is non-zero, normalize to -1.  Otherwise, if UVAL
                     is the constant 1, it is best to just compute the result
                     directly.  If UVAL is constant and STORE_FLAG_VALUE
                     includes all of its bits, it is best to compute the flag
                     value unnormalized and `and' it with UVAL.  Otherwise,
                     normalize to -1 and `and' with UVAL.  */
                  normalizep = (cval != const0_rtx ? -1
                                : (uval == const1_rtx ? 1
                                   : (GET_CODE (uval) == CONST_INT
                                      && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
                                   ? 0 : -1));

                  /* We will be putting the store-flag insn immediately in
                     front of the comparison that was originally being done,
                     so we know all the variables in TEMP4 will be valid.
                     However, this might be in front of the assignment of
                     A to VAR.  If it is, it would clobber the store-flag
                     we will be emitting.

                     Therefore, emit into a temporary which will be copied to
                     VAR immediately after TEMP.  */

                  start_sequence ();
                  target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
                                            XEXP (temp4, 0), XEXP (temp4, 1),
                                            VOIDmode,
                                            (code == LTU || code == LEU
                                             || code == GEU || code == GTU),
                                            normalizep);
                  if (target)
                    {
                      rtx seq;
                      rtx before = insn;

                      seq = get_insns ();
                      end_sequence ();

                      /* Put the store-flag insns in front of the first insn
                         used to compute the condition to ensure that we
                         use the same values of them as the current
                         comparison.  However, the remainder of the insns we
                         generate will be placed directly in front of the
                         jump insn, in case any of the pseudos we use
                         are modified earlier.  */

                      emit_insns_before (seq, temp5);

                      start_sequence ();

                      /* Both CVAL and UVAL are non-zero.  */
                      if (cval != const0_rtx && uval != const0_rtx)
                        {
                          rtx tem1, tem2;

                          tem1 = expand_and (uval, target, NULL_RTX);
                          if (GET_CODE (cval) == CONST_INT
                              && GET_CODE (uval) == CONST_INT
                              && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
                            tem2 = cval;
                          else
                            {
                              tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
                                                  target, NULL_RTX, 0);
                              tem2 = expand_and (cval, tem2,
                                                 (GET_CODE (tem2) == REG
                                                  ? tem2 : 0));
                            }

                          /* If we usually make new pseudos, do so here.  This
                             turns out to help machines that have conditional
                             move insns.  */
                          /* ??? Conditional moves have already been handled.
                             This may be obsolete.  */

                          if (flag_expensive_optimizations)
                            target = 0;

                          target = expand_binop (GET_MODE (var), ior_optab,
                                                 tem1, tem2, target,
                                                 1, OPTAB_WIDEN);
                        }
                      else if (normalizep != 1)
                        {
                          /* We know that either CVAL or UVAL is zero.  If
                             UVAL is zero, negate TARGET and `and' with CVAL.
                             Otherwise, `and' with UVAL.  */
                          if (uval == const0_rtx)
                            {
                              target = expand_unop (GET_MODE (var), one_cmpl_optab,
                                                    target, NULL_RTX, 0);
                              uval = cval;
                            }

                          target = expand_and (uval, target,
                                               (GET_CODE (target) == REG
                                                && ! preserve_subexpressions_p ()
                                                ? target : NULL_RTX));
                        }

                      emit_move_insn (var, target);
                      seq = get_insns ();
                      end_sequence ();
#ifdef HAVE_cc0
                      /* If INSN uses CC0, we must not separate it from the
                         insn that sets cc0.  */
                      if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
                        before = prev_nonnote_insn (before);
#endif
                      emit_insns_before (seq, before);

                      delete_insn (temp);
                      next = NEXT_INSN (insn);
                      delete_jump (insn);

                      if (after_regscan)
                        {
                          reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
                          old_max_reg = max_reg_num ();
                        }

                      changed = 1;
                      continue;
                    }
                  else
                    end_sequence ();
                }
            }

          /* If branches are expensive, convert
             if (foo) bar++; to bar += (foo != 0);
             and similarly for "bar--;"

             INSN is the conditional branch around the arithmetic.  We set:

             TEMP to the arithmetic insn.
             TEMP1 to the SET doing the arithmetic.
             TEMP2 to the operand being incremented or decremented.
             TEMP3 to the condition being tested.
             TEMP4 to the earliest insn used to find the condition.  */
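
          /* E.g. (a source-level sketch):

                 if (a < b) bar++;   ==>   bar += (a < b);

             where the comparison result is materialized by emit_store_flag
             (normalized to 1) and then added to or subtracted from BAR.  */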

          if ((BRANCH_COST >= 2
#ifdef HAVE_incscc
               || HAVE_incscc
#endif
#ifdef HAVE_decscc
               || HAVE_decscc
#endif
               )
              && ! reload_completed
              && this_is_condjump && ! this_is_simplejump
              && (temp = next_nonnote_insn (insn)) != 0
              && (temp1 = single_set (temp)) != 0
              && (temp2 = SET_DEST (temp1),
                  GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
              && GET_CODE (SET_SRC (temp1)) == PLUS
              && (XEXP (SET_SRC (temp1), 1) == const1_rtx
                  || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
              && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
              && ! side_effects_p (temp2)
              && ! may_trap_p (temp2)
              /* INSN must either branch to the insn after TEMP or the insn
                 after TEMP must branch to the same place as INSN.  */
              && (reallabelprev == temp
                  || ((temp3 = next_active_insn (temp)) != 0
                      && simplejump_p (temp3)
                      && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
              && (temp3 = get_condition (insn, &temp4)) != 0
              /* We must be comparing objects whose modes imply the size.
                 We could handle BLKmode if (1) emit_store_flag could
                 and (2) we could find the size reliably.  */
              && GET_MODE (XEXP (temp3, 0)) != BLKmode
              && can_reverse_comparison_p (temp3, insn))
            {
              rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
              enum rtx_code code = reverse_condition (GET_CODE (temp3));

              start_sequence ();

              /* It must be the case that TEMP2 is not modified in the range
                 [TEMP4, INSN).  The one exception we make is if the insn
                 before INSN sets TEMP2 to something which is also unchanged
                 in that range.  In that case, we can move the initialization
                 into our sequence.  */

              if ((temp5 = prev_active_insn (insn)) != 0
                  && no_labels_between_p (temp5, insn)
                  && GET_CODE (temp5) == INSN
                  && (temp6 = single_set (temp5)) != 0
                  && rtx_equal_p (temp2, SET_DEST (temp6))
                  && (CONSTANT_P (SET_SRC (temp6))
                      || GET_CODE (SET_SRC (temp6)) == REG
                      || GET_CODE (SET_SRC (temp6)) == SUBREG))
                {
                  emit_insn (PATTERN (temp5));
                  init_insn = temp5;
                  init = SET_SRC (temp6);
                }

              if (CONSTANT_P (init)
                  || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
                target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
                                          XEXP (temp3, 0), XEXP (temp3, 1),
                                          VOIDmode,
                                          (code == LTU || code == LEU
                                           || code == GTU || code == GEU), 1);

              /* If we can do the store-flag, do the addition or
                 subtraction.  */

              if (target)
                target = expand_binop (GET_MODE (temp2),
                                       (XEXP (SET_SRC (temp1), 1) == const1_rtx
                                        ? add_optab : sub_optab),
                                       temp2, target, temp2, 0, OPTAB_WIDEN);

              if (target != 0)
                {
                  /* Put the result back in temp2 in case it isn't already.
                     Then replace the jump, possibly a CC0-setting insn in
                     front of the jump, and TEMP, with the sequence we have
                     made.  */

                  if (target != temp2)
                    emit_move_insn (temp2, target);

                  seq = get_insns ();
                  end_sequence ();

                  emit_insns_before (seq, temp4);
                  delete_insn (temp);

                  if (init_insn)
                    delete_insn (init_insn);

                  next = NEXT_INSN (insn);
#ifdef HAVE_cc0
                  delete_insn (prev_nonnote_insn (insn));
#endif
                  delete_insn (insn);

                  if (after_regscan)
                    {
                      reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
                      old_max_reg = max_reg_num ();
                    }

                  changed = 1;
                  continue;
                }
              else
                end_sequence ();
            }

          /* Simplify if (...) x = 1; else {...} if (x) ...
             We recognize this case scanning backwards as well.

             TEMP is the assignment to x;
             TEMP1 is the label at the head of the second if.  */
          /* ?? This should call get_condition to find the values being
             compared, instead of looking for a COMPARE insn when HAVE_cc0
             is not defined.  This would allow it to work on the m88k.  */
          /* ?? This optimization is only safe before cse is run if HAVE_cc0
             is not defined and the condition is tested by a separate compare
             insn.  This is because the code below assumes that the result
             of the compare dies in the following branch.

             Not only that, but there might be other insns between the
             compare and branch whose results are live.  Those insns need
             to be executed.

             A way to fix this is to move the insns at JUMP_LABEL (insn)
             to before INSN.  If we are running before flow, they will
             be deleted if they aren't needed.  But this doesn't work
             well after flow.

             This is really a special-case of jump threading, anyway.  The
             right thing to do is to replace this and jump threading with
             much simpler code in cse.

             This code has been turned off in the non-cc0 case in the
             meantime.  */

#ifdef HAVE_cc0
          else if (this_is_simplejump
                   /* Safe to skip USE and CLOBBER insns here
                      since they will not be deleted.  */
                   && (temp = prev_active_insn (insn))
                   && no_labels_between_p (temp, insn)
                   && GET_CODE (temp) == INSN
                   && GET_CODE (PATTERN (temp)) == SET
                   && GET_CODE (SET_DEST (PATTERN (temp))) == REG
                   && CONSTANT_P (SET_SRC (PATTERN (temp)))
                   && (temp1 = next_active_insn (JUMP_LABEL (insn)))
                   /* If we find that the next value tested is `x'
                      (TEMP1 is the insn where this happens), win.  */
                   && GET_CODE (temp1) == INSN
                   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
                   /* Does temp1 `tst' the value of x?  */
                   && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
                   && SET_DEST (PATTERN (temp1)) == cc0_rtx
                   && (temp1 = next_nonnote_insn (temp1))
#else
                   /* Does temp1 compare the value of x against zero?  */
                   && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
                   && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
                   && (XEXP (SET_SRC (PATTERN (temp1)), 0)
                       == SET_DEST (PATTERN (temp)))
                   && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
                   && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
#endif
                   && condjump_p (temp1))
            {
              /* Get the if_then_else from the condjump.  */
              rtx choice = SET_SRC (PATTERN (temp1));
              if (GET_CODE (choice) == IF_THEN_ELSE)
                {
                  enum rtx_code code = GET_CODE (XEXP (choice, 0));
                  rtx val = SET_SRC (PATTERN (temp));
                  rtx cond
                    = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
                                                     val, const0_rtx);
                  rtx ultimate;

                  if (cond == const_true_rtx)
                    ultimate = XEXP (choice, 1);
                  else if (cond == const0_rtx)
                    ultimate = XEXP (choice, 2);
                  else
                    ultimate = 0;

                  if (ultimate == pc_rtx)
                    ultimate = get_label_after (temp1);
                  else if (ultimate && GET_CODE (ultimate) != RETURN)
                    ultimate = XEXP (ultimate, 0);

                  if (ultimate && JUMP_LABEL (insn) != ultimate)
                    changed |= redirect_jump (insn, ultimate);
                }
            }
#endif

#if 0
          /* @@ This needs a bit of work before it will be right.

             Any type of comparison can be accepted for the first and
             second compare.  When rewriting the first jump, we must
             compute what conditions can reach label3, and use the
             appropriate code.  We cannot simply reverse/swap the code
             of the first jump.  In some cases, the second jump must be
             rewritten also.

             For example,
             < == converts to > ==
             < != converts to == >
             etc.

             If the code is written to only accept an '==' test for the second
             compare, then all that needs to be done is to swap the condition
             of the first branch.

             It is questionable whether we want this optimization anyway,
             since if the user wrote code like this knowing that the jump
             to label1 is taken most of the time, rewriting it this way
             gives slower code.  */
          /* @@ This should call get_condition to find the values being
             compared, instead of looking for a COMPARE insn when HAVE_cc0
             is not defined.  This would allow it to work on the m88k.  */
          /* @@ This optimization is only safe before cse is run if HAVE_cc0
             is not defined and the condition is tested by a separate compare
             insn.  This is because the code below assumes that the result
             of the compare dies in the following branch.  */

          /* Simplify  test a ~= b
                       condjump label1;
                       test a == b
                       condjump label2;
                       jump label3;
                       label1:

             rewriting as
                       test a ~~= b
                       condjump label3
                       test a == b
                       condjump label2
                       label1:

             where ~= is an inequality, e.g. >, and ~~= is the swapped
             inequality, e.g. <.

             We recognize this case scanning backwards.

             TEMP is the conditional jump to `label2';
             TEMP1 is the test for `a == b';
             TEMP2 is the conditional jump to `label1';
             TEMP3 is the test for `a ~= b'.  */
          else if (this_is_simplejump
                   && (temp = prev_active_insn (insn))
                   && no_labels_between_p (temp, insn)
                   && condjump_p (temp)
                   && (temp1 = prev_active_insn (temp))
                   && no_labels_between_p (temp1, temp)
                   && GET_CODE (temp1) == INSN
                   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
                   && sets_cc0_p (PATTERN (temp1)) == 1
#else
                   && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
                   && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
                   && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
#endif
                   && (temp2 = prev_active_insn (temp1))
                   && no_labels_between_p (temp2, temp1)
                   && condjump_p (temp2)
                   && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
                   && (temp3 = prev_active_insn (temp2))
                   && no_labels_between_p (temp3, temp2)
                   && GET_CODE (PATTERN (temp3)) == SET
                   && rtx_equal_p (SET_DEST (PATTERN (temp3)),
                                   SET_DEST (PATTERN (temp1)))
                   && rtx_equal_p (SET_SRC (PATTERN (temp1)),
                                   SET_SRC (PATTERN (temp3)))
                   && ! inequality_comparisons_p (PATTERN (temp))
                   && inequality_comparisons_p (PATTERN (temp2)))
            {
              rtx fallthrough_label = JUMP_LABEL (temp2);

              ++LABEL_NUSES (fallthrough_label);
              if (swap_jump (temp2, JUMP_LABEL (insn)))
                {
                  delete_insn (insn);
                  changed = 1;
                }

              if (--LABEL_NUSES (fallthrough_label) == 0)
                delete_insn (fallthrough_label);
            }
#endif
          /* Simplify if (...) {... x = 1;} if (x) ...

             We recognize this case backwards.

             TEMP is the test of `x';
             TEMP1 is the assignment to `x' at the end of the
             previous statement.  */
          /* @@ This should call get_condition to find the values being
             compared, instead of looking for a COMPARE insn when HAVE_cc0
             is not defined.  This would allow it to work on the m88k.  */
          /* @@ This optimization is only safe before cse is run if HAVE_cc0
             is not defined and the condition is tested by a separate compare
             insn.  This is because the code below assumes that the result
             of the compare dies in the following branch.  */

          /* ??? This has to be turned off.  The problem is that the
             unconditional jump might indirectly end up branching to the
             label between TEMP1 and TEMP.  We can't detect this, in general,
             since it may become a jump to there after further optimizations.
             If that jump is done, it will be deleted, so we will retry
             this optimization in the next pass, thus an infinite loop.

             The present code prevents this by putting the jump after the
             label, but this is not logically correct.  */
#if 0
          else if (this_is_condjump
                   /* Safe to skip USE and CLOBBER insns here
                      since they will not be deleted.  */
                   && (temp = prev_active_insn (insn))
                   && no_labels_between_p (temp, insn)
                   && GET_CODE (temp) == INSN
                   && GET_CODE (PATTERN (temp)) == SET
#ifdef HAVE_cc0
                   && sets_cc0_p (PATTERN (temp)) == 1
                   && GET_CODE (SET_SRC (PATTERN (temp))) == REG
#else
                   /* TEMP must be a compare insn; we cannot accept a
                      register-to-register move here, since it may not
                      be simply a tst insn.  */
                   && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
                   && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
                   && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
                   && GET_CODE (SET_DEST (PATTERN (temp))) == REG
                   && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
#endif
                   /* May skip USE or CLOBBER insns here
                      for checking for opportunity, since we
                      take care of them later.  */
                   && (temp1 = prev_active_insn (temp))
                   && GET_CODE (temp1) == INSN
                   && GET_CODE (PATTERN (temp1)) == SET
#ifdef HAVE_cc0
                   && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
#else
                   && (XEXP (SET_SRC (PATTERN (temp)), 0)
                       == SET_DEST (PATTERN (temp1)))
#endif
                   && CONSTANT_P (SET_SRC (PATTERN (temp1)))
                   /* If this isn't true, cse will do the job.  */
                   && ! no_labels_between_p (temp1, temp))
            {
              /* Get the if_then_else from the condjump.  */
              rtx choice = SET_SRC (PATTERN (insn));
              if (GET_CODE (choice) == IF_THEN_ELSE
                  && (GET_CODE (XEXP (choice, 0)) == EQ
                      || GET_CODE (XEXP (choice, 0)) == NE))
                {
                  int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
                  rtx last_insn;
                  rtx ultimate;
                  rtx p;

                  /* Get the place that condjump will jump to
                     if it is reached from here.  */
                  if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
                      == want_nonzero)
                    ultimate = XEXP (choice, 1);
                  else
                    ultimate = XEXP (choice, 2);
                  /* Get it as a CODE_LABEL.  */
                  if (ultimate == pc_rtx)
                    ultimate = get_label_after (insn);
                  else
                    /* Get the label out of the LABEL_REF.  */
                    ultimate = XEXP (ultimate, 0);

                  /* Insert the jump immediately before TEMP, specifically
                     after the label that is between TEMP1 and TEMP.  */
                  last_insn = PREV_INSN (temp);

                  /* If we would be branching to the next insn, the jump
                     would immediately be deleted and then re-inserted in
                     a subsequent pass over the code.  So don't do anything
                     in that case.  */
                  if (next_active_insn (last_insn)
                      != next_active_insn (ultimate))
                    {
                      emit_barrier_after (last_insn);
                      p = emit_jump_insn_after (gen_jump (ultimate),
                                                last_insn);
                      JUMP_LABEL (p) = ultimate;
                      ++LABEL_NUSES (ultimate);
                      if (INSN_UID (ultimate) < max_jump_chain
                          && INSN_UID (p) < max_jump_chain)
                        {
                          jump_chain[INSN_UID (p)]
                            = jump_chain[INSN_UID (ultimate)];
                          jump_chain[INSN_UID (ultimate)] = p;
                        }
                      changed = 1;
                      continue;
                    }
                }
            }
#endif
          /* Detect a conditional jump going to the same place
             as an immediately following unconditional jump.  */
          else if (this_is_condjump
                   && (temp = next_active_insn (insn)) != 0
                   && simplejump_p (temp)
                   && (next_active_insn (JUMP_LABEL (insn))
                       == next_active_insn (JUMP_LABEL (temp))))
            {
              rtx tem = temp;

              /* ??? Optional.  Disables some optimizations, but makes
                 gcov output more accurate with -O.  */
              if (flag_test_coverage && !reload_completed)
                for (tem = insn; tem != temp; tem = NEXT_INSN (tem))
                  if (GET_CODE (tem) == NOTE && NOTE_LINE_NUMBER (tem) > 0)
                    break;

              if (tem == temp)
                {
                  delete_jump (insn);
                  changed = 1;
                  continue;
                }
            }
#ifdef HAVE_trap
          /* Detect a conditional jump jumping over an unconditional trap.  */
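          /* E.g. (illustrative):

                 INSN:           if (x == y) goto L1
                 REALLABELPREV:  trap_if (const_true)
               L1:

             becomes a single "trap_if (x != y)" insn, with the jump and
             the unconditional trap deleted.  */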
1618 else if (HAVE_trap
1619 && this_is_condjump && ! this_is_simplejump
1620 && reallabelprev != 0
1621 && GET_CODE (reallabelprev) == INSN
1622 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1623 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1624 && prev_active_insn (reallabelprev) == insn
1625 && no_labels_between_p (insn, reallabelprev)
1626 && (temp2 = get_condition (insn, &temp4))
1627 && can_reverse_comparison_p (temp2, insn))
1628 {
1629 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1630 XEXP (temp2, 0), XEXP (temp2, 1),
1631 TRAP_CODE (PATTERN (reallabelprev)));
1632
1633 if (new)
1634 {
1635 emit_insn_before (new, temp4);
1636 delete_insn (reallabelprev);
1637 delete_jump (insn);
1638 changed = 1;
1639 continue;
1640 }
1641 }
1642 /* Detect a jump jumping to an unconditional trap. */
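/* Illustrative sketch (schematic insns, not exact rtl):
     insn:  jump (conditional or not) to L
     ...
     L:     temp: trap_if TC
   If INSN is unconditional and TC is always true, the jump itself
   becomes a trap.  If INSN's condition equals TC, it becomes a
   conditional trap.  If the two conditions can never both hold,
   the jump is simply redirected to the label after the trap. */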
1643 else if (HAVE_trap && this_is_condjump
1644 && (temp = next_active_insn (JUMP_LABEL (insn)))
1645 && GET_CODE (temp) == INSN
1646 && GET_CODE (PATTERN (temp)) == TRAP_IF
1647 && (this_is_simplejump
1648 || (temp2 = get_condition (insn, &temp4))))
1649 {
1650 rtx tc = TRAP_CONDITION (PATTERN (temp));
1651
1652 if (tc == const_true_rtx
1653 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1654 {
1655 rtx new;
1656 /* Replace an unconditional jump to a trap with a trap. */
1657 if (this_is_simplejump)
1658 {
1659 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1660 delete_jump (insn);
1661 changed = 1;
1662 continue;
1663 }
1664 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1665 XEXP (temp2, 1),
1666 TRAP_CODE (PATTERN (temp)));
1667 if (new)
1668 {
1669 emit_insn_before (new, temp4);
1670 delete_jump (insn);
1671 changed = 1;
1672 continue;
1673 }
1674 }
1675 /* If the trap condition and jump condition are mutually
1676 exclusive, redirect the jump to the following insn. */
1677 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1678 && ! this_is_simplejump
1679 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
1680 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
1681 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
1682 && redirect_jump (insn, get_label_after (temp)))
1683 {
1684 changed = 1;
1685 continue;
1686 }
1687 }
1688 #endif
1689
1690 /* Detect a conditional jump jumping over an unconditional jump. */
1691
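/* This is the classic jump around an unconditional jump.
   Illustrative sketch (schematic insns, not exact rtl):
     insn:          conditional jump on COND to L1
     reallabelprev: jump L2
     L1:
   Inverting INSN gives
     insn:          conditional jump on !COND to L2
     L1:
   after which the unconditional jump is deleted, and L1 as well
   if nothing else references it. */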
1692 else if ((this_is_condjump || this_is_condjump_in_parallel)
1693 && ! this_is_simplejump
1694 && reallabelprev != 0
1695 && GET_CODE (reallabelprev) == JUMP_INSN
1696 && prev_active_insn (reallabelprev) == insn
1697 && no_labels_between_p (insn, reallabelprev)
1698 && simplejump_p (reallabelprev))
1699 {
1700 /* When we invert the conditional jump, we will be
1701 decrementing the usage count of its old label.
1702 Make sure that we don't delete it now because that
1703 might cause the following code to be deleted. */
1704 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1705 rtx prev_label = JUMP_LABEL (insn);
1706
1707 if (prev_label)
1708 ++LABEL_NUSES (prev_label);
1709
1710 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1711 {
1712 /* It is very likely that if there are USE insns before
1713 this jump, they hold REG_DEAD notes. These REG_DEAD
1714 notes are no longer valid due to this optimization,
1715 and will cause the life analysis done by following
1716 passes (notably delayed-branch scheduling) to think that
1717 these registers are dead when they are not.
1718
1719 To prevent this trouble, we just remove the USE insns
1720 from the insn chain. */
1721
1722 while (prev_uses && GET_CODE (prev_uses) == INSN
1723 && GET_CODE (PATTERN (prev_uses)) == USE)
1724 {
1725 rtx useless = prev_uses;
1726 prev_uses = prev_nonnote_insn (prev_uses);
1727 delete_insn (useless);
1728 }
1729
1730 delete_insn (reallabelprev);
1731 next = insn;
1732 changed = 1;
1733 }
1734
1735 /* We can now safely delete the label if it is unreferenced
1736 since the delete_insn above has deleted the BARRIER. */
1737 if (prev_label && --LABEL_NUSES (prev_label) == 0)
1738 delete_insn (prev_label);
1739 continue;
1740 }
1741 else
1742 {
1743 /* Detect a jump to a jump. */
1744
1745 nlabel = follow_jumps (JUMP_LABEL (insn));
1746 if (nlabel != JUMP_LABEL (insn)
1747 && redirect_jump (insn, nlabel))
1748 {
1749 changed = 1;
1750 next = insn;
1751 }
1752
1753 /* Look for if (foo) bar; else break; */
1754 /* The insns look like this:
1755 insn = condjump label1;
1756 ...range1 (some insns)...
1757 jump label2;
1758 label1:
1759 ...range2 (some insns)...
1760 jump somewhere unconditionally
1761 label2: */
1762 {
1763 rtx label1 = next_label (insn);
1764 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1765 /* Don't do this optimization on the first round, so that
1766 jump-around-a-jump gets simplified before we ask here
1767 whether a jump is unconditional.
1768
1769 Also don't do it when we are called after reload since
1770 it will confuse reorg. */
1771 if (! first
1772 && (reload_completed ? ! flag_delayed_branch : 1)
1773 /* Make sure INSN is something we can invert. */
1774 && condjump_p (insn)
1775 && label1 != 0
1776 && JUMP_LABEL (insn) == label1
1777 && LABEL_NUSES (label1) == 1
1778 && GET_CODE (range1end) == JUMP_INSN
1779 && simplejump_p (range1end))
1780 {
1781 rtx label2 = next_label (label1);
1782 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1783 if (range1end != range2end
1784 && JUMP_LABEL (range1end) == label2
1785 && GET_CODE (range2end) == JUMP_INSN
1786 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1787 /* Invert the jump condition, so we
1788 still execute the same insns in each case. */
1789 && invert_jump (insn, label1))
1790 {
1791 rtx range1beg = next_active_insn (insn);
1792 rtx range2beg = next_active_insn (label1);
1793 rtx range1after, range2after;
1794 rtx range1before, range2before;
1795 rtx rangenext;
1796
1797 /* Include in each range any notes before it, to be
1798 sure that we get the line number note if any, even
1799 if there are other notes here. */
1800 while (PREV_INSN (range1beg)
1801 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
1802 range1beg = PREV_INSN (range1beg);
1803
1804 while (PREV_INSN (range2beg)
1805 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
1806 range2beg = PREV_INSN (range2beg);
1807
1808 /* Don't move NOTEs for blocks or loops; shift them
1809 outside the ranges, where they'll stay put. */
1810 range1beg = squeeze_notes (range1beg, range1end);
1811 range2beg = squeeze_notes (range2beg, range2end);
1812
1813 /* Get current surrounds of the 2 ranges. */
1814 range1before = PREV_INSN (range1beg);
1815 range2before = PREV_INSN (range2beg);
1816 range1after = NEXT_INSN (range1end);
1817 range2after = NEXT_INSN (range2end);
1818
1819 /* Splice range2 where range1 was. */
1820 NEXT_INSN (range1before) = range2beg;
1821 PREV_INSN (range2beg) = range1before;
1822 NEXT_INSN (range2end) = range1after;
1823 PREV_INSN (range1after) = range2end;
1824 /* Splice range1 where range2 was. */
1825 NEXT_INSN (range2before) = range1beg;
1826 PREV_INSN (range1beg) = range2before;
1827 NEXT_INSN (range1end) = range2after;
1828 PREV_INSN (range2after) = range1end;
1829
1830 /* Check for a loop end note between the end of
1831 range2, and the next code label. If there is one,
1832 then what we have really seen is
1833 if (foo) break; end_of_loop;
1834 and moved the break sequence outside the loop.
1835 We must move the LOOP_END note to where the
1836 loop really ends now, or we will confuse loop
1837 optimization. Stop if we find a LOOP_BEG note
1838 first, since we don't want to move the LOOP_END
1839 note in that case. */
1840 for (;range2after != label2; range2after = rangenext)
1841 {
1842 rangenext = NEXT_INSN (range2after);
1843 if (GET_CODE (range2after) == NOTE)
1844 {
1845 if (NOTE_LINE_NUMBER (range2after)
1846 == NOTE_INSN_LOOP_END)
1847 {
1848 NEXT_INSN (PREV_INSN (range2after))
1849 = rangenext;
1850 PREV_INSN (rangenext)
1851 = PREV_INSN (range2after);
1852 PREV_INSN (range2after)
1853 = PREV_INSN (range1beg);
1854 NEXT_INSN (range2after) = range1beg;
1855 NEXT_INSN (PREV_INSN (range1beg))
1856 = range2after;
1857 PREV_INSN (range1beg) = range2after;
1858 }
1859 else if (NOTE_LINE_NUMBER (range2after)
1860 == NOTE_INSN_LOOP_BEG)
1861 break;
1862 }
1863 }
1864 changed = 1;
1865 continue;
1866 }
1867 }
1868 }
1869
1870 /* Now that the jump has been tensioned,
1871 try cross jumping: check for identical code
1872 before the jump and before its target label. */
1873
1874 /* First, cross jumping of conditional jumps: */
1875
1876 if (cross_jump && condjump_p (insn))
1877 {
1878 rtx newjpos, newlpos;
1879 rtx x = prev_real_insn (JUMP_LABEL (insn));
1880
1881 /* A conditional jump may be crossjumped
1882 only if the place it jumps to follows
1883 an opposing jump that comes back here. */
1884
1885 if (x != 0 && ! jump_back_p (x, insn))
1886 /* We have no opposing jump;
1887 cannot cross jump this insn. */
1888 x = 0;
1889
1890 newjpos = 0;
1891 /* X is nonzero if it is ok to cross jump
1892 to code before X. If so, see if it matches. */
1893 if (x != 0)
1894 find_cross_jump (insn, x, 2,
1895 &newjpos, &newlpos);
1896
1897 if (newjpos != 0)
1898 {
1899 do_cross_jump (insn, newjpos, newlpos);
1900 /* Make the old conditional jump
1901 into an unconditional one. */
1902 SET_SRC (PATTERN (insn))
1903 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
1904 INSN_CODE (insn) = -1;
1905 emit_barrier_after (insn);
1906 /* Add to jump_chain unless this is a new label
1907 whose UID is too large. */
1908 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
1909 {
1910 jump_chain[INSN_UID (insn)]
1911 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1912 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
1913 }
1914 changed = 1;
1915 next = insn;
1916 }
1917 }
1918
1919 /* Cross jumping of unconditional jumps:
1920 a few differences. */
1921
1922 if (cross_jump && simplejump_p (insn))
1923 {
1924 rtx newjpos, newlpos;
1925 rtx target;
1926
1927 newjpos = 0;
1928
1929 /* TARGET is nonzero if it is ok to cross jump
1930 to code before TARGET. If so, see if it matches. */
1931 find_cross_jump (insn, JUMP_LABEL (insn), 1,
1932 &newjpos, &newlpos);
1933
1934 /* If we cannot cross jump to code before the label,
1935 see if we can cross jump to another jump to
1936 the same label. */
1937 /* Try each other jump to this label. */
1938 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
1939 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1940 target != 0 && newjpos == 0;
1941 target = jump_chain[INSN_UID (target)])
1942 if (target != insn
1943 && JUMP_LABEL (target) == JUMP_LABEL (insn)
1944 /* Ignore TARGET if it's deleted. */
1945 && ! INSN_DELETED_P (target))
1946 find_cross_jump (insn, target, 2,
1947 &newjpos, &newlpos);
1948
1949 if (newjpos != 0)
1950 {
1951 do_cross_jump (insn, newjpos, newlpos);
1952 changed = 1;
1953 next = insn;
1954 }
1955 }
1956
1957 /* This code was dead in the previous jump.c! */
1958 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
1959 {
1960 /* Return insns all "jump to the same place"
1961 so we can cross-jump between any two of them. */
1962
1963 rtx newjpos, newlpos, target;
1964
1965 newjpos = 0;
1966
1967 /* If we cannot cross jump to code before the label,
1968 see if we can cross jump to another jump to
1969 the same label. */
1970 /* Try each other jump to this label. */
1971 for (target = jump_chain[0];
1972 target != 0 && newjpos == 0;
1973 target = jump_chain[INSN_UID (target)])
1974 if (target != insn
1975 && ! INSN_DELETED_P (target)
1976 && GET_CODE (PATTERN (target)) == RETURN)
1977 find_cross_jump (insn, target, 2,
1978 &newjpos, &newlpos);
1979
1980 if (newjpos != 0)
1981 {
1982 do_cross_jump (insn, newjpos, newlpos);
1983 changed = 1;
1984 next = insn;
1985 }
1986 }
1987 }
1988 }
1989
1990 first = 0;
1991 }
1992
1993 /* Delete extraneous line number notes.
1994 Note that two consecutive notes for different lines are not really
1995 extraneous. There should be some indication where that line belonged,
1996 even if it became empty. */
1997
1998 {
1999 rtx last_note = 0;
2000
2001 for (insn = f; insn; insn = NEXT_INSN (insn))
2002 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2003 {
2004 /* Delete this note if it is identical to previous note. */
2005 if (last_note
2006 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2007 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2008 {
2009 delete_insn (insn);
2010 continue;
2011 }
2012
2013 last_note = insn;
2014 }
2015 }
2016
2017 #ifdef HAVE_return
2018 if (HAVE_return)
2019 {
2020 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2021 in front of it. If the machine allows it at this point (we might be
2022 after reload for a leaf routine), it will improve optimization for it
2023 to be there. We do this both here and at the start of this pass since
2024 the RETURN might have been deleted by some of our optimizations. */
2025 insn = get_last_insn ();
2026 while (insn && GET_CODE (insn) == NOTE)
2027 insn = PREV_INSN (insn);
2028
2029 if (insn && GET_CODE (insn) != BARRIER)
2030 {
2031 emit_jump_insn (gen_return ());
2032 emit_barrier ();
2033 }
2034 }
2035 #endif
2036
2037 can_reach_end = calculate_can_reach_end (last_insn, 0, 1);
2038
2039 /* Show JUMP_CHAIN no longer valid. */
2040 jump_chain = 0;
2041 }
2042 \f
2043 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2044 notes whose labels don't occur in the insn any more. Returns the
2045 largest INSN_UID found. */
2046 static int
2047 init_label_info (f)
2048 rtx f;
2049 {
2050 int largest_uid = 0;
2051 rtx insn;
2052
2053 for (insn = f; insn; insn = NEXT_INSN (insn))
2054 {
2055 if (GET_CODE (insn) == CODE_LABEL)
2056 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2057 else if (GET_CODE (insn) == JUMP_INSN)
2058 JUMP_LABEL (insn) = 0;
2059 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2060 {
2061 rtx note, next;
2062
2063 for (note = REG_NOTES (insn); note; note = next)
2064 {
2065 next = XEXP (note, 1);
2066 if (REG_NOTE_KIND (note) == REG_LABEL
2067 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2068 remove_note (insn, note);
2069 }
2070 }
2071 if (INSN_UID (insn) > largest_uid)
2072 largest_uid = INSN_UID (insn);
2073 }
2074
2075 return largest_uid;
2076 }
2077
2078 /* Delete insns following barriers, up to next label. */
2079 static void
2080 delete_barrier_successors (f)
2081 rtx f;
2082 {
2083 rtx insn;
2084
2085 for (insn = f; insn;)
2086 {
2087 if (GET_CODE (insn) == BARRIER)
2088 {
2089 insn = NEXT_INSN (insn);
2090 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2091 {
2092 if (GET_CODE (insn) == NOTE
2093 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2094 insn = NEXT_INSN (insn);
2095 else
2096 insn = delete_insn (insn);
2097 }
2098 /* INSN is now the code_label. */
2099 }
2100 else
2101 insn = NEXT_INSN (insn);
2102 }
2103 }
2104
2105 /* Mark the label each jump jumps to.
2106 Combine consecutive labels, and count uses of labels.
2107
2108 For each label, make a chain (using `jump_chain')
2109 of all the *unconditional* jumps that jump to it;
2110 also make a chain of all returns.
2111
2112 CROSS_JUMP indicates whether we are doing cross jumping
2113 and, if so, whether we will be paying attention to
2114 death notes or not. */
2115
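/* A worked example of the chain layout: suppose J1 and J2 are
   unconditional jumps to label L, with J1 encountered first.
   After this pass over the insns,
     jump_chain[INSN_UID (L)]  == J2
     jump_chain[INSN_UID (J2)] == J1
     jump_chain[INSN_UID (J1)] == 0
   i.e. the slot for a label heads a linked list of the jumps to
   it, and slot 0 plays the role of the label for RETURNs. */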
2116 static void
2117 mark_all_labels (f, cross_jump)
2118 rtx f;
2119 int cross_jump;
2120 {
2121 rtx insn;
2122
2123 for (insn = f; insn; insn = NEXT_INSN (insn))
2124 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2125 {
2126 mark_jump_label (PATTERN (insn), insn, cross_jump);
2127 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2128 {
2129 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2130 {
2131 jump_chain[INSN_UID (insn)]
2132 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2133 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2134 }
2135 if (GET_CODE (PATTERN (insn)) == RETURN)
2136 {
2137 jump_chain[INSN_UID (insn)] = jump_chain[0];
2138 jump_chain[0] = insn;
2139 }
2140 }
2141 }
2142 }
2143
2144 /* Delete all labels that are no longer referenced.
2145 Also find and return the last insn. */
2146
2147 static rtx
2148 delete_unreferenced_labels (f)
2149 rtx f;
2150 {
2151 rtx final = NULL_RTX;
2152 rtx insn;
2153
2154 for (insn = f; insn; )
2155 {
2156 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
2157 insn = delete_insn (insn);
2158 else
2159 {
2160 final = insn;
2161 insn = NEXT_INSN (insn);
2162 }
2163 }
2164
2165 return final;
2166 }
2167
2168 /* Delete various simple forms of moves which have no necessary
2169 side effect. */
2170
2171 static void
2172 delete_noop_moves (f)
2173 rtx f;
2174 {
2175 rtx insn, next;
2176
2177 for (insn = f; insn; )
2178 {
2179 next = NEXT_INSN (insn);
2180
2181 if (GET_CODE (insn) == INSN)
2182 {
2183 register rtx body = PATTERN (insn);
2184
2185 /* Combine stack_adjusts with following push_insns. */
2186 #ifdef PUSH_ROUNDING
2187 if (GET_CODE (body) == SET
2188 && SET_DEST (body) == stack_pointer_rtx
2189 && GET_CODE (SET_SRC (body)) == PLUS
2190 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
2191 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
2192 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
2193 {
2194 rtx p;
2195 rtx stack_adjust_insn = insn;
2196 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
2197 int total_pushed = 0;
2198 int pushes = 0;
2199
2200 /* Find all successive push insns. */
2201 p = insn;
2202 /* Don't convert more than three pushes;
2203 that starts adding too many displaced addresses
2204 and the whole thing starts becoming a losing
2205 proposition. */
2206 while (pushes < 3)
2207 {
2208 rtx pbody, dest;
2209 p = next_nonnote_insn (p);
2210 if (p == 0 || GET_CODE (p) != INSN)
2211 break;
2212 pbody = PATTERN (p);
2213 if (GET_CODE (pbody) != SET)
2214 break;
2215 dest = SET_DEST (pbody);
2216 /* Allow a no-op move between the adjust and the push. */
2217 if (GET_CODE (dest) == REG
2218 && GET_CODE (SET_SRC (pbody)) == REG
2219 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2220 continue;
2221 if (! (GET_CODE (dest) == MEM
2222 && GET_CODE (XEXP (dest, 0)) == POST_INC
2223 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2224 break;
2225 pushes++;
2226 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
2227 > stack_adjust_amount)
2228 break;
2229 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2230 }
2231
2232 /* Discard the amount pushed from the stack adjust;
2233 maybe eliminate it entirely. */
2234 if (total_pushed >= stack_adjust_amount)
2235 {
2236 delete_computation (stack_adjust_insn);
2237 total_pushed = stack_adjust_amount;
2238 }
2239 else
2240 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
2241 = GEN_INT (stack_adjust_amount - total_pushed);
2242
2243 /* Change the appropriate push insns to ordinary stores. */
2244 p = insn;
2245 while (total_pushed > 0)
2246 {
2247 rtx pbody, dest;
2248 p = next_nonnote_insn (p);
2249 if (GET_CODE (p) != INSN)
2250 break;
2251 pbody = PATTERN (p);
2252 if (GET_CODE (pbody) != SET)
2253 break;
2254 dest = SET_DEST (pbody);
2255 /* Allow a no-op move between the adjust and the push. */
2256 if (GET_CODE (dest) == REG
2257 && GET_CODE (SET_SRC (pbody)) == REG
2258 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2259 continue;
2260 if (! (GET_CODE (dest) == MEM
2261 && GET_CODE (XEXP (dest, 0)) == POST_INC
2262 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2263 break;
2264 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2265 /* If this push doesn't fully fit in the space
2266 of the stack adjust that we deleted,
2267 make another stack adjust here for what we
2268 didn't use up. There should be peepholes
2269 to recognize the resulting sequence of insns. */
2270 if (total_pushed < 0)
2271 {
2272 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
2273 GEN_INT (- total_pushed)),
2274 p);
2275 break;
2276 }
2277 XEXP (dest, 0)
2278 = plus_constant (stack_pointer_rtx, total_pushed);
2279 }
2280 }
2281 #endif
2282
2283 /* Detect and delete no-op move instructions
2284 resulting from not allocating a parameter in a register. */
2285
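/* A minimal example of the memory case (hypothetical rtl for a
   parameter left in its own stack slot):
     (set (mem:SI (plus (reg fp) (const_int -4)))
          (mem:SI (plus (reg fp) (const_int -4))))
   copies a location onto itself, so unless the memory is volatile
   the insn can be deleted. */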
2286 if (GET_CODE (body) == SET
2287 && (SET_DEST (body) == SET_SRC (body)
2288 || (GET_CODE (SET_DEST (body)) == MEM
2289 && GET_CODE (SET_SRC (body)) == MEM
2290 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2291 && ! (GET_CODE (SET_DEST (body)) == MEM
2292 && MEM_VOLATILE_P (SET_DEST (body)))
2293 && ! (GET_CODE (SET_SRC (body)) == MEM
2294 && MEM_VOLATILE_P (SET_SRC (body))))
2295 delete_computation (insn);
2296
2297 /* Detect and ignore no-op move instructions
2298 resulting from smart or fortuitous register allocation. */
2299
2300 else if (GET_CODE (body) == SET)
2301 {
2302 int sreg = true_regnum (SET_SRC (body));
2303 int dreg = true_regnum (SET_DEST (body));
2304
2305 if (sreg == dreg && sreg >= 0)
2306 delete_insn (insn);
2307 else if (sreg >= 0 && dreg >= 0)
2308 {
2309 rtx trial;
2310 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2311 sreg, NULL_PTR, dreg,
2312 GET_MODE (SET_SRC (body)));
2313
2314 if (tem != 0
2315 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2316 {
2317 /* DREG may have been the target of a REG_DEAD note in
2318 the insn which makes INSN redundant. If so, reorg
2319 would still think it is dead. So search for such a
2320 note and delete it if we find it. */
2321 if (! find_regno_note (insn, REG_UNUSED, dreg))
2322 for (trial = prev_nonnote_insn (insn);
2323 trial && GET_CODE (trial) != CODE_LABEL;
2324 trial = prev_nonnote_insn (trial))
2325 if (find_regno_note (trial, REG_DEAD, dreg))
2326 {
2327 remove_death (dreg, trial);
2328 break;
2329 }
2330
2331 /* Deleting this insn could lose a death note for SREG. */
2332 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2333 {
2334 /* Change this into a USE so that we won't emit
2335 code for it, but still can keep the note. */
2336 PATTERN (insn)
2337 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2338 INSN_CODE (insn) = -1;
2339 /* Remove all reg notes but the REG_DEAD one. */
2340 REG_NOTES (insn) = trial;
2341 XEXP (trial, 1) = NULL_RTX;
2342 }
2343 else
2344 delete_insn (insn);
2345 }
2346 }
2347 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2348 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2349 NULL_PTR, 0,
2350 GET_MODE (SET_DEST (body))))
2351 {
2352 /* This handles the case where we have two consecutive
2353 assignments of the same constant to pseudos that didn't
2354 get a hard reg. Each SET from the constant will be
2355 converted into a SET of the spill register and an
2356 output reload will be made following it. This produces
2357 two loads of the same constant into the same spill
2358 register. */
2359
2360 rtx in_insn = insn;
2361
2362 /* Look back for a death note for the first reg.
2363 If there is one, it is no longer accurate. */
2364 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2365 {
2366 if ((GET_CODE (in_insn) == INSN
2367 || GET_CODE (in_insn) == JUMP_INSN)
2368 && find_regno_note (in_insn, REG_DEAD, dreg))
2369 {
2370 remove_death (dreg, in_insn);
2371 break;
2372 }
2373 in_insn = PREV_INSN (in_insn);
2374 }
2375
2376 /* Delete the second load of the value. */
2377 delete_insn (insn);
2378 }
2379 }
2380 else if (GET_CODE (body) == PARALLEL)
2381 {
2382 /* If each part is a set between two identical registers or
2383 a USE or CLOBBER, delete the insn. */
2384 int i, sreg, dreg;
2385 rtx tem;
2386
2387 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2388 {
2389 tem = XVECEXP (body, 0, i);
2390 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2391 continue;
2392
2393 if (GET_CODE (tem) != SET
2394 || (sreg = true_regnum (SET_SRC (tem))) < 0
2395 || (dreg = true_regnum (SET_DEST (tem))) < 0
2396 || dreg != sreg)
2397 break;
2398 }
2399
2400 if (i < 0)
2401 delete_insn (insn);
2402 }
2403 /* Also delete insns to store bit fields if they are no-ops. */
2404 /* Not worth the hair to detect this in the big-endian case. */
2405 else if (! BYTES_BIG_ENDIAN
2406 && GET_CODE (body) == SET
2407 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2408 && XEXP (SET_DEST (body), 2) == const0_rtx
2409 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2410 && ! (GET_CODE (SET_SRC (body)) == MEM
2411 && MEM_VOLATILE_P (SET_SRC (body))))
2412 delete_insn (insn);
2413 }
2414 insn = next;
2415 }
2416 }
2417
2418 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2419 If so, indicate that this function can drop off the end by returning
2420 1; else return 0.
2421
2422 CHECK_DELETED indicates whether we must check if the note being
2423 searched for has the deleted flag set.
2424
2425 DELETE_FINAL_NOTE indicates whether we should delete the note
2426 if we find it. */
2427
2428 static int
2429 calculate_can_reach_end (last, check_deleted, delete_final_note)
2430 rtx last;
2431 int check_deleted;
2432 int delete_final_note;
2433 {
2434 rtx insn = last;
2435 int n_labels = 1;
2436
2437 while (insn != NULL_RTX)
2438 {
2439 int ok = 0;
2440
2441 /* One label can follow the end-note: the return label. */
2442 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2443 ok = 1;
2444 /* Ordinary insns can follow it if returning a structure. */
2445 else if (GET_CODE (insn) == INSN)
2446 ok = 1;
2447 /* If the machine uses explicit RETURN insns and no epilogue,
2448 then one of them follows the note. */
2449 else if (GET_CODE (insn) == JUMP_INSN
2450 && GET_CODE (PATTERN (insn)) == RETURN)
2451 ok = 1;
2452 /* A barrier can follow the return insn. */
2453 else if (GET_CODE (insn) == BARRIER)
2454 ok = 1;
2455 /* Other kinds of notes can follow also. */
2456 else if (GET_CODE (insn) == NOTE
2457 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2458 ok = 1;
2459
2460 if (ok != 1)
2461 break;
2462
2463 insn = PREV_INSN (insn);
2464 }
2465
2466 /* See if we backed up to the appropriate type of note. */
2467 if (insn != NULL_RTX
2468 && GET_CODE (insn) == NOTE
2469 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
2470 && (check_deleted == 0
2471 || ! INSN_DELETED_P (insn)))
2472 {
2473 if (delete_final_note)
2474 delete_insn (insn);
2475 return 1;
2476 }
2477
2478 return 0;
2479 }
2480
2481 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2482 jump. Assume that this unconditional jump is to the exit test code. If
2483 the code is sufficiently simple, make a copy of it before INSN,
2484 followed by a jump to the exit of the loop. Then delete the unconditional
2485 jump after INSN.
2486
2487 Return 1 if we made the change, else 0.
2488
2489 This is only safe immediately after a regscan pass because it uses the
2490 values of regno_first_uid and regno_last_uid. */
2491
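/* Illustrative sketch of the transformation, for a loop emitted
   in the usual exit-test-at-the-bottom form:
     before:
       NOTE_INSN_LOOP_BEG
       jump L_test
     L_body:
       ...body...
     L_test:
       <exit test; conditional jump back to L_body>
       NOTE_INSN_LOOP_END
     L_exit:
     after:
       <copy of the exit test, plus a jump to L_exit for its
        fall-out path>
       NOTE_INSN_LOOP_BEG
     L_body:
       ...body...
     L_test:
       NOTE_INSN_LOOP_VTOP
       <exit test; conditional jump back to L_body>
       NOTE_INSN_LOOP_END
     L_exit:
   The `jump L_test' at the head is deleted, so the loop body is
   entered only when the test succeeds at least once. */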
2492 static int
2493 duplicate_loop_exit_test (loop_start)
2494 rtx loop_start;
2495 {
2496 rtx insn, set, reg, p, link;
2497 rtx copy = 0;
2498 int num_insns = 0;
2499 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2500 rtx lastexit;
2501 int max_reg = max_reg_num ();
2502 rtx *reg_map = 0;
2503
2504 /* Scan the exit code. We do not perform this optimization if any insn:
2505
2506 is a CALL_INSN
2507 is a CODE_LABEL
2508 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2509 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2510 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2511 is not valid.
2512
2513 We also do not do this if we find an insn with ASM_OPERANDS. While
2514 this restriction should not be necessary, copying an insn with
2515 ASM_OPERANDS can confuse asm_noperands in some cases.
2516
2517 Also, don't do this if the exit code is more than 20 insns. */
2518
2519 for (insn = exitcode;
2520 insn
2521 && ! (GET_CODE (insn) == NOTE
2522 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2523 insn = NEXT_INSN (insn))
2524 {
2525 switch (GET_CODE (insn))
2526 {
2527 case CODE_LABEL:
2528 case CALL_INSN:
2529 return 0;
2530 case NOTE:
2531 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2532 a jump immediately after the loop start that branches outside
2533 the loop but within an outer loop, near the exit test.
2534 If we copied this exit test and created a phony
2535 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2536 before the exit test look as though they could be safely moved
2537 out of the loop even though they might never be executed.
2538 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2539
2540 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2541 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2542 return 0;
2543
2544 if (optimize < 2
2545 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2546 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2547 /* If we were to duplicate this code, we would not move
2548 the BLOCK notes, and so debugging the moved code would
2549 be difficult. Thus, we only move the code with -O2 or
2550 higher. */
2551 return 0;
2552
2553 break;
2554 case JUMP_INSN:
2555 case INSN:
2556 /* The code below would grossly mishandle REG_WAS_0 notes,
2557 so get rid of them here. */
2558 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2559 remove_note (insn, p);
2560 if (++num_insns > 20
2561 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2562 || find_reg_note (insn, REG_LIBCALL, NULL_RTX)
2563 || asm_noperands (PATTERN (insn)) > 0)
2564 return 0;
2565 break;
2566 default:
2567 break;
2568 }
2569 }
2570
2571 /* Unless INSN is zero, we can do the optimization. */
2572 if (insn == 0)
2573 return 0;
2574
2575 lastexit = insn;
2576
2577 /* See if any insn sets a register only used in the loop exit code and
2578 not a user variable. If so, replace it with a new register. */
2579 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2580 if (GET_CODE (insn) == INSN
2581 && (set = single_set (insn)) != 0
2582 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2583 || (GET_CODE (reg) == SUBREG
2584 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2585 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2586 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
2587 {
2588 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2589 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2590 break;
2591
2592 if (p != lastexit)
2593 {
2594 /* We can do the replacement. Allocate reg_map if this is the
2595 first replacement we found. */
2596 if (reg_map == 0)
2597 {
2598 reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
2599 bzero ((char *) reg_map, max_reg * sizeof (rtx));
2600 }
2601
2602 REG_LOOP_TEST_P (reg) = 1;
2603
2604 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2605 }
2606 }
2607
2608 /* Now copy each insn. */
2609 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2610 switch (GET_CODE (insn))
2611 {
2612 case BARRIER:
2613 copy = emit_barrier_before (loop_start);
2614 break;
2615 case NOTE:
2616 /* Only copy line-number notes. */
2617 if (NOTE_LINE_NUMBER (insn) >= 0)
2618 {
2619 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2620 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2621 }
2622 break;
2623
2624 case INSN:
2625 copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2626 if (reg_map)
2627 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2628
2629 mark_jump_label (PATTERN (copy), copy, 0);
2630
2631 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2632 make them. */
2633 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2634 if (REG_NOTE_KIND (link) != REG_LABEL)
2635 REG_NOTES (copy)
2636 = copy_rtx (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
2637 XEXP (link, 0),
2638 REG_NOTES (copy)));
2639 if (reg_map && REG_NOTES (copy))
2640 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2641 break;
2642
2643 case JUMP_INSN:
2644 copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2645 if (reg_map)
2646 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2647 mark_jump_label (PATTERN (copy), copy, 0);
2648 if (REG_NOTES (insn))
2649 {
2650 REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
2651 if (reg_map)
2652 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2653 }
2654
2655 /* If this is a simple jump, add it to the jump chain. */
2656
2657 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2658 && simplejump_p (copy))
2659 {
2660 jump_chain[INSN_UID (copy)]
2661 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2662 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2663 }
2664 break;
2665
2666 default:
2667 abort ();
2668 }
2669
2670 /* Now clean up by emitting a jump to the end label and deleting the jump
2671 at the start of the loop. */
2672 if (! copy || GET_CODE (copy) != BARRIER)
2673 {
2674 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2675 loop_start);
2676 mark_jump_label (PATTERN (copy), copy, 0);
2677 if (INSN_UID (copy) < max_jump_chain
2678 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2679 {
2680 jump_chain[INSN_UID (copy)]
2681 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2682 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2683 }
2684 emit_barrier_before (loop_start);
2685 }
2686
2687 /* Mark the exit code as the virtual top of the converted loop. */
2688 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2689
2690 delete_insn (next_nonnote_insn (loop_start));
2691
2692 return 1;
2693 }
2694 \f
2695 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2696 loop-end notes between START and END out before START. Assume that
2697 END is not such a note. START may be such a note. Returns the value
2698 of the new starting insn, which may be different if the original start
2699 was such a note. */
2700
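/* For example (illustrative), if the range START..END contains
     insn_a, NOTE_INSN_BLOCK_END, insn_b
   the note is relinked in front of START, leaving
     insn_a, insn_b
   in the range, so the range can later be moved as a unit without
   dragging block or loop structure along with it. */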
2701 rtx
2702 squeeze_notes (start, end)
2703 rtx start, end;
2704 {
2705 rtx insn;
2706 rtx next;
2707
2708 for (insn = start; insn != end; insn = next)
2709 {
2710 next = NEXT_INSN (insn);
2711 if (GET_CODE (insn) == NOTE
2712 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2713 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2714 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2715 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2716 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2717 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2718 {
2719 if (insn == start)
2720 start = next;
2721 else
2722 {
2723 rtx prev = PREV_INSN (insn);
2724 PREV_INSN (insn) = PREV_INSN (start);
2725 NEXT_INSN (insn) = start;
2726 NEXT_INSN (PREV_INSN (insn)) = insn;
2727 PREV_INSN (NEXT_INSN (insn)) = insn;
2728 NEXT_INSN (prev) = next;
2729 PREV_INSN (next) = prev;
2730 }
2731 }
2732 }
2733
2734 return start;
2735 }
2736 \f
2737 /* Compare the instructions before insn E1 with those before E2
2738 to find an opportunity for cross jumping.
2739 (This means detecting identical sequences of insns followed by
2740 jumps to the same place, or followed by a label and a jump
2741 to that label, and replacing one with a jump to the other.)
2742
2743 Assume E1 is a jump that jumps to label E2
2744 (that is not always true but it might as well be).
2745 Find the longest possible equivalent sequences
2746 and store the first insns of those sequences into *F1 and *F2.
2747 Store zero there if no equivalent preceding instructions are found.
2748
2749 We give up if we find a label in stream 1.
2750 Actually we could transfer that label into stream 2. */
2751
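/* Illustrative sketch:
       insn_a   insn_b   (E1) jump L
       ...
       insn_a'  insn_b'
     L:                   <- E2
   If insn_a/insn_b match insn_a'/insn_b' under
   rtx_renumbered_equal_p, *F1 and *F2 get the first insn of each
   run; the caller can then redirect the jump to a label before
   insn_a' and delete the first copy of the sequence. */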
2752 static void
2753 find_cross_jump (e1, e2, minimum, f1, f2)
2754 rtx e1, e2;
2755 int minimum;
2756 rtx *f1, *f2;
2757 {
2758 register rtx i1 = e1, i2 = e2;
2759 register rtx p1, p2;
2760 int lose = 0;
2761
2762 rtx last1 = 0, last2 = 0;
2763 rtx afterlast1 = 0, afterlast2 = 0;
2764
2765 *f1 = 0;
2766 *f2 = 0;
2767
2768 while (1)
2769 {
2770 i1 = prev_nonnote_insn (i1);
2771
2772 i2 = PREV_INSN (i2);
2773 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2774 i2 = PREV_INSN (i2);
2775
2776 if (i1 == 0)
2777 break;
2778
2779 /* Don't allow the range of insns preceding E1 or E2
2780 to include the other (E2 or E1). */
2781 if (i2 == e1 || i1 == e2)
2782 break;
2783
2784 /* If we will get to this code by jumping, those jumps will be
2785 tensioned to go directly to the new label (before I2),
2786 so this cross-jumping won't cost extra. So reduce the minimum. */
2787 if (GET_CODE (i1) == CODE_LABEL)
2788 {
2789 --minimum;
2790 break;
2791 }
2792
2793 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2794 break;
2795
2796 /* Avoid moving insns across EH regions if either of the insns
2797 can throw. */
2798 if (flag_exceptions
2799 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
2800 && !in_same_eh_region (i1, i2))
2801 break;
2802
2803 p1 = PATTERN (i1);
2804 p2 = PATTERN (i2);
2805
2806 /* If this is a CALL_INSN, compare register usage information.
2807 If we don't check this on stack register machines, the two
2808 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2809 numbers of stack registers in the same basic block.
2810 If we don't check this on machines with delay slots, a delay slot may
2811 be filled that clobbers a parameter expected by the subroutine.
2812
2813 ??? We take the simple route for now and assume that if they're
2814 equal, they were constructed identically. */
2815
2816 if (GET_CODE (i1) == CALL_INSN
2817 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2818 CALL_INSN_FUNCTION_USAGE (i2)))
2819 lose = 1;
2820
2821 #ifdef STACK_REGS
2822 /* If cross_jump_death_matters is not 0, the insn's mode
2823 indicates whether or not the insn contains any stack-like
2824 regs. */
2825
2826 if (!lose && cross_jump_death_matters && GET_MODE (i1) == QImode)
2827 {
2828 /* If register stack conversion has already been done, then
2829 death notes must also be compared before it is certain that
2830 the two instruction streams match. */
2831
2832 rtx note;
2833 HARD_REG_SET i1_regset, i2_regset;
2834
2835 CLEAR_HARD_REG_SET (i1_regset);
2836 CLEAR_HARD_REG_SET (i2_regset);
2837
2838 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2839 if (REG_NOTE_KIND (note) == REG_DEAD
2840 && STACK_REG_P (XEXP (note, 0)))
2841 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2842
2843 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2844 if (REG_NOTE_KIND (note) == REG_DEAD
2845 && STACK_REG_P (XEXP (note, 0)))
2846 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2847
2848 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2849
2850 lose = 1;
2851
2852 done:
2853 ;
2854 }
2855 #endif
2856
2857 /* Don't allow old-style asm or volatile extended asms to be accepted
2858 for cross jumping purposes. It is conceptually correct to allow
2859 them, since cross-jumping preserves the dynamic instruction order
2860 even though it is changing the static instruction order. However,
2861 if an asm is being used to emit an assembler pseudo-op, such as
2862 the MIPS `.set reorder' pseudo-op, then the static instruction order
2863 matters and it must be preserved. */
2864 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
2865 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
2866 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
2867 lose = 1;
2868
2869 if (lose || GET_CODE (p1) != GET_CODE (p2)
2870 || ! rtx_renumbered_equal_p (p1, p2))
2871 {
2872 /* The following code helps take care of G++ cleanups. */
2873 rtx equiv1;
2874 rtx equiv2;
2875
2876 if (!lose && GET_CODE (p1) == GET_CODE (p2)
2877 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2878 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2879 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2880 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
2881 /* If the equivalences are not to a constant, they may
2882 reference pseudos that no longer exist, so we can't
2883 use them. */
2884 && CONSTANT_P (XEXP (equiv1, 0))
2885 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2886 {
2887 rtx s1 = single_set (i1);
2888 rtx s2 = single_set (i2);
2889 if (s1 != 0 && s2 != 0
2890 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
2891 {
2892 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
2893 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
2894 if (! rtx_renumbered_equal_p (p1, p2))
2895 cancel_changes (0);
2896 else if (apply_change_group ())
2897 goto win;
2898 }
2899 }
2900
2901 /* Insns fail to match; cross jumping is limited to the following
2902 insns. */
2903
2904 #ifdef HAVE_cc0
2905 /* Don't allow the insn after a compare to be shared by
2906 cross-jumping unless the compare is also shared.
2907 Here, if either of these non-matching insns is a compare,
2908 exclude the following insn from possible cross-jumping. */
2909 if (sets_cc0_p (p1) || sets_cc0_p (p2))
2910 last1 = afterlast1, last2 = afterlast2, ++minimum;
2911 #endif
2912
2913 /* If cross-jumping here will feed a jump-around-jump
2914 optimization, this jump won't cost extra, so reduce
2915 the minimum. */
2916 if (GET_CODE (i1) == JUMP_INSN
2917 && JUMP_LABEL (i1)
2918 && prev_real_insn (JUMP_LABEL (i1)) == e1)
2919 --minimum;
2920 break;
2921 }
2922
2923 win:
2924 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
2925 {
2926 /* Ok, this insn is potentially includable in a cross-jump here. */
2927 afterlast1 = last1, afterlast2 = last2;
2928 last1 = i1, last2 = i2, --minimum;
2929 }
2930 }
2931
2932 if (minimum <= 0 && last1 != 0 && last1 != e1)
2933 *f1 = last1, *f2 = last2;
2934 }
2935
2936 static void
2937 do_cross_jump (insn, newjpos, newlpos)
2938 rtx insn, newjpos, newlpos;
2939 {
2940 /* Find an existing label at this point
2941 or make a new one if there is none. */
2942 register rtx label = get_label_before (newlpos);
2943
2944 /* Make the same jump insn jump to the new point. */
2945 if (GET_CODE (PATTERN (insn)) == RETURN)
2946 {
2947 /* Remove from jump chain of returns. */
2948 delete_from_jump_chain (insn);
2949 /* Change the insn. */
2950 PATTERN (insn) = gen_jump (label);
2951 INSN_CODE (insn) = -1;
2952 JUMP_LABEL (insn) = label;
2953 LABEL_NUSES (label)++;
2954 /* Add to the new jump chain. */
2955 if (INSN_UID (label) < max_jump_chain
2956 && INSN_UID (insn) < max_jump_chain)
2957 {
2958 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
2959 jump_chain[INSN_UID (label)] = insn;
2960 }
2961 }
2962 else
2963 redirect_jump (insn, label);
2964
2965 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
2966 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
2967 the NEWJPOS stream. */
2968
2969 while (newjpos != insn)
2970 {
2971 rtx lnote;
2972
2973 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
2974 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
2975 || REG_NOTE_KIND (lnote) == REG_EQUIV)
2976 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
2977 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
2978 remove_note (newlpos, lnote);
2979
2980 delete_insn (newjpos);
2981 newjpos = next_real_insn (newjpos);
2982 newlpos = next_real_insn (newlpos);
2983 }
2984 }
2985 \f
2986 /* Return the label before INSN, or put a new label there. */
2987
2988 rtx
2989 get_label_before (insn)
2990 rtx insn;
2991 {
2992 rtx label;
2993
2994 /* Find an existing label at this point
2995 or make a new one if there is none. */
2996 label = prev_nonnote_insn (insn);
2997
2998 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2999 {
3000 rtx prev = PREV_INSN (insn);
3001
3002 label = gen_label_rtx ();
3003 emit_label_after (label, prev);
3004 LABEL_NUSES (label) = 0;
3005 }
3006 return label;
3007 }
3008
3009 /* Return the label after INSN, or put a new label there. */
3010
3011 rtx
3012 get_label_after (insn)
3013 rtx insn;
3014 {
3015 rtx label;
3016
3017 /* Find an existing label at this point
3018 or make a new one if there is none. */
3019 label = next_nonnote_insn (insn);
3020
3021 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3022 {
3023 label = gen_label_rtx ();
3024 emit_label_after (label, insn);
3025 LABEL_NUSES (label) = 0;
3026 }
3027 return label;
3028 }
3029 \f
3030 /* Return 1 if INSN is a jump that jumps to right after TARGET
3031 only on the condition that TARGET itself would drop through.
3032 Assumes that TARGET is a conditional jump. */
3033
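/* Illustrative sketch:
     INSN:    conditional jump on C to L
     ...
     TARGET:  conditional jump on C' to somewhere else
     L:       <- immediately after TARGET
   The result is 1 when C is exactly the condition under which
   TARGET falls through, i.e. INSN reaches L precisely when TARGET
   would have dropped through to it. */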
3034 static int
3035 jump_back_p (insn, target)
3036 rtx insn, target;
3037 {
3038 rtx cinsn, ctarget;
3039 enum rtx_code codei, codet;
3040
3041 if (simplejump_p (insn) || ! condjump_p (insn)
3042 || simplejump_p (target)
3043 || target != prev_real_insn (JUMP_LABEL (insn)))
3044 return 0;
3045
3046 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3047 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3048
3049 codei = GET_CODE (cinsn);
3050 codet = GET_CODE (ctarget);
3051
3052 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3053 {
3054 if (! can_reverse_comparison_p (cinsn, insn))
3055 return 0;
3056 codei = reverse_condition (codei);
3057 }
3058
3059 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3060 {
3061 if (! can_reverse_comparison_p (ctarget, target))
3062 return 0;
3063 codet = reverse_condition (codet);
3064 }
3065
3066 return (codei == codet
3067 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3068 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3069 }
3070 \f
3071 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3072 return non-zero if it is safe to reverse this comparison. It is safe if
3073 the floating-point format is not IEEE, if -ffast-math is in effect, if
3074 this is an NE or EQ comparison, or if this is known to be an integer
3075 comparison. */
3075
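/* For example, (lt x y) must not be reversed to (ge x y) when an
   operand can be an IEEE NaN: both comparisons are false on a NaN,
   so they are not complements.  NE and EQ stay exact complements
   even with NaNs, which is why they are always reversible. */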
3076 int
3077 can_reverse_comparison_p (comparison, insn)
3078 rtx comparison;
3079 rtx insn;
3080 {
3081 rtx arg0;
3082
3083 /* If this is not actually a comparison, we can't reverse it. */
3084 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3085 return 0;
3086
3087 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3088 /* If this is an NE comparison, it is safe to reverse it to an EQ
3089 comparison and vice versa, even for floating point. If no operands
3090 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3091 always false and NE is always true, so the reversal is also valid. */
3092 || flag_fast_math
3093 || GET_CODE (comparison) == NE
3094 || GET_CODE (comparison) == EQ)
3095 return 1;
3096
3097 arg0 = XEXP (comparison, 0);
3098
3099 /* Make sure ARG0 is one of the actual objects being compared. If we
3100 can't do this, we can't be sure the comparison can be reversed.
3101
3102 Handle cc0 and a MODE_CC register. */
3103 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3104 #ifdef HAVE_cc0
3105 || arg0 == cc0_rtx
3106 #endif
3107 )
3108 {
3109 rtx prev = prev_nonnote_insn (insn);
3110 rtx set = single_set (prev);
3111
3112 if (set == 0 || SET_DEST (set) != arg0)
3113 return 0;
3114
3115 arg0 = SET_SRC (set);
3116
3117 if (GET_CODE (arg0) == COMPARE)
3118 arg0 = XEXP (arg0, 0);
3119 }
3120
3121 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3122 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3123 return (GET_CODE (arg0) == CONST_INT
3124 || (GET_MODE (arg0) != VOIDmode
3125 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3126 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3127 }
3128
3129 /* Given an rtx-code for a comparison, return the code
3130 for the negated comparison.
3131 WATCH OUT! reverse_condition is not safe to use on a jump
3132 that might be acting on the results of an IEEE floating point comparison,
3133 because of the special treatment of non-signaling nans in comparisons.
3134 Use can_reverse_comparison_p to be sure. */
3135
3136 enum rtx_code
3137 reverse_condition (code)
3138 enum rtx_code code;
3139 {
3140 switch (code)
3141 {
3142 case EQ:
3143 return NE;
3144
3145 case NE:
3146 return EQ;
3147
3148 case GT:
3149 return LE;
3150
3151 case GE:
3152 return LT;
3153
3154 case LT:
3155 return GE;
3156
3157 case LE:
3158 return GT;
3159
3160 case GTU:
3161 return LEU;
3162
3163 case GEU:
3164 return LTU;
3165
3166 case LTU:
3167 return GEU;
3168
3169 case LEU:
3170 return GTU;
3171
3172 default:
3173 abort ();
3174 return UNKNOWN;
3175 }
3176 }
3177
3178 /* Similar, but return the code when two operands of a comparison are swapped.
3179 This IS safe for IEEE floating-point. */
3180
3181 enum rtx_code
3182 swap_condition (code)
3183 enum rtx_code code;
3184 {
3185 switch (code)
3186 {
3187 case EQ:
3188 case NE:
3189 return code;
3190
3191 case GT:
3192 return LT;
3193
3194 case GE:
3195 return LE;
3196
3197 case LT:
3198 return GT;
3199
3200 case LE:
3201 return GE;
3202
3203 case GTU:
3204 return LTU;
3205
3206 case GEU:
3207 return LEU;
3208
3209 case LTU:
3210 return GTU;
3211
3212 case LEU:
3213 return GEU;
3214
3215 default:
3216 abort ();
3217 return UNKNOWN;
3218 }
3219 }
3220
3221 /* Given a comparison CODE, return the corresponding unsigned comparison.
3222 If CODE is an equality comparison or already an unsigned comparison,
3223 CODE is returned. */
3224
3225 enum rtx_code
3226 unsigned_condition (code)
3227 enum rtx_code code;
3228 {
3229 switch (code)
3230 {
3231 case EQ:
3232 case NE:
3233 case GTU:
3234 case GEU:
3235 case LTU:
3236 case LEU:
3237 return code;
3238
3239 case GT:
3240 return GTU;
3241
3242 case GE:
3243 return GEU;
3244
3245 case LT:
3246 return LTU;
3247
3248 case LE:
3249 return LEU;
3250
3251 default:
3252 abort ();
3253 }
3254 }
3255
3256 /* Similarly, return the signed version of a comparison. */
3257
3258 enum rtx_code
3259 signed_condition (code)
3260 enum rtx_code code;
3261 {
3262 switch (code)
3263 {
3264 case EQ:
3265 case NE:
3266 case GT:
3267 case GE:
3268 case LT:
3269 case LE:
3270 return code;
3271
3272 case GTU:
3273 return GT;
3274
3275 case GEU:
3276 return GE;
3277
3278 case LTU:
3279 return LT;
3280
3281 case LEU:
3282 return LE;
3283
3284 default:
3285 abort ();
3286 }
3287 }
3288 \f
3289 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3290 truth of CODE1 implies the truth of CODE2. */
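/* For example, EQ dominates GE and LE: if x == y is known true,
   then x >= y and x <= y are certainly true as well.  Likewise LT
   dominates NE, since x < y implies x != y. */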
3291
3292 int
3293 comparison_dominates_p (code1, code2)
3294 enum rtx_code code1, code2;
3295 {
3296 if (code1 == code2)
3297 return 1;
3298
3299 switch (code1)
3300 {
3301 case EQ:
3302 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
3303 return 1;
3304 break;
3305
3306 case LT:
3307 if (code2 == LE || code2 == NE)
3308 return 1;
3309 break;
3310
3311 case GT:
3312 if (code2 == GE || code2 == NE)
3313 return 1;
3314 break;
3315
3316 case LTU:
3317 if (code2 == LEU || code2 == NE)
3318 return 1;
3319 break;
3320
3321 case GTU:
3322 if (code2 == GEU || code2 == NE)
3323 return 1;
3324 break;
3325
3326 default:
3327 break;
3328 }
3329
3330 return 0;
3331 }
3332 \f
3333 /* Return 1 if INSN is an unconditional jump and nothing else. */
3334
3335 int
3336 simplejump_p (insn)
3337 rtx insn;
3338 {
3339 return (GET_CODE (insn) == JUMP_INSN
3340 && GET_CODE (PATTERN (insn)) == SET
3341 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3342 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3343 }
3344
3345 /* Return nonzero if INSN is a (possibly) conditional jump
3346 and nothing more. */
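/* That is, PATTERN (INSN) must be one of
     (set (pc) (label_ref L))
     (set (pc) (if_then_else COND (label_ref L) (pc)))
     (set (pc) (if_then_else COND (pc) (label_ref L)))
   where a RETURN may stand in for the label_ref in either arm of
   the if_then_else. */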
3347
3348 int
3349 condjump_p (insn)
3350 rtx insn;
3351 {
3352 register rtx x = PATTERN (insn);
3353 if (GET_CODE (x) != SET)
3354 return 0;
3355 if (GET_CODE (SET_DEST (x)) != PC)
3356 return 0;
3357 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3358 return 1;
3359 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3360 return 0;
3361 if (XEXP (SET_SRC (x), 2) == pc_rtx
3362 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3363 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3364 return 1;
3365 if (XEXP (SET_SRC (x), 1) == pc_rtx
3366 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3367 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3368 return 1;
3369 return 0;
3370 }
3371
3372 /* Return nonzero if INSN is a (possibly) conditional jump
3373 embedded in a PARALLEL, and nothing more. */
3374
3375 int
3376 condjump_in_parallel_p (insn)
3377 rtx insn;
3378 {
3379 register rtx x = PATTERN (insn);
3380
3381 if (GET_CODE (x) != PARALLEL)
3382 return 0;
3383 else
3384 x = XVECEXP (x, 0, 0);
3385
3386 if (GET_CODE (x) != SET)
3387 return 0;
3388 if (GET_CODE (SET_DEST (x)) != PC)
3389 return 0;
3390 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3391 return 1;
3392 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3393 return 0;
3394 if (XEXP (SET_SRC (x), 2) == pc_rtx
3395 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3396 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3397 return 1;
3398 if (XEXP (SET_SRC (x), 1) == pc_rtx
3399 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3400 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3401 return 1;
3402 return 0;
3403 }
3404
3405 #ifdef HAVE_cc0
3406
3407 /* Return 1 if X is an RTX that does nothing but set the condition codes
3408 and CLOBBER or USE registers.
3409 Return -1 if X does explicitly set the condition codes,
3410 but also does other things. */
3411
3412 int
3413 sets_cc0_p (x)
3414 rtx x ATTRIBUTE_UNUSED;
3415 {
3416 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3417 return 1;
3418 if (GET_CODE (x) == PARALLEL)
3419 {
3420 int i;
3421 int sets_cc0 = 0;
3422 int other_things = 0;
3423 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3424 {
3425 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3426 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3427 sets_cc0 = 1;
3428 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3429 other_things = 1;
3430 }
3431 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3432 }
3433 return 0;
3434 }
3435 #endif
3436 \f
3437 /* Follow any unconditional jump at LABEL;
3438 return the ultimate label reached by any such chain of jumps.
3439 If LABEL is not followed by a jump, return LABEL.
3440 If the chain loops or we can't find the end, return LABEL,
3441 since that tells the caller to avoid changing the insn.
3442
3443 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3444 a USE or CLOBBER. */
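/* For example (illustrative):
     L1: jump L2   ...   L2: jump L3   ...   L3: <real code>
   follow_jumps (L1) returns L3, so a jump to L1 can be redirected
   straight to L3.  The depth bound of 10 and the cycle check keep
   a circular chain such as `L: jump L' from looping forever. */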
3445
3446 rtx
3447 follow_jumps (label)
3448 rtx label;
3449 {
3450 register rtx insn;
3451 register rtx next;
3452 register rtx value = label;
3453 register int depth;
3454
3455 for (depth = 0;
3456 (depth < 10
3457 && (insn = next_active_insn (value)) != 0
3458 && GET_CODE (insn) == JUMP_INSN
3459 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3460 || GET_CODE (PATTERN (insn)) == RETURN)
3461 && (next = NEXT_INSN (insn))
3462 && GET_CODE (next) == BARRIER);
3463 depth++)
3464 {
3465 /* Don't chain through the insn that jumps into a loop
3466 from outside the loop,
3467 since that would create multiple loop entry jumps
3468 and prevent loop optimization. */
3469 rtx tem;
3470 if (!reload_completed)
3471 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3472 if (GET_CODE (tem) == NOTE
3473 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
3474 /* ??? Optional. Disables some optimizations, but makes
3475 gcov output more accurate with -O. */
3476 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
3477 return value;
3478
3479 /* If we have found a cycle, make the insn jump to itself. */
3480 if (JUMP_LABEL (insn) == label)
3481 return label;
3482
3483 tem = next_active_insn (JUMP_LABEL (insn));
3484 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3485 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3486 break;
3487
3488 value = JUMP_LABEL (insn);
3489 }
3490 if (depth == 10)
3491 return label;
3492 return value;
3493 }
3494
3495 /* Assuming that field IDX of X is a vector of label_refs,
3496 replace each of them by the ultimate label reached by it.
3497 Return nonzero if a change is made.
3498 Before reload, follow_jumps does not chain across a NOTE_INSN_LOOP_BEG. */
3499
3500 static int
3501 tension_vector_labels (x, idx)
3502 register rtx x;
3503 register int idx;
3504 {
3505 int changed = 0;
3506 register int i;
3507 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3508 {
3509 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3510 register rtx nlabel = follow_jumps (olabel);
3511 if (nlabel && nlabel != olabel)
3512 {
3513 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3514 ++LABEL_NUSES (nlabel);
3515 if (--LABEL_NUSES (olabel) == 0)
3516 delete_insn (olabel);
3517 changed = 1;
3518 }
3519 }
3520 return changed;
3521 }
3522 \f
3523 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3524 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3525 in INSN, then store one of them in JUMP_LABEL (INSN).
3526 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3527 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3528 Also, when there are consecutive labels, canonicalize on the last of them.
3529
3530 Note that two labels separated by a loop-beginning note
3531 must be kept distinct if we have not yet done loop-optimization,
3532 because the gap between them is where loop-optimize
3533 will want to move invariant code to. CROSS_JUMP tells us
3534 that loop-optimization has already been done.
3535
3536 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3537 two labels distinct if they are separated by only USE or CLOBBER insns. */
3538
3539 static void
3540 mark_jump_label (x, insn, cross_jump)
3541 register rtx x;
3542 rtx insn;
3543 int cross_jump;
3544 {
3545 register RTX_CODE code = GET_CODE (x);
3546 register int i;
3547 register char *fmt;
3548
3549 switch (code)
3550 {
3551 case PC:
3552 case CC0:
3553 case REG:
3554 case SUBREG:
3555 case CONST_INT:
3556 case SYMBOL_REF:
3557 case CONST_DOUBLE:
3558 case CLOBBER:
3559 case CALL:
3560 return;
3561
3562 case MEM:
3563 /* If this is a constant-pool reference, see if it is a label. */
3564 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3565 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3566 mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
3567 break;
3568
3569 case LABEL_REF:
3570 {
3571 rtx label = XEXP (x, 0);
3572 rtx olabel = label;
3573 rtx note;
3574 rtx next;
3575
3576 if (GET_CODE (label) != CODE_LABEL)
3577 abort ();
3578
3579 /* Ignore references to labels of containing functions. */
3580 if (LABEL_REF_NONLOCAL_P (x))
3581 break;
3582
3583 /* If there are other labels following this one,
3584 replace it with the last of the consecutive labels. */
3585 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3586 {
3587 if (GET_CODE (next) == CODE_LABEL)
3588 label = next;
3589 else if (cross_jump && GET_CODE (next) == INSN
3590 && (GET_CODE (PATTERN (next)) == USE
3591 || GET_CODE (PATTERN (next)) == CLOBBER))
3592 continue;
3593 else if (GET_CODE (next) != NOTE)
3594 break;
3595 else if (! cross_jump
3596 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3597 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
3598 /* ??? Optional. Disables some optimizations, but
3599 makes gcov output more accurate with -O. */
3600 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
3601 break;
3602 }
3603
3604 XEXP (x, 0) = label;
3605 if (! insn || ! INSN_DELETED_P (insn))
3606 ++LABEL_NUSES (label);
3607
3608 if (insn)
3609 {
3610 if (GET_CODE (insn) == JUMP_INSN)
3611 JUMP_LABEL (insn) = label;
3612
3613 /* If we've changed OLABEL and we had a REG_LABEL note
3614 for it, update it as well. */
3615 else if (label != olabel
3616 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3617 XEXP (note, 0) = label;
3618
3619 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3620 is one. */
3621 else if (! find_reg_note (insn, REG_LABEL, label))
3622 {
3623 /* This code used to ignore labels which referred to dispatch
3624 tables to avoid flow.c generating worse code.
3625
3626 However, in the presence of global optimizations like
3627 gcse which call find_basic_blocks without calling
3628 life_analysis, not recording such labels will lead
3629 to compiler aborts because of inconsistencies in the
3630 flow graph. So we go ahead and record the label.
3631
3632 It may also be the case that the optimization argument
3633 is no longer valid because of the more accurate cfg
3634 we build in find_basic_blocks -- it no longer pessimizes
3635 code when it finds a REG_LABEL note. */
3636 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
3637 REG_NOTES (insn));
3638 }
3639 }
3640 return;
3641 }
3642
3643 /* Do walk the labels in a vector, but not the first operand of an
3644 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3645 case ADDR_VEC:
3646 case ADDR_DIFF_VEC:
3647 if (! INSN_DELETED_P (insn))
3648 {
3649 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
3650
3651 for (i = 0; i < XVECLEN (x, eltnum); i++)
3652 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
3653 }
3654 return;
3655
3656 default:
3657 break;
3658 }
3659
3660 fmt = GET_RTX_FORMAT (code);
3661 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3662 {
3663 if (fmt[i] == 'e')
3664 mark_jump_label (XEXP (x, i), insn, cross_jump);
3665 else if (fmt[i] == 'E')
3666 {
3667 register int j;
3668 for (j = 0; j < XVECLEN (x, i); j++)
3669 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
3670 }
3671 }
3672 }
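
/* Example of the canonicalization above: given the consecutive labels

       L1:
       L2:
       L3:  (followed by the first real insn)

   a (label_ref L1) inside INSN is rewritten as (label_ref L3),
   LABEL_NUSES (L3) is incremented, and if INSN is a JUMP_INSN its
   JUMP_LABEL field is set to L3.  */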
3673
3674 /* If all INSN does is set the pc, delete it,
3675 and delete the insn that set the condition codes for it
3676 if that's what the previous thing was. */
3677
3678 void
3679 delete_jump (insn)
3680 rtx insn;
3681 {
3682 register rtx set = single_set (insn);
3683
3684 if (set && GET_CODE (SET_DEST (set)) == PC)
3685 delete_computation (insn);
3686 }
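
/* For instance, an insn whose entire pattern is
   (set (pc) (label_ref L)) qualifies and is removed through
   delete_computation; an insn whose PARALLEL also sets a data
   register does not, because single_set returns zero for it.  */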
3687
3688 /* Delete INSN and recursively delete insns that compute values used only
3689 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3690 If we are running before flow.c, we need do nothing since flow.c will
3691 delete dead code. We also can't know if the registers being used are
3692 dead or not at this point.
3693
3694 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3695 nothing other than set a register that dies in this insn, we can delete
3696 that insn as well.
3697
3698 On machines with CC0, if CC0 is used in this insn, we may be able to
3699 delete the insn that set it. */
3700
3701 static void
3702 delete_computation (insn)
3703 rtx insn;
3704 {
3705 rtx note, next;
3706
3707 #ifdef HAVE_cc0
3708 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3709 {
3710 rtx prev = prev_nonnote_insn (insn);
3711 /* We assume that at this stage
3712 CC's are always set explicitly
3713 and always immediately before the jump that
3714 will use them. So if the previous insn
3715 exists to set the CC's, delete it
3716 (unless it performs auto-increments, etc.). */
3717 if (prev && GET_CODE (prev) == INSN
3718 && sets_cc0_p (PATTERN (prev)))
3719 {
3720 if (sets_cc0_p (PATTERN (prev)) > 0
3721 && !FIND_REG_INC_NOTE (prev, NULL_RTX))
3722 delete_computation (prev);
3723 else
3724 /* Otherwise, show that cc0 won't be used. */
3725 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
3726 cc0_rtx, REG_NOTES (prev));
3727 }
3728 }
3729 #endif
3730
3731 for (note = REG_NOTES (insn); note; note = next)
3732 {
3733 rtx our_prev;
3734
3735 next = XEXP (note, 1);
3736
3737 if (REG_NOTE_KIND (note) != REG_DEAD
3738 /* Verify that the REG_NOTE is legitimate. */
3739 || GET_CODE (XEXP (note, 0)) != REG)
3740 continue;
3741
3742 for (our_prev = prev_nonnote_insn (insn);
3743 our_prev && GET_CODE (our_prev) == INSN;
3744 our_prev = prev_nonnote_insn (our_prev))
3745 {
3746 /* If we reach a SEQUENCE, it is too complex to try to
3747 do anything with it, so give up. */
3748 if (GET_CODE (PATTERN (our_prev)) == SEQUENCE)
3749 break;
3750
3751 if (GET_CODE (PATTERN (our_prev)) == USE
3752 && GET_CODE (XEXP (PATTERN (our_prev), 0)) == INSN)
3753 /* reorg creates USEs that look like this. We leave them
3754 alone because reorg needs them for its own purposes. */
3755 break;
3756
3757 if (reg_set_p (XEXP (note, 0), PATTERN (our_prev)))
3758 {
3759 if (FIND_REG_INC_NOTE (our_prev, NULL_RTX))
3760 break;
3761
3762 if (GET_CODE (PATTERN (our_prev)) == PARALLEL)
3763 {
3764 /* If we find a SET of something else, we can't
3765 delete the insn. */
3766
3767 int i;
3768
3769 for (i = 0; i < XVECLEN (PATTERN (our_prev), 0); i++)
3770 {
3771 rtx part = XVECEXP (PATTERN (our_prev), 0, i);
3772
3773 if (GET_CODE (part) == SET
3774 && SET_DEST (part) != XEXP (note, 0))
3775 break;
3776 }
3777
3778 if (i == XVECLEN (PATTERN (our_prev), 0))
3779 delete_computation (our_prev);
3780 }
3781 else if (GET_CODE (PATTERN (our_prev)) == SET
3782 && SET_DEST (PATTERN (our_prev)) == XEXP (note, 0))
3783 delete_computation (our_prev);
3784
3785 break;
3786 }
3787
3788 /* If OUR_PREV references the register that dies here, it is an
3789 additional use. Hence any prior SET isn't dead. However, this
3790 insn becomes the new place for the REG_DEAD note. */
3791 if (reg_overlap_mentioned_p (XEXP (note, 0),
3792 PATTERN (our_prev)))
3793 {
3794 XEXP (note, 1) = REG_NOTES (our_prev);
3795 REG_NOTES (our_prev) = note;
3796 break;
3797 }
3798 }
3799 }
3800
3801 delete_insn (insn);
3802 }
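
/* Illustration of the backward walk (insn numbers hypothetical):

       (insn 10 ... (set (reg 64) (plus (reg 65) (const_int 4))))
       (jump_insn 11 ... (expr_list:REG_DEAD (reg 64) ...))

   Deleting insn 11 deletes insn 10 as well, since insn 10 does
   nothing but set (reg 64), which dies in insn 11.  Had insn 10
   carried a REG_INC note, or set anything else, it would be kept.  */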
3803 \f
3804 /* Delete insn INSN from the chain of insns and update label ref counts.
3805 May delete some following insns as a consequence; may even delete
3806 a label elsewhere and insns that follow it.
3807
3808 Returns the first insn after INSN that was not deleted. */
3809
3810 rtx
3811 delete_insn (insn)
3812 register rtx insn;
3813 {
3814 register rtx next = NEXT_INSN (insn);
3815 register rtx prev = PREV_INSN (insn);
3816 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
3817 register int dont_really_delete = 0;
3818
3819 while (next && INSN_DELETED_P (next))
3820 next = NEXT_INSN (next);
3821
3822 /* This insn is already deleted => return first following nondeleted. */
3823 if (INSN_DELETED_P (insn))
3824 return next;
3825
3826 /* Don't delete user-declared labels. Convert them to special NOTEs
3827 instead. */
3828 if (was_code_label && LABEL_NAME (insn) != 0
3829 && optimize && ! dont_really_delete)
3830 {
3831 PUT_CODE (insn, NOTE);
3832 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
3833 NOTE_SOURCE_FILE (insn) = 0;
3834 dont_really_delete = 1;
3835 }
3836 else
3837 /* Mark this insn as deleted. */
3838 INSN_DELETED_P (insn) = 1;
3839
3840 /* If this is an unconditional jump, delete it from the jump chain. */
3841 if (simplejump_p (insn))
3842 delete_from_jump_chain (insn);
3843
3844 /* If the instruction is followed by a barrier,
3845 delete the barrier too. */
3846
3847 if (next != 0 && GET_CODE (next) == BARRIER)
3848 {
3849 INSN_DELETED_P (next) = 1;
3850 next = NEXT_INSN (next);
3851 }
3852
3853 /* Patch out INSN (and the barrier if any). */
3854
3855 if (optimize && ! dont_really_delete)
3856 {
3857 if (prev)
3858 {
3859 NEXT_INSN (prev) = next;
3860 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3861 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
3862 XVECLEN (PATTERN (prev), 0) - 1)) = next;
3863 }
3864
3865 if (next)
3866 {
3867 PREV_INSN (next) = prev;
3868 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3869 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3870 }
3871
3872 if (prev && NEXT_INSN (prev) == 0)
3873 set_last_insn (prev);
3874 }
3875
3876 /* If deleting a jump, decrement the count of the label,
3877 and delete the label if it is now unused. */
3878
3879 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
3880 if (--LABEL_NUSES (JUMP_LABEL (insn)) == 0)
3881 {
3882 /* This can delete NEXT or PREV,
3883 either directly if NEXT is JUMP_LABEL (INSN),
3884 or indirectly through more levels of jumps. */
3885 delete_insn (JUMP_LABEL (insn));
3886 /* I feel a little doubtful about this loop,
3887 but I see no clean and sure alternative way
3888 to find the first insn after INSN that is not now deleted.
3889 I hope this works. */
3890 while (next && INSN_DELETED_P (next))
3891 next = NEXT_INSN (next);
3892 return next;
3893 }
3894
3895 /* Likewise if we're deleting a dispatch table. */
3896
3897 if (GET_CODE (insn) == JUMP_INSN
3898 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
3899 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
3900 {
3901 rtx pat = PATTERN (insn);
3902 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3903 int len = XVECLEN (pat, diff_vec_p);
3904
3905 for (i = 0; i < len; i++)
3906 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
3907 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
3908 while (next && INSN_DELETED_P (next))
3909 next = NEXT_INSN (next);
3910 return next;
3911 }
3912
3913 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
3914 prev = PREV_INSN (prev);
3915
3916 /* If INSN was a label and a dispatch table follows it,
3917 delete the dispatch table. The tablejump must have gone already.
3918 It isn't useful to fall through into a table. */
3919
3920 if (was_code_label
3921 && NEXT_INSN (insn) != 0
3922 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
3923 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
3924 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
3925 next = delete_insn (NEXT_INSN (insn));
3926
3927 /* If INSN was a label, delete insns following it if now unreachable. */
3928
3929 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
3930 {
3931 register RTX_CODE code;
3932 while (next != 0
3933 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
3934 || code == NOTE || code == BARRIER
3935 || (code == CODE_LABEL && INSN_DELETED_P (next))))
3936 {
3937 if (code == NOTE
3938 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
3939 next = NEXT_INSN (next);
3940 /* Keep going past other deleted labels to delete what follows. */
3941 else if (code == CODE_LABEL && INSN_DELETED_P (next))
3942 next = NEXT_INSN (next);
3943 else
3944 /* Note: if this deletes a jump, it can cause more
3945 deletion of unreachable code, after a different label.
3946 As long as the value from this recursive call is correct,
3947 this invocation functions correctly. */
3948 next = delete_insn (next);
3949 }
3950 }
3951
3952 return next;
3953 }
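
/* Example of the cascades above: if INSN is the only jump to label L,
   deleting INSN drops LABEL_NUSES (L) to zero, so L is deleted in
   turn; and if L follows a BARRIER, everything after L has become
   unreachable and is deleted too, recursively.  */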
3954
3955 /* Advance from INSN till reaching something not deleted;
3956 then return that. May return INSN itself. */
3957
3958 rtx
3959 next_nondeleted_insn (insn)
3960 rtx insn;
3961 {
3962 while (INSN_DELETED_P (insn))
3963 insn = NEXT_INSN (insn);
3964 return insn;
3965 }
3966 \f
3967 /* Delete a range of insns from FROM to TO, inclusive.
3968 This is for the sake of peephole optimization, so assume
3969 that whatever these insns do will still be done by a new
3970 peephole insn that will replace them. */
3971
3972 void
3973 delete_for_peephole (from, to)
3974 register rtx from, to;
3975 {
3976 register rtx insn = from;
3977
3978 while (1)
3979 {
3980 register rtx next = NEXT_INSN (insn);
3981 register rtx prev = PREV_INSN (insn);
3982
3983 if (GET_CODE (insn) != NOTE)
3984 {
3985 INSN_DELETED_P (insn) = 1;
3986
3987 /* Patch this insn out of the chain. */
3988 /* We don't do this all at once, because we
3989 must preserve all NOTEs. */
3990 if (prev)
3991 NEXT_INSN (prev) = next;
3992
3993 if (next)
3994 PREV_INSN (next) = prev;
3995 }
3996
3997 if (insn == to)
3998 break;
3999 insn = next;
4000 }
4001
4002 /* Note that if TO is an unconditional jump
4003 we *do not* delete the BARRIER that follows,
4004 since the peephole that replaces this sequence
4005 is also an unconditional jump in that case. */
4006 }
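
/* A hypothetical use: after a peephole pattern has matched the three
   insns I1, I2, I3, calling delete_for_peephole (I1, I3) marks each
   of them deleted and unlinks them individually, while every NOTE
   between them stays linked into the chain.  */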
4007 \f
4008 /* Invert the condition of the jump JUMP, and make it jump
4009 to label NLABEL instead of where it jumps now. */
4010
4011 int
4012 invert_jump (jump, nlabel)
4013 rtx jump, nlabel;
4014 {
4015 /* We have to either invert the condition and change the label or
4016 do neither. Either operation could fail. We first try to invert
4017 the jump. If that succeeds, we try changing the label. If that fails,
4018 we invert the jump back to what it was. */
4019
4020 if (! invert_exp (PATTERN (jump), jump))
4021 return 0;
4022
4023 if (redirect_jump (jump, nlabel))
4024 {
4025 if (flag_branch_probabilities)
4026 {
4027 rtx note = find_reg_note (jump, REG_BR_PROB, 0);
4028
4029 /* An inverted jump means that a probability taken becomes a
4030 probability not taken. Subtract the branch probability from the
4031 probability base to convert it back to a taken probability.
4032 (We don't flip the probability on a branch that's never taken.) */
4033 if (note && XINT (XEXP (note, 0), 0) >= 0)
4034 XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
4035 }
4036
4037 return 1;
4038 }
4039
4040 if (! invert_exp (PATTERN (jump), jump))
4041 /* This should just be putting it back the way it was. */
4042 abort ();
4043
4044 return 0;
4045 }
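
/* Worked example for the REG_BR_PROB adjustment above: with
   REG_BR_PROB_BASE == 10000, a branch annotated as taken with
   probability 3000 becomes, once inverted, taken with probability
   10000 - 3000 == 7000.  */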
4046
4047 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4048
4049 Return 1 if we can do so, 0 if we cannot find a way to do so that
4050 matches a pattern. */
4051
4052 int
4053 invert_exp (x, insn)
4054 rtx x;
4055 rtx insn;
4056 {
4057 register RTX_CODE code;
4058 register int i;
4059 register char *fmt;
4060
4061 code = GET_CODE (x);
4062
4063 if (code == IF_THEN_ELSE)
4064 {
4065 register rtx comp = XEXP (x, 0);
4066 register rtx tem;
4067
4068 /* We can do this in two ways: The preferable way, which can only
4069 be done if this is not an integer comparison, is to reverse
4070 the comparison code. Otherwise, swap the THEN-part and ELSE-part
4071 of the IF_THEN_ELSE. If we can't do either, fail. */
4072
4073 if (can_reverse_comparison_p (comp, insn)
4074 && validate_change (insn, &XEXP (x, 0),
4075 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
4076 GET_MODE (comp), XEXP (comp, 0),
4077 XEXP (comp, 1)), 0))
4078 return 1;
4079
4080 tem = XEXP (x, 1);
4081 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
4082 validate_change (insn, &XEXP (x, 2), tem, 1);
4083 return apply_change_group ();
4084 }
4085
4086 fmt = GET_RTX_FORMAT (code);
4087 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4088 {
4089 if (fmt[i] == 'e')
4090 if (! invert_exp (XEXP (x, i), insn))
4091 return 0;
4092 if (fmt[i] == 'E')
4093 {
4094 register int j;
4095 for (j = 0; j < XVECLEN (x, i); j++)
4096 if (!invert_exp (XVECEXP (x, i, j), insn))
4097 return 0;
4098 }
4099 }
4100
4101 return 1;
4102 }
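
/* For example, the preferred inversion rewrites

       (if_then_else (lt (reg 64) (const_int 0)) (label_ref L) (pc))

   as (if_then_else (ge ...) (label_ref L) (pc)).  When the comparison
   cannot be reversed (e.g. an IEEE floating-point compare without
   -ffast-math), the two arms are swapped instead, yielding
   (if_then_else (lt ...) (pc) (label_ref L)).  */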
4103 \f
4104 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4105 If the old jump target label is unused as a result,
4106 it and the code following it may be deleted.
4107
4108 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4109 RETURN insn.
4110
4111 The return value will be 1 if the change was made, 0 if it wasn't (this
4112 can only occur for NLABEL == 0). */
4113
4114 int
4115 redirect_jump (jump, nlabel)
4116 rtx jump, nlabel;
4117 {
4118 register rtx olabel = JUMP_LABEL (jump);
4119
4120 if (nlabel == olabel)
4121 return 1;
4122
4123 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
4124 return 0;
4125
4126 /* If this is an unconditional branch, delete it from the jump_chain of
4127 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4128 have UID's in range and JUMP_CHAIN is valid). */
4129 if (jump_chain && (simplejump_p (jump)
4130 || GET_CODE (PATTERN (jump)) == RETURN))
4131 {
4132 int label_index = nlabel ? INSN_UID (nlabel) : 0;
4133
4134 delete_from_jump_chain (jump);
4135 if (label_index < max_jump_chain
4136 && INSN_UID (jump) < max_jump_chain)
4137 {
4138 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
4139 jump_chain[label_index] = jump;
4140 }
4141 }
4142
4143 JUMP_LABEL (jump) = nlabel;
4144 if (nlabel)
4145 ++LABEL_NUSES (nlabel);
4146
4147 if (olabel && --LABEL_NUSES (olabel) == 0)
4148 delete_insn (olabel);
4149
4150 return 1;
4151 }
4152
4153 /* Delete the instruction JUMP from any jump chain it might be on. */
4154
4155 static void
4156 delete_from_jump_chain (jump)
4157 rtx jump;
4158 {
4159 int index;
4160 rtx olabel = JUMP_LABEL (jump);
4161
4162 /* Handle unconditional jumps. */
4163 if (jump_chain && olabel != 0
4164 && INSN_UID (olabel) < max_jump_chain
4165 && simplejump_p (jump))
4166 index = INSN_UID (olabel);
4167 /* Handle return insns. */
4168 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4169 index = 0;
4170 else return;
4171
4172 if (jump_chain[index] == jump)
4173 jump_chain[index] = jump_chain[INSN_UID (jump)];
4174 else
4175 {
4176 rtx insn;
4177
4178 for (insn = jump_chain[index];
4179 insn != 0;
4180 insn = jump_chain[INSN_UID (insn)])
4181 if (jump_chain[INSN_UID (insn)] == jump)
4182 {
4183 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4184 break;
4185 }
4186 }
4187 }
4188
4189 /* If NLABEL is nonzero, throughout the rtx at LOC,
4190 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
4191 zero, alter (RETURN) to (LABEL_REF NLABEL).
4192
4193 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
4194 validity with validate_change. Convert (set (pc) (label_ref olabel))
4195 to (return).
4196
4197 Return 0 if we found a change we would like to make but it is invalid.
4198 Otherwise, return 1. */
4199
4200 int
4201 redirect_exp (loc, olabel, nlabel, insn)
4202 rtx *loc;
4203 rtx olabel, nlabel;
4204 rtx insn;
4205 {
4206 register rtx x = *loc;
4207 register RTX_CODE code = GET_CODE (x);
4208 register int i;
4209 register char *fmt;
4210
4211 if (code == LABEL_REF)
4212 {
4213 if (XEXP (x, 0) == olabel)
4214 {
4215 if (nlabel)
4216 XEXP (x, 0) = nlabel;
4217 else
4218 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4219 return 1;
4220 }
4221 }
4222 else if (code == RETURN && olabel == 0)
4223 {
4224 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
4225 if (loc == &PATTERN (insn))
4226 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
4227 return validate_change (insn, loc, x, 0);
4228 }
4229
4230 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
4231 && GET_CODE (SET_SRC (x)) == LABEL_REF
4232 && XEXP (SET_SRC (x), 0) == olabel)
4233 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4234
4235 fmt = GET_RTX_FORMAT (code);
4236 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4237 {
4238 if (fmt[i] == 'e')
4239 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
4240 return 0;
4241 if (fmt[i] == 'E')
4242 {
4243 register int j;
4244 for (j = 0; j < XVECLEN (x, i); j++)
4245 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
4246 return 0;
4247 }
4248 }
4249
4250 return 1;
4251 }
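
/* Examples: with NLABEL == 0, (set (pc) (label_ref OLABEL)) becomes
   (return), kept only if validate_change accepts it; with OLABEL == 0,
   a (return) becomes (label_ref NLABEL), wrapped in (set (pc) ...)
   when it is the insn's whole pattern.  */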
4252 \f
4253 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4254
4255 If the old jump target label (before the dispatch table) becomes unused,
4256 it and the dispatch table may be deleted. In that case, find the insn
4257 before the jump that references that label, and delete it and its logical
4258 successors too. */
4259
4260 static void
4261 redirect_tablejump (jump, nlabel)
4262 rtx jump, nlabel;
4263 {
4264 register rtx olabel = JUMP_LABEL (jump);
4265
4266 /* Add this jump to the jump_chain of NLABEL. */
4267 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4268 && INSN_UID (jump) < max_jump_chain)
4269 {
4270 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4271 jump_chain[INSN_UID (nlabel)] = jump;
4272 }
4273
4274 PATTERN (jump) = gen_jump (nlabel);
4275 JUMP_LABEL (jump) = nlabel;
4276 ++LABEL_NUSES (nlabel);
4277 INSN_CODE (jump) = -1;
4278
4279 if (--LABEL_NUSES (olabel) == 0)
4280 {
4281 delete_labelref_insn (jump, olabel, 0);
4282 delete_insn (olabel);
4283 }
4284 }
4285
4286 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4287 If we found one, delete it and then delete this insn if DELETE_THIS is
4288 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4289
4290 static int
4291 delete_labelref_insn (insn, label, delete_this)
4292 rtx insn, label;
4293 int delete_this;
4294 {
4295 int deleted = 0;
4296 rtx link;
4297
4298 if (GET_CODE (insn) != NOTE
4299 && reg_mentioned_p (label, PATTERN (insn)))
4300 {
4301 if (delete_this)
4302 {
4303 delete_insn (insn);
4304 deleted = 1;
4305 }
4306 else
4307 return 1;
4308 }
4309
4310 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4311 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4312 {
4313 if (delete_this)
4314 {
4315 delete_insn (insn);
4316 deleted = 1;
4317 }
4318 else
4319 return 1;
4320 }
4321
4322 return deleted;
4323 }
4324 \f
4325 /* Like rtx_equal_p except that it considers two REGs as equal
4326 if they renumber to the same value and considers two commutative
4327 operations to be the same if the order of the operands has been
4328 reversed.
4329
4330 ??? Addition is not commutative on the PA due to the weird implicit
4331 space register selection rules for memory addresses. Therefore, we
4332 don't consider a + b == b + a.
4333
4334 We could/should make this test a little tighter. Possibly only
4335 disabling it on the PA via some backend macro or only disabling this
4336 case when the PLUS is inside a MEM. */
4337
4338 int
4339 rtx_renumbered_equal_p (x, y)
4340 rtx x, y;
4341 {
4342 register int i;
4343 register RTX_CODE code = GET_CODE (x);
4344 register char *fmt;
4345
4346 if (x == y)
4347 return 1;
4348
4349 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4350 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4351 && GET_CODE (SUBREG_REG (y)) == REG)))
4352 {
4353 int reg_x = -1, reg_y = -1;
4354 int word_x = 0, word_y = 0;
4355
4356 if (GET_MODE (x) != GET_MODE (y))
4357 return 0;
4358
4359 /* If we haven't done any renumbering, don't
4360 make any assumptions. */
4361 if (reg_renumber == 0)
4362 return rtx_equal_p (x, y);
4363
4364 if (code == SUBREG)
4365 {
4366 reg_x = REGNO (SUBREG_REG (x));
4367 word_x = SUBREG_WORD (x);
4368
4369 if (reg_renumber[reg_x] >= 0)
4370 {
4371 reg_x = reg_renumber[reg_x] + word_x;
4372 word_x = 0;
4373 }
4374 }
4375
4376 else
4377 {
4378 reg_x = REGNO (x);
4379 if (reg_renumber[reg_x] >= 0)
4380 reg_x = reg_renumber[reg_x];
4381 }
4382
4383 if (GET_CODE (y) == SUBREG)
4384 {
4385 reg_y = REGNO (SUBREG_REG (y));
4386 word_y = SUBREG_WORD (y);
4387
4388 if (reg_renumber[reg_y] >= 0)
4389 {
4390 reg_y = reg_renumber[reg_y];
4391 word_y = 0;
4392 }
4393 }
4394
4395 else
4396 {
4397 reg_y = REGNO (y);
4398 if (reg_renumber[reg_y] >= 0)
4399 reg_y = reg_renumber[reg_y];
4400 }
4401
4402 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4403 }
4404
4405 /* Now we have disposed of all the cases
4406 in which different rtx codes can match. */
4407 if (code != GET_CODE (y))
4408 return 0;
4409
4410 switch (code)
4411 {
4412 case PC:
4413 case CC0:
4414 case ADDR_VEC:
4415 case ADDR_DIFF_VEC:
4416 return 0;
4417
4418 case CONST_INT:
4419 return INTVAL (x) == INTVAL (y);
4420
4421 case LABEL_REF:
4422 /* We can't assume nonlocal labels have their following insns yet. */
4423 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4424 return XEXP (x, 0) == XEXP (y, 0);
4425
4426 /* Two label-refs are equivalent if they point at labels
4427 in the same position in the instruction stream. */
4428 return (next_real_insn (XEXP (x, 0))
4429 == next_real_insn (XEXP (y, 0)));
4430
4431 case SYMBOL_REF:
4432 return XSTR (x, 0) == XSTR (y, 0);
4433
4434 case CODE_LABEL:
4435 /* If we didn't match EQ equality above, they aren't the same. */
4436 return 0;
4437
4438 default:
4439 break;
4440 }
4441
4442 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4443
4444 if (GET_MODE (x) != GET_MODE (y))
4445 return 0;
4446
4447 /* For commutative operations, two RTXs match if their operands match in
4448 either order. Also handle the simple binary and unary cases without a loop.
4449
4450 ??? Don't consider PLUS a commutative operator; see comments above. */
4451 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4452 && code != PLUS)
4453 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4454 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4455 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4456 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4457 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4458 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4459 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4460 else if (GET_RTX_CLASS (code) == '1')
4461 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4462
4463 /* Compare the elements. If any pair of corresponding elements
4464 fails to match, return 0 for the whole thing.
4465
4466 fmt = GET_RTX_FORMAT (code);
4467 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4468 {
4469 register int j;
4470 switch (fmt[i])
4471 {
4472 case 'w':
4473 if (XWINT (x, i) != XWINT (y, i))
4474 return 0;
4475 break;
4476
4477 case 'i':
4478 if (XINT (x, i) != XINT (y, i))
4479 return 0;
4480 break;
4481
4482 case 's':
4483 if (strcmp (XSTR (x, i), XSTR (y, i)))
4484 return 0;
4485 break;
4486
4487 case 'e':
4488 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4489 return 0;
4490 break;
4491
4492 case 'u':
4493 if (XEXP (x, i) != XEXP (y, i))
4494 return 0;
4495 /* fall through. */
4496 case '0':
4497 break;
4498
4499 case 'E':
4500 if (XVECLEN (x, i) != XVECLEN (y, i))
4501 return 0;
4502 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4503 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4504 return 0;
4505 break;
4506
4507 default:
4508 abort ();
4509 }
4510 }
4511 return 1;
4512 }
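
/* For instance, once reg_renumber has assigned pseudo 70 to hard
   register 4, (reg:SI 70) compares equal to (reg:SI 4) here; and per
   the PA caveat above, (plus (reg 1) (reg 2)) is never matched
   against (plus (reg 2) (reg 1)) with its operands swapped.  */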
4513 \f
4514 /* If X is a hard register or equivalent to one or a subregister of one,
4515 return the hard register number. If X is a pseudo register that was not
4516 assigned a hard register, return the pseudo register number. Otherwise,
4517 return -1. Any rtx is valid for X. */
4518
4519 int
4520 true_regnum (x)
4521 rtx x;
4522 {
4523 if (GET_CODE (x) == REG)
4524 {
4525 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4526 return reg_renumber[REGNO (x)];
4527 return REGNO (x);
4528 }
4529 if (GET_CODE (x) == SUBREG)
4530 {
4531 int base = true_regnum (SUBREG_REG (x));
4532 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
4533 return SUBREG_WORD (x) + base;
4534 }
4535 return -1;
4536 }
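
/* E.g. with reg_renumber[70] == 4, true_regnum gives 4 for (reg 70),
   4 + SUBREG_WORD for a SUBREG of it, and 70 itself if the pseudo was
   never assigned a hard register.  */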
4537 \f
4538 /* Optimize code of the form:
4539
4540 for (x = a[i]; x; ...)
4541 ...
4542 for (x = a[i]; x; ...)
4543 ...
4544 foo:
4545
4546 Loop optimize will change the above code into
4547
4548 if (x = a[i])
4549 for (;;)
4550 { ...; if (! (x = ...)) break; }
4551 if (x = a[i])
4552 for (;;)
4553 { ...; if (! (x = ...)) break; }
4554 foo:
4555
4556 In general, if the first test fails, the program can branch
4557 directly to `foo' and skip the second try which is doomed to fail.
4558 We run this after loop optimization and before flow analysis. */
4559
4560 /* When comparing the insn patterns, we track the fact that different
4561 pseudo-register numbers may have been used in each computation.
4562 The following array stores an equivalence -- same_regs[I] == J means
4563 that pseudo register I was used in the first set of tests in a context
4564 where J was used in the second set. We also count the number of such
4565 pending equivalences. If nonzero, the expressions really aren't the
4566 same. */
4567
4568 static int *same_regs;
4569
4570 static int num_same_regs;
4571
4572 /* Track any registers modified between the target of the first jump and
4573 the second jump. They never compare equal. */
4574
4575 static char *modified_regs;
4576
4577 /* Record if memory was modified. */
4578
4579 static int modified_mem;
4580
4581 /* Called via note_stores on each insn between the target of the first
4582 branch and the second branch. It marks any changed registers. */
4583
4584 static void
4585 mark_modified_reg (dest, x)
4586 rtx dest;
4587 rtx x ATTRIBUTE_UNUSED;
4588 {
4589 int regno, i;
4590
4591 if (GET_CODE (dest) == SUBREG)
4592 dest = SUBREG_REG (dest);
4593
4594 if (GET_CODE (dest) == MEM)
4595 modified_mem = 1;
4596
4597 if (GET_CODE (dest) != REG)
4598 return;
4599
4600 regno = REGNO (dest);
4601 if (regno >= FIRST_PSEUDO_REGISTER)
4602 modified_regs[regno] = 1;
4603 else
4604 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
4605 modified_regs[regno + i] = 1;
4606 }
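
/* E.g. a store to (reg:DI 2), on a target where HARD_REGNO_NREGS says
   DImode needs two hard registers, marks modified_regs[2] and
   modified_regs[3]; any store through a MEM merely sets
   modified_mem.  */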
4607
4608 /* F is the first insn in the chain of insns. */
4609
4610 void
4611 thread_jumps (f, max_reg, flag_before_loop)
4612 rtx f;
4613 int max_reg;
4614 int flag_before_loop;
4615 {
4616 /* Basic algorithm is to find a conditional branch,
4617 the label it may branch to, and the branch after
4618 that label. If the two branches test the same condition,
4619 walk back from both branch paths until the insn patterns
4620 differ, or code labels are hit. If we make it back to
4621 the target of the first branch, then we know that the first branch
4622 will either always succeed or always fail depending on the relative
4623 senses of the two branches. So adjust the first branch accordingly
4624 in this case. */
4625
4626 rtx label, b1, b2, t1, t2;
4627 enum rtx_code code1, code2;
4628 rtx b1op0, b1op1, b2op0, b2op1;
4629 int changed = 1;
4630 int i;
4631 int *all_reset;
4632
4633 /* Allocate register tables and quick-reset table. */
4634 modified_regs = (char *) alloca (max_reg * sizeof (char));
4635 same_regs = (int *) alloca (max_reg * sizeof (int));
4636 all_reset = (int *) alloca (max_reg * sizeof (int));
4637 for (i = 0; i < max_reg; i++)
4638 all_reset[i] = -1;
4639
4640 while (changed)
4641 {
4642 changed = 0;
4643
4644 for (b1 = f; b1; b1 = NEXT_INSN (b1))
4645 {
4646 /* Get to a candidate branch insn. */
4647 if (GET_CODE (b1) != JUMP_INSN
4648 || ! condjump_p (b1) || simplejump_p (b1)
4649 || JUMP_LABEL (b1) == 0)
4650 continue;
4651
4652 bzero (modified_regs, max_reg * sizeof (char));
4653 modified_mem = 0;
4654
4655 bcopy ((char *) all_reset, (char *) same_regs,
4656 max_reg * sizeof (int));
4657 num_same_regs = 0;
4658
4659 label = JUMP_LABEL (b1);
4660
4661 /* Look for a branch after the target. Record any registers and
4662 memory modified between the target and the branch. Stop when we
4663 get to a label since we can't know what was changed there. */
4664 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
4665 {
4666 if (GET_CODE (b2) == CODE_LABEL)
4667 break;
4668
4669 else if (GET_CODE (b2) == JUMP_INSN)
4670 {
4671 /* If this is an unconditional jump and is the only use of
4672 its target label, we can follow it. */
4673 if (simplejump_p (b2)
4674 && JUMP_LABEL (b2) != 0
4675 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
4676 {
4677 b2 = JUMP_LABEL (b2);
4678 continue;
4679 }
4680 else
4681 break;
4682 }
4683
4684 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
4685 continue;
4686
4687 if (GET_CODE (b2) == CALL_INSN)
4688 {
4689 modified_mem = 1;
4690 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4691 if (call_used_regs[i] && ! fixed_regs[i]
4692 && i != STACK_POINTER_REGNUM
4693 && i != FRAME_POINTER_REGNUM
4694 && i != HARD_FRAME_POINTER_REGNUM
4695 && i != ARG_POINTER_REGNUM)
4696 modified_regs[i] = 1;
4697 }
4698
4699 note_stores (PATTERN (b2), mark_modified_reg);
4700 }
4701
4702 /* Check the next candidate branch insn from the label
4703 of the first. */
4704 if (b2 == 0
4705 || GET_CODE (b2) != JUMP_INSN
4706 || b2 == b1
4707 || ! condjump_p (b2)
4708 || simplejump_p (b2))
4709 continue;
4710
4711 /* Get the comparison codes and operands, reversing the
4712 codes if appropriate. If we don't have comparison codes,
4713 we can't do anything. */
4714 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
4715 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
4716 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
4717 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
4718 code1 = reverse_condition (code1);
4719
4720 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
4721 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
4722 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
4723 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
4724 code2 = reverse_condition (code2);
4725
4726 /* If they test the same things and knowing that B1 branches
4727 tells us whether or not B2 branches, check if we
4728 can thread the branch. */
4729 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
4730 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
4731 && (comparison_dominates_p (code1, code2)
4732 || (comparison_dominates_p (code1, reverse_condition (code2))
4733 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
4734 0),
4735 b1))))
4736 {
4737 t1 = prev_nonnote_insn (b1);
4738 t2 = prev_nonnote_insn (b2);
4739
4740 while (t1 != 0 && t2 != 0)
4741 {
4742 if (t2 == label)
4743 {
4744 /* We have reached the target of the first branch.
4745 If there are no pending register equivalents,
4746 we know that this branch will either always
4747 succeed (if the senses of the two branches are
4748 the same) or always fail (if not). */
4749 rtx new_label;
4750
4751 if (num_same_regs != 0)
4752 break;
4753
4754 if (comparison_dominates_p (code1, code2))
4755 new_label = JUMP_LABEL (b2);
4756 else
4757 new_label = get_label_after (b2);
4758
4759 if (JUMP_LABEL (b1) != new_label)
4760 {
4761 rtx prev = PREV_INSN (new_label);
4762
4763 if (flag_before_loop
4764 && GET_CODE (prev) == NOTE
4765 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
4766 {
4767 /* Don't thread to the loop label. If a loop
4768 label is reused, loop optimization will
4769 be disabled for that loop. */
4770 new_label = gen_label_rtx ();
4771 emit_label_after (new_label, PREV_INSN (prev));
4772 }
4773 changed |= redirect_jump (b1, new_label);
4774 }
4775 break;
4776 }
4777
4778 /* If either of these is not a normal insn (it might be
4779 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4780 have already been skipped above.) Similarly, fail
4781 if the insns are different. */
4782 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
4783 || recog_memoized (t1) != recog_memoized (t2)
4784 || ! rtx_equal_for_thread_p (PATTERN (t1),
4785 PATTERN (t2), t2))
4786 break;
4787
4788 t1 = prev_nonnote_insn (t1);
4789 t2 = prev_nonnote_insn (t2);
4790 }
4791 }
4792 }
4793 }
4794 }
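
/* Schematic example of a successful thread, assuming the insns
   leading up to the two branches are identical:

       if (a > b) goto L1;            if (a > b) goto L2;
       ...                      =>    ...
   L1: t = setup;                 L1: t = setup;
       if (a > b) goto L2;            if (a > b) goto L2;

   Taking the first branch guarantees the second is taken, so the
   first branch is redirected straight to L2; had the senses been
   opposite, it would be redirected to the label just after the
   second branch.  */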
4795 \f
4796 /* This is like RTX_EQUAL_P except that it knows about our handling of
4797 possibly equivalent registers and knows to consider volatile and
4798 modified objects as not equal.
4799
4800 YINSN is the insn containing Y. */
4801
4802 int
4803 rtx_equal_for_thread_p (x, y, yinsn)
4804 rtx x, y;
4805 rtx yinsn;
4806 {
4807 register int i;
4808 register int j;
4809 register enum rtx_code code;
4810 register char *fmt;
4811
4812 code = GET_CODE (x);
4813 /* Rtx's of different codes cannot be equal. */
4814 if (code != GET_CODE (y))
4815 return 0;
4816
4817 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
4818 (REG:SI x) and (REG:HI x) are NOT equivalent. */
4819
4820 if (GET_MODE (x) != GET_MODE (y))
4821 return 0;
4822
4823 /* For floating-point, consider everything unequal. This is a bit
4824 pessimistic, but this pass would only rarely do anything for FP
4825 anyway. */
4826 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
4827 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
4828 return 0;
4829
4830 /* For commutative operations, two RTXs match if their operands match in
4831 either order. Also handle the simple binary and unary cases without a loop. */
4832 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4833 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4834 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
4835 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
4836 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
4837 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4838 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4839 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
4840 else if (GET_RTX_CLASS (code) == '1')
4841 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4842
4843 /* Handle special cases first. */
4844 switch (code)
4845 {
4846 case REG:
4847 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
4848 return 1;
4849
4850 /* If neither is a user variable nor a hard register, check for possible
4851 equivalence. */
4852 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
4853 || REGNO (x) < FIRST_PSEUDO_REGISTER
4854 || REGNO (y) < FIRST_PSEUDO_REGISTER)
4855 return 0;
4856
4857 if (same_regs[REGNO (x)] == -1)
4858 {
4859 same_regs[REGNO (x)] = REGNO (y);
4860 num_same_regs++;
4861
4862 /* If this is the first time we are seeing a register on the `Y'
4863 side, see if it is the last use. If not, we can't thread the
4864 jump, so mark it as not equivalent. */
4865 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
4866 return 0;
4867
4868 return 1;
4869 }
4870 else
4871 return (same_regs[REGNO (x)] == REGNO (y));
4872
4873 break;
4874
4875 case MEM:
4876 /* If memory was modified, or either operand is volatile, they are not
4877 equivalent. Else, check the addresses. */
4878 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4879 return 0;
4880
4881 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4882
4883 case ASM_INPUT:
4884 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4885 return 0;
4886
4887 break;
4888
4889 case SET:
4890 /* Cancel a pending `same_regs' if setting equivalenced registers.
4891 Then process source. */
4892 if (GET_CODE (SET_DEST (x)) == REG
4893 && GET_CODE (SET_DEST (y)) == REG)
4894 {
4895 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
4896 {
4897 same_regs[REGNO (SET_DEST (x))] = -1;
4898 num_same_regs--;
4899 }
4900 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
4901 return 0;
4902 }
4903 else
4904 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
4905 return 0;
4906
4907 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
4908
4909 case LABEL_REF:
4910 return XEXP (x, 0) == XEXP (y, 0);
4911
4912 case SYMBOL_REF:
4913 return XSTR (x, 0) == XSTR (y, 0);
4914
4915 default:
4916 break;
4917 }
4918
4919 if (x == y)
4920 return 1;
4921
4922 fmt = GET_RTX_FORMAT (code);
4923 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4924 {
4925 switch (fmt[i])
4926 {
4927 case 'w':
4928 if (XWINT (x, i) != XWINT (y, i))
4929 return 0;
4930 break;
4931
4932 case 'n':
4933 case 'i':
4934 if (XINT (x, i) != XINT (y, i))
4935 return 0;
4936 break;
4937
4938 case 'V':
4939 case 'E':
4940 /* Two vectors must have the same length. */
4941 if (XVECLEN (x, i) != XVECLEN (y, i))
4942 return 0;
4943
4944 /* And the corresponding elements must match. */
4945 for (j = 0; j < XVECLEN (x, i); j++)
4946 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
4947 XVECEXP (y, i, j), yinsn) == 0)
4948 return 0;
4949 break;
4950
4951 case 'e':
4952 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
4953 return 0;
4954 break;
4955
4956 case 'S':
4957 case 's':
4958 if (strcmp (XSTR (x, i), XSTR (y, i)))
4959 return 0;
4960 break;
4961
4962 case 'u':
4963 /* These are just backpointers, so they don't matter. */
4964 break;
4965
4966 case '0':
4967 break;
4968
4969 /* It is believed that rtx's at this level will never
4970 contain anything but integers and other rtx's,
4971 except for within LABEL_REFs and SYMBOL_REFs. */
4972 default:
4973 abort ();
4974 }
4975 }
4976 return 1;
4977 }
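
/* Example of the pseudo equivalence: comparing the use
   (plus (reg 70) (const_int 1)) against (plus (reg 75) (const_int 1))
   records same_regs[70] = 75, but only if YINSN contains the last use
   of (reg 75); from then on (reg 70) matches only (reg 75), and a
   matching pair of SETs of the two registers cancels the pending
   entry.  */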
4978 \f
4979
4980 #ifndef HAVE_cc0
4981 /* Return the insn that NEW can safely be inserted in front of, starting at
4982 the jump insn INSN. Return 0 if it is not safe to do this jump
4983 optimization. Note that NEW must contain a single set. */
4984
4985 static rtx
4986 find_insert_position (insn, new)
4987 rtx insn;
4988 rtx new;
4989 {
4990 int i;
4991 rtx prev;
4992
4993 /* If NEW does not clobber, it is safe to insert NEW before INSN. */
4994 if (GET_CODE (PATTERN (new)) != PARALLEL)
4995 return insn;
4996
4997 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
4998 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
4999 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5000 insn))
5001 break;
5002
5003 if (i < 0)
5004 return insn;
5005
5006 /* There is a good chance that the previous insn PREV sets the thing
5007 being clobbered (often the CC in a hard reg). If PREV does not
5008 use what NEW sets, we can insert NEW before PREV. */
5009
5010 prev = prev_active_insn (insn);
5011 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5012 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5013 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5014 insn)
5015 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5016 prev))
5017 return 0;
5018
5019 return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
5020 }
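
/* For instance, with NEW being
   (parallel [(set (reg 70) ...) (clobber (reg 17))]) where INSN uses
   (hypothetical) register 17, NEW cannot go immediately before INSN;
   it may go before PREV instead, provided PREV is what sets register
   17 and PREV does not mention (reg 70).  */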
5021 #endif /* !HAVE_cc0 */