1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 88, 89, 91-99, 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
25
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
30
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
38
39 Optionally, cross-jumping can be done. Currently it is done
40 only in the last pass (after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
43
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
46
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
50
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
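/* Two quick illustrations of what this pass does, shown at the source
   level rather than as RTL (sketches only, not code from this file):

     goto L1;                      goto L2;     a jump to an unconditional
     ...                 ==>       ...          jump is retargeted
     L1: goto L2;                  L1: goto L2;

     goto L1;                      (deleted)    a jump to the following
     L1: ...             ==>       L1: ...      insn is removed.  */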
53
54 #include "config.h"
55 #include "system.h"
56 #include "rtl.h"
57 #include "tm_p.h"
58 #include "flags.h"
59 #include "hard-reg-set.h"
60 #include "regs.h"
61 #include "insn-config.h"
62 #include "insn-flags.h"
63 #include "insn-attr.h"
64 #include "recog.h"
65 #include "function.h"
66 #include "expr.h"
67 #include "real.h"
68 #include "except.h"
69 #include "toplev.h"
70
71 /* ??? Eventually must record somehow the labels used by jumps
72 from nested functions. */
73 /* Pre-record the next or previous real insn for each label?
74 No, this pass is very fast anyway. */
75 /* Condense consecutive labels?
76 This would make life analysis faster, maybe. */
77 /* Optimize jump y; x: ... y: jumpif... x?
78 Don't know if it is worth bothering with. */
79 /* Optimize two cases of conditional jump to conditional jump?
80 This can never delete any instruction or make anything dead,
81 or even change what is live at any point.
82 So perhaps let combiner do it. */
83
84 /* Vector indexed by uid.
85 For each CODE_LABEL, index by its uid to get first unconditional jump
86 that jumps to the label.
87 For each JUMP_INSN, index by its uid to get the next unconditional jump
88 that jumps to the same label.
89 Element 0 is the start of a chain of all return insns.
90 (It is safe to use element 0 because insn uid 0 is not used.) */
91
92 static rtx *jump_chain;
93
94 /* Maximum index in jump_chain. */
95
96 static int max_jump_chain;
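/* Illustration only (a sketch, not code used by this pass): with the
   layout above, visiting every unconditional jump to a label LABEL is
   a chain walk:

     rtx j;
     for (j = jump_chain[INSN_UID (LABEL)];
          j != 0;
          j = jump_chain[INSN_UID (j)])
       ... process the jump J ...

   and jump_chain[0] heads the analogous chain of return insns.  */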
97
98 /* Set nonzero by jump_optimize if control can fall through
99 to the end of the function. */
100 int can_reach_end;
101
102 /* Indicates whether death notes are significant in cross jump analysis.
103 Normally they are not significant, because if A and B jump to C
104 and R dies in A, it must also die in B. But this might not be true after
105 stack register conversion, and we must compare death notes in that
106 case. */
107
108 static int cross_jump_death_matters = 0;
109
110 static int init_label_info PARAMS ((rtx));
111 static void delete_barrier_successors PARAMS ((rtx));
112 static void mark_all_labels PARAMS ((rtx, int));
113 static rtx delete_unreferenced_labels PARAMS ((rtx));
114 static void delete_noop_moves PARAMS ((rtx));
115 static int calculate_can_reach_end PARAMS ((rtx, int));
116 static int duplicate_loop_exit_test PARAMS ((rtx));
117 static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
118 static void do_cross_jump PARAMS ((rtx, rtx, rtx));
119 static int jump_back_p PARAMS ((rtx, rtx));
120 static int tension_vector_labels PARAMS ((rtx, int));
121 static void mark_jump_label PARAMS ((rtx, rtx, int));
122 static void delete_computation PARAMS ((rtx));
123 static void delete_from_jump_chain PARAMS ((rtx));
124 static int delete_labelref_insn PARAMS ((rtx, rtx, int));
125 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
126 static void redirect_tablejump PARAMS ((rtx, rtx));
127 static void jump_optimize_1 PARAMS ((rtx, int, int, int, int));
128 #if ! defined(HAVE_cc0) && ! defined(HAVE_conditional_arithmetic)
129 static rtx find_insert_position PARAMS ((rtx, rtx));
130 #endif
131 static int returnjump_p_1 PARAMS ((rtx *, void *));
132 static void delete_prior_computation PARAMS ((rtx, rtx));
133
134 /* Main external entry point into the jump optimizer. See comments before
135 jump_optimize_1 for descriptions of the arguments. */
136 void
137 jump_optimize (f, cross_jump, noop_moves, after_regscan)
138 rtx f;
139 int cross_jump;
140 int noop_moves;
141 int after_regscan;
142 {
143 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0);
144 }
145
146 /* Alternate entry into the jump optimizer. This entry point only rebuilds
147 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
148 instructions. */
149 void
150 rebuild_jump_labels (f)
151 rtx f;
152 {
153 jump_optimize_1 (f, 0, 0, 0, 1);
154 }
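/* Usage sketch (hypothetical caller; the flag values are examples only).
   With F the first insn of the current function, a pass might invoke

     jump_optimize (f, 1, 1, 1);

   to cross-jump, delete no-op moves, and allow regno_first_uid and
   regno_last_uid to be consulted, or

     rebuild_jump_labels (f);

   when only the JUMP_LABEL and REG_LABEL information needs refreshing.  */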
155
156 \f
157 /* Delete no-op jumps and optimize jumps to jumps
158 and jumps around jumps.
159 Delete unused labels and unreachable code.
160
161 If CROSS_JUMP is 1, detect matching code
162 before a jump and its destination and unify them.
163 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
164
165 If NOOP_MOVES is nonzero, delete no-op move insns.
166
167 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
168 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
169
170 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
171 and JUMP_LABEL field for jumping insns.
172
173 If `optimize' is zero, don't change any code,
174 just determine whether control drops off the end of the function.
175 This case occurs when we have -W and not -O.
176 It works because `delete_insn' checks the value of `optimize'
177 and refrains from actually deleting when that is 0. */
178
179 static void
180 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
181 rtx f;
182 int cross_jump;
183 int noop_moves;
184 int after_regscan;
185 int mark_labels_only;
186 {
187 register rtx insn, next;
188 int changed;
189 int old_max_reg;
190 int first = 1;
191 int max_uid = 0;
192 rtx last_insn;
193
194 cross_jump_death_matters = (cross_jump == 2);
195 max_uid = init_label_info (f) + 1;
196
197 /* If we are performing cross jump optimizations, then initialize
198 tables mapping UIDs to EH regions to avoid incorrect movement
199 of insns from one EH region to another. */
200 if (flag_exceptions && cross_jump)
201 init_insn_eh_region (f, max_uid);
202
203 delete_barrier_successors (f);
204
205 /* Leave some extra room for labels and duplicate exit test insns
206 we make. */
207 max_jump_chain = max_uid * 14 / 10;
208 jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));
209
210 mark_all_labels (f, cross_jump);
211
212 /* Keep track of labels used from static data;
213 they cannot ever be deleted. */
214
215 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
216 LABEL_NUSES (XEXP (insn, 0))++;
217
218 check_exception_handler_labels ();
219
220 /* Keep track of labels used for marking handlers for exception
221 regions; they cannot usually be deleted. */
222
223 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
224 LABEL_NUSES (XEXP (insn, 0))++;
225
226 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
227 notes and recompute LABEL_NUSES. */
228 if (mark_labels_only)
229 goto end;
230
231 exception_optimize ();
232
233 last_insn = delete_unreferenced_labels (f);
234
235 #ifdef HAVE_return
236 if (optimize && HAVE_return)
237 {
238 /* If we fall through to the epilogue, see if we can insert a RETURN insn
239 in front of it. If the machine allows it at this point (we might be
240 after reload for a leaf routine), it will improve optimization for it
241 to be there. */
242 insn = get_last_insn ();
243 while (insn && GET_CODE (insn) == NOTE)
244 insn = PREV_INSN (insn);
245
246 if (insn && GET_CODE (insn) != BARRIER)
247 {
248 emit_jump_insn (gen_return ());
249 emit_barrier ();
250 }
251 }
252 #endif
253
254 if (noop_moves)
255 delete_noop_moves (f);
256
257 /* If we haven't yet gotten to reload and we have just run regscan,
258 delete any insn that sets a register that isn't used elsewhere.
259 This helps some of the optimizations below by having fewer insns
260 being jumped around. */
261
262 if (optimize && ! reload_completed && after_regscan)
263 for (insn = f; insn; insn = next)
264 {
265 rtx set = single_set (insn);
266
267 next = NEXT_INSN (insn);
268
269 if (set && GET_CODE (SET_DEST (set)) == REG
270 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
271 && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
272 /* We use regno_last_note_uid so as not to delete the setting
273 of a reg that's used in notes. A subsequent optimization
274 might arrange to use that reg for real. */
275 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
276 && ! side_effects_p (SET_SRC (set))
277 && ! find_reg_note (insn, REG_RETVAL, 0)
278 /* An ADDRESSOF expression can turn into a use of the internal arg
279 pointer, so do not delete the initialization of the internal
280 arg pointer yet. If it is truly dead, flow will delete the
281 initializing insn. */
282 && SET_DEST (set) != current_function_internal_arg_pointer)
283 delete_insn (insn);
284 }
285
286 /* Now iterate optimizing jumps until nothing changes over one pass. */
287 changed = 1;
288 old_max_reg = max_reg_num ();
289 while (changed)
290 {
291 changed = 0;
292
293 for (insn = f; insn; insn = next)
294 {
295 rtx reallabelprev;
296 rtx temp, temp1, temp2 = NULL_RTX, temp3, temp4, temp5, temp6;
297 rtx nlabel;
298 int this_is_simplejump, this_is_condjump, reversep = 0;
299 int this_is_condjump_in_parallel;
300
301 next = NEXT_INSN (insn);
302
303 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
304 jump. Try to optimize by duplicating the loop exit test if so.
305 This is only safe immediately after regscan, because it uses
306 the values of regno_first_uid and regno_last_uid. */
307 if (after_regscan && GET_CODE (insn) == NOTE
308 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
309 && (temp1 = next_nonnote_insn (insn)) != 0
310 && simplejump_p (temp1))
311 {
312 temp = PREV_INSN (insn);
313 if (duplicate_loop_exit_test (insn))
314 {
315 changed = 1;
316 next = NEXT_INSN (temp);
317 continue;
318 }
319 }
320
321 if (GET_CODE (insn) != JUMP_INSN)
322 continue;
323
324 this_is_simplejump = simplejump_p (insn);
325 this_is_condjump = condjump_p (insn);
326 this_is_condjump_in_parallel = condjump_in_parallel_p (insn);
327
328 /* Tension the labels in dispatch tables. */
329
330 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
331 changed |= tension_vector_labels (PATTERN (insn), 0);
332 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
333 changed |= tension_vector_labels (PATTERN (insn), 1);
334
335 /* See if this jump goes to another jump and redirect if so. */
336 nlabel = follow_jumps (JUMP_LABEL (insn));
337 if (nlabel != JUMP_LABEL (insn))
338 changed |= redirect_jump (insn, nlabel);
339
340 if (! optimize)
341 continue;
342
343 /* If a dispatch table always goes to the same place,
344 get rid of it and replace the insn that uses it. */
345
346 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
347 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
348 {
349 int i;
350 rtx pat = PATTERN (insn);
351 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
352 int len = XVECLEN (pat, diff_vec_p);
353 rtx dispatch = prev_real_insn (insn);
354 rtx set;
355
356 for (i = 0; i < len; i++)
357 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
358 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
359 break;
360
361 if (i == len
362 && dispatch != 0
363 && GET_CODE (dispatch) == JUMP_INSN
364 && JUMP_LABEL (dispatch) != 0
365 /* Don't mess with a casesi insn.
366 XXX according to the comment before computed_jump_p(),
367 all casesi insns should be a parallel of the jump
368 and a USE of a LABEL_REF. */
369 && ! ((set = single_set (dispatch)) != NULL
370 && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
371 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
372 {
373 redirect_tablejump (dispatch,
374 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
375 changed = 1;
376 }
377 }
378
379 /* If a jump references the end of the function, try to turn
380 it into a RETURN insn, possibly a conditional one. */
381 if (JUMP_LABEL (insn) != 0
382 && (next_active_insn (JUMP_LABEL (insn)) == 0
383 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
384 == RETURN))
385 changed |= redirect_jump (insn, NULL_RTX);
386
387 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
388
389 /* Detect jump to following insn. */
390 if (reallabelprev == insn && this_is_condjump)
391 {
392 next = next_real_insn (JUMP_LABEL (insn));
393 delete_jump (insn);
394 changed = 1;
395 continue;
396 }
397
398 /* Detect a conditional jump going to the same place
399 as an immediately following unconditional jump. */
400 else if (this_is_condjump
401 && (temp = next_active_insn (insn)) != 0
402 && simplejump_p (temp)
403 && (next_active_insn (JUMP_LABEL (insn))
404 == next_active_insn (JUMP_LABEL (temp))))
405 {
406 /* Don't mess up test coverage analysis. */
407 temp2 = temp;
408 if (flag_test_coverage && !reload_completed)
409 for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
410 if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
411 break;
412
413 if (temp2 == temp)
414 {
415 delete_jump (insn);
416 changed = 1;
417 continue;
418 }
419 }
420
421 /* Detect a conditional jump jumping over an unconditional jump. */
422
423 else if ((this_is_condjump || this_is_condjump_in_parallel)
424 && ! this_is_simplejump
425 && reallabelprev != 0
426 && GET_CODE (reallabelprev) == JUMP_INSN
427 && prev_active_insn (reallabelprev) == insn
428 && no_labels_between_p (insn, reallabelprev)
429 && simplejump_p (reallabelprev))
430 {
431 /* When we invert the unconditional jump, we will be
432 decrementing the usage count of its old label.
433 Make sure that we don't delete it now because that
434 might cause the following code to be deleted. */
435 rtx prev_uses = prev_nonnote_insn (reallabelprev);
436 rtx prev_label = JUMP_LABEL (insn);
437
438 if (prev_label)
439 ++LABEL_NUSES (prev_label);
440
441 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
442 {
443 /* It is very likely that if there are USE insns before
444 this jump, they hold REG_DEAD notes. These REG_DEAD
445 notes are no longer valid due to this optimization,
446 and will cause the life analysis done by following passes
447 (notably delayed-branch scheduling) to think that
448 these registers are dead when they are not.
449
450 To prevent this trouble, we just remove the USE insns
451 from the insn chain. */
452
453 while (prev_uses && GET_CODE (prev_uses) == INSN
454 && GET_CODE (PATTERN (prev_uses)) == USE)
455 {
456 rtx useless = prev_uses;
457 prev_uses = prev_nonnote_insn (prev_uses);
458 delete_insn (useless);
459 }
460
461 delete_insn (reallabelprev);
462 changed = 1;
463 }
464
465 /* We can now safely delete the label if it is unreferenced
466 since the delete_insn above has deleted the BARRIER. */
467 if (prev_label && --LABEL_NUSES (prev_label) == 0)
468 delete_insn (prev_label);
469
470 next = NEXT_INSN (insn);
471 }
472
473 /* If we have an unconditional jump preceded by a USE, try to put
474 the USE before the target and jump there. This simplifies many
475 of the optimizations below since we don't have to worry about
476 dealing with these USE insns. We only do this if the label
477 being branched to already has the identical USE or if code
478 never falls through to that label. */
479
480 else if (this_is_simplejump
481 && (temp = prev_nonnote_insn (insn)) != 0
482 && GET_CODE (temp) == INSN
483 && GET_CODE (PATTERN (temp)) == USE
484 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
485 && (GET_CODE (temp1) == BARRIER
486 || (GET_CODE (temp1) == INSN
487 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
488 /* Don't do this optimization if we have a loop containing
489 only the USE instruction, and the loop start label has
490 a usage count of 1. This is because we will redo this
491 optimization every time through the outer loop, and jump
492 opt will never exit. */
493 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
494 && temp2 == JUMP_LABEL (insn)
495 && LABEL_NUSES (temp2) == 1))
496 {
497 if (GET_CODE (temp1) == BARRIER)
498 {
499 emit_insn_after (PATTERN (temp), temp1);
500 temp1 = NEXT_INSN (temp1);
501 }
502
503 delete_insn (temp);
504 redirect_jump (insn, get_label_before (temp1));
505 reallabelprev = prev_real_insn (temp1);
506 changed = 1;
507 next = NEXT_INSN (insn);
508 }
509
510 /* Simplify if (...) x = a; else x = b; by converting it
511 to x = b; if (...) x = a;
512 if B is sufficiently simple, the test doesn't involve X,
513 and nothing in the test modifies B or X.
514
515 If we have small register classes, we also can't do this if X
516 is a hard register.
517
518 If the "x = b;" insn has any REG_NOTES, we don't do this because
519 of the possibility that we are running after CSE and there is a
520 REG_EQUAL note that is only valid if the branch has already been
521 taken. If we move the insn with the REG_EQUAL note, we may
522 fold the comparison to always be false in a later CSE pass.
523 (We could also delete the REG_NOTES when moving the insn, but it
524 seems simpler to not move it.) An exception is that we can move
525 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
526 value is the same as "b".
527
528 INSN is the branch over the `else' part.
529
530 We set:
531
532 TEMP to the jump insn preceding "x = a;"
533 TEMP1 to X
534 TEMP2 to the insn that sets "x = b;"
535 TEMP3 to the insn that sets "x = a;"
536 TEMP4 to the set of "x = b"; */
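/* Illustrative shape of the insn stream matched below (a sketch):

     TEMP:   conditional jump skipping the "x = a;" arm
     TEMP3:  x = a;
     INSN:   unconditional jump over the else-part
     (label)
     TEMP2:  x = b;

   "x = b;" is hoisted above the test jump(s) and INSN is deleted,
   so only "x = a;" remains conditional.  */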
537
538 if (this_is_simplejump
539 && (temp3 = prev_active_insn (insn)) != 0
540 && GET_CODE (temp3) == INSN
541 && (temp4 = single_set (temp3)) != 0
542 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
543 && (! SMALL_REGISTER_CLASSES
544 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
545 && (temp2 = next_active_insn (insn)) != 0
546 && GET_CODE (temp2) == INSN
547 && (temp4 = single_set (temp2)) != 0
548 && rtx_equal_p (SET_DEST (temp4), temp1)
549 && ! side_effects_p (SET_SRC (temp4))
550 && ! may_trap_p (SET_SRC (temp4))
551 && (REG_NOTES (temp2) == 0
552 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
553 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
554 && XEXP (REG_NOTES (temp2), 1) == 0
555 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
556 SET_SRC (temp4))))
557 && (temp = prev_active_insn (temp3)) != 0
558 && condjump_p (temp) && ! simplejump_p (temp)
559 /* TEMP must skip over the "x = a;" insn */
560 && prev_real_insn (JUMP_LABEL (temp)) == insn
561 && no_labels_between_p (insn, JUMP_LABEL (temp))
562 /* There must be no other entries to the "x = b;" insn. */
563 && no_labels_between_p (JUMP_LABEL (temp), temp2)
564 /* INSN must either branch to the insn after TEMP2 or the insn
565 after TEMP2 must branch to the same place as INSN. */
566 && (reallabelprev == temp2
567 || ((temp5 = next_active_insn (temp2)) != 0
568 && simplejump_p (temp5)
569 && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
570 {
571 /* The test expression, X, may be a complicated test with
572 multiple branches. See if we can find all the uses of
573 the label that TEMP branches to without hitting a CALL_INSN
574 or a jump to somewhere else. */
575 rtx target = JUMP_LABEL (temp);
576 int nuses = LABEL_NUSES (target);
577 rtx p;
578 #ifdef HAVE_cc0
579 rtx q;
580 #endif
581
582 /* Set P to the first jump insn that goes around "x = a;". */
583 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
584 {
585 if (GET_CODE (p) == JUMP_INSN)
586 {
587 if (condjump_p (p) && ! simplejump_p (p)
588 && JUMP_LABEL (p) == target)
589 {
590 nuses--;
591 if (nuses == 0)
592 break;
593 }
594 else
595 break;
596 }
597 else if (GET_CODE (p) == CALL_INSN)
598 break;
599 }
600
601 #ifdef HAVE_cc0
602 /* We cannot insert anything between a set of cc and its use,
603 so if P uses cc0, we must back up to the previous insn. */
604 q = prev_nonnote_insn (p);
605 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
606 && sets_cc0_p (PATTERN (q)))
607 p = q;
608 #endif
609
610 if (p)
611 p = PREV_INSN (p);
612
613 /* If we found all the uses and there was no data conflict, we
614 can move the assignment unless we can branch into the middle
615 from somewhere. */
616 if (nuses == 0 && p
617 && no_labels_between_p (p, insn)
618 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
619 && ! reg_set_between_p (temp1, p, temp3)
620 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
621 || ! modified_between_p (SET_SRC (temp4), p, temp2))
622 /* Verify that registers used by the jump are not clobbered
623 by the instruction being moved. */
624 && ! regs_set_between_p (PATTERN (temp),
625 PREV_INSN (temp2),
626 NEXT_INSN (temp2)))
627 {
628 emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
629 delete_insn (temp2);
630
631 /* Set NEXT to an insn that we know won't go away. */
632 next = next_active_insn (insn);
633
634 /* Delete the jump around the set. Note that we must do
635 this before we redirect the test jumps so that it won't
636 delete the code immediately following the assignment
637 we moved (which might be a jump). */
638
639 delete_insn (insn);
640
641 /* We either have two consecutive labels or a jump to
642 a jump, so adjust all the JUMP_INSNs to branch to where
643 INSN branches to. */
644 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
645 if (GET_CODE (p) == JUMP_INSN)
646 redirect_jump (p, target);
647
648 changed = 1;
649 next = NEXT_INSN (insn);
650 continue;
651 }
652 }
653
654 /* Simplify if (...) { x = a; goto l; } x = b; by converting it
655 to x = a; if (...) goto l; x = b;
656 if A is sufficiently simple, the test doesn't involve X,
657 and nothing in the test modifies A or X.
658
659 If we have small register classes, we also can't do this if X
660 is a hard register.
661
662 If the "x = a;" insn has any REG_NOTES, we don't do this because
663 of the possibility that we are running after CSE and there is a
664 REG_EQUAL note that is only valid if the branch has already been
665 taken. If we move the insn with the REG_EQUAL note, we may
666 fold the comparison to always be false in a later CSE pass.
667 (We could also delete the REG_NOTES when moving the insn, but it
668 seems simpler to not move it.) An exception is that we can move
669 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
670 value is the same as "a".
671
672 INSN is the goto.
673
674 We set:
675
676 TEMP to the jump insn preceding "x = a;"
677 TEMP1 to X
678 TEMP2 to the insn that sets "x = b;"
679 TEMP3 to the insn that sets "x = a;"
680 TEMP4 to the set of "x = a"; */
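/* Mirror image of the previous case (again a sketch):

     TEMP:   conditional jump skipping "x = a;" and the goto
     TEMP3:  x = a;
     INSN:   goto l;
     (label)
     TEMP2:  x = b;

   "x = a;" is hoisted above TEMP, and TEMP is inverted so that it
   jumps to l directly, letting INSN be deleted.  */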
681
682 if (this_is_simplejump
683 && (temp2 = next_active_insn (insn)) != 0
684 && GET_CODE (temp2) == INSN
685 && (temp4 = single_set (temp2)) != 0
686 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
687 && (! SMALL_REGISTER_CLASSES
688 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
689 && (temp3 = prev_active_insn (insn)) != 0
690 && GET_CODE (temp3) == INSN
691 && (temp4 = single_set (temp3)) != 0
692 && rtx_equal_p (SET_DEST (temp4), temp1)
693 && ! side_effects_p (SET_SRC (temp4))
694 && ! may_trap_p (SET_SRC (temp4))
695 && (REG_NOTES (temp3) == 0
696 || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
697 || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
698 && XEXP (REG_NOTES (temp3), 1) == 0
699 && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
700 SET_SRC (temp4))))
701 && (temp = prev_active_insn (temp3)) != 0
702 && condjump_p (temp) && ! simplejump_p (temp)
703 /* TEMP must skip over the "x = a;" insn */
704 && prev_real_insn (JUMP_LABEL (temp)) == insn
705 && no_labels_between_p (temp, insn))
706 {
707 rtx prev_label = JUMP_LABEL (temp);
708 rtx insert_after = prev_nonnote_insn (temp);
709
710 #ifdef HAVE_cc0
711 /* We cannot insert anything between a set of cc and its use. */
712 if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
713 && sets_cc0_p (PATTERN (insert_after)))
714 insert_after = prev_nonnote_insn (insert_after);
715 #endif
716 ++LABEL_NUSES (prev_label);
717
718 if (insert_after
719 && no_labels_between_p (insert_after, temp)
720 && ! reg_referenced_between_p (temp1, insert_after, temp3)
721 && ! reg_referenced_between_p (temp1, temp3,
722 NEXT_INSN (temp2))
723 && ! reg_set_between_p (temp1, insert_after, temp)
724 && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
725 /* Verify that registers used by the jump are not clobbered
726 by the instruction being moved. */
727 && ! regs_set_between_p (PATTERN (temp),
728 PREV_INSN (temp3),
729 NEXT_INSN (temp3))
730 && invert_jump (temp, JUMP_LABEL (insn)))
731 {
732 emit_insn_after_with_line_notes (PATTERN (temp3),
733 insert_after, temp3);
734 delete_insn (temp3);
735 delete_insn (insn);
736 /* Set NEXT to an insn that we know won't go away. */
737 next = temp2;
738 changed = 1;
739 }
740 if (prev_label && --LABEL_NUSES (prev_label) == 0)
741 delete_insn (prev_label);
742 if (changed)
743 continue;
744 }
745
746 #if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)
747
748 /* If we have if (...) x = exp; and branches are expensive,
749 EXP is a single insn, does not have any side effects, cannot
750 trap, and is not too costly, convert this to
751 t = exp; if (...) x = t;
752
753 Don't do this when we have CC0 because it is unlikely to help
754 and we'd need to worry about where to place the new insn and
755 the potential for conflicts. We also can't do this when we have
756 notes on the insn for the same reason as above.
757
758 If we have conditional arithmetic, this will make this
759 harder to optimize later and isn't needed, so don't do it
760 in that case either.
761
762 We set:
763
764 TEMP to the "x = exp;" insn.
765 TEMP1 to the single set in the "x = exp;" insn.
766 TEMP2 to "x". */
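/* Source-level sketch of this transformation (illustrative only):

     if (cond) x = a * b + c;   ==>   t = a * b + c;
                                      if (cond) x = t;

   The possibly costly computation of EXP moves out of the branch
   shadow; only the cheap register copy stays conditional.  */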
767
768 if (! reload_completed
769 && this_is_condjump && ! this_is_simplejump
770 && BRANCH_COST >= 3
771 && (temp = next_nonnote_insn (insn)) != 0
772 && GET_CODE (temp) == INSN
773 && REG_NOTES (temp) == 0
774 && (reallabelprev == temp
775 || ((temp2 = next_active_insn (temp)) != 0
776 && simplejump_p (temp2)
777 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
778 && (temp1 = single_set (temp)) != 0
779 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
780 && (! SMALL_REGISTER_CLASSES
781 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
782 && GET_CODE (SET_SRC (temp1)) != REG
783 && GET_CODE (SET_SRC (temp1)) != SUBREG
784 && GET_CODE (SET_SRC (temp1)) != CONST_INT
785 && ! side_effects_p (SET_SRC (temp1))
786 && ! may_trap_p (SET_SRC (temp1))
787 && rtx_cost (SET_SRC (temp1), SET) < 10)
788 {
789 rtx new = gen_reg_rtx (GET_MODE (temp2));
790
791 if ((temp3 = find_insert_position (insn, temp))
792 && validate_change (temp, &SET_DEST (temp1), new, 0))
793 {
794 next = emit_insn_after (gen_move_insn (temp2, new), insn);
795 emit_insn_after_with_line_notes (PATTERN (temp),
796 PREV_INSN (temp3), temp);
797 delete_insn (temp);
798 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
799
800 if (after_regscan)
801 {
802 reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
803 old_max_reg = max_reg_num ();
804 }
805 }
806 }
807
808 /* Similarly, if it takes two insns to compute EXP but they
809 have the same destination. Here TEMP3 will be the second
810 insn and TEMP4 the SET from that insn. */
811
812 if (! reload_completed
813 && this_is_condjump && ! this_is_simplejump
814 && BRANCH_COST >= 4
815 && (temp = next_nonnote_insn (insn)) != 0
816 && GET_CODE (temp) == INSN
817 && REG_NOTES (temp) == 0
818 && (temp3 = next_nonnote_insn (temp)) != 0
819 && GET_CODE (temp3) == INSN
820 && REG_NOTES (temp3) == 0
821 && (reallabelprev == temp3
822 || ((temp2 = next_active_insn (temp3)) != 0
823 && simplejump_p (temp2)
824 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
825 && (temp1 = single_set (temp)) != 0
826 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
827 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
828 && (! SMALL_REGISTER_CLASSES
829 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
830 && ! side_effects_p (SET_SRC (temp1))
831 && ! may_trap_p (SET_SRC (temp1))
832 && rtx_cost (SET_SRC (temp1), SET) < 10
833 && (temp4 = single_set (temp3)) != 0
834 && rtx_equal_p (SET_DEST (temp4), temp2)
835 && ! side_effects_p (SET_SRC (temp4))
836 && ! may_trap_p (SET_SRC (temp4))
837 && rtx_cost (SET_SRC (temp4), SET) < 10)
838 {
839 rtx new = gen_reg_rtx (GET_MODE (temp2));
840
841 if ((temp5 = find_insert_position (insn, temp))
842 && (temp6 = find_insert_position (insn, temp3))
843 && validate_change (temp, &SET_DEST (temp1), new, 0))
844 {
845 /* Use the earliest of temp5 and temp6. */
846 if (temp5 != insn)
847 temp6 = temp5;
848 next = emit_insn_after (gen_move_insn (temp2, new), insn);
849 emit_insn_after_with_line_notes (PATTERN (temp),
850 PREV_INSN (temp6), temp);
851 emit_insn_after_with_line_notes
852 (replace_rtx (PATTERN (temp3), temp2, new),
853 PREV_INSN (temp6), temp3);
854 delete_insn (temp);
855 delete_insn (temp3);
856 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
857
858 if (after_regscan)
859 {
860 reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
861 old_max_reg = max_reg_num ();
862 }
863 }
864 }
865
866 /* Finally, handle the case where two insns are used to
867 compute EXP but a temporary register is used. Here we must
868 ensure that the temporary register is not used anywhere else. */
869
870 if (! reload_completed
871 && after_regscan
872 && this_is_condjump && ! this_is_simplejump
873 && BRANCH_COST >= 4
874 && (temp = next_nonnote_insn (insn)) != 0
875 && GET_CODE (temp) == INSN
876 && REG_NOTES (temp) == 0
877 && (temp3 = next_nonnote_insn (temp)) != 0
878 && GET_CODE (temp3) == INSN
879 && REG_NOTES (temp3) == 0
880 && (reallabelprev == temp3
881 || ((temp2 = next_active_insn (temp3)) != 0
882 && simplejump_p (temp2)
883 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
884 && (temp1 = single_set (temp)) != 0
885 && (temp5 = SET_DEST (temp1),
886 (GET_CODE (temp5) == REG
887 || (GET_CODE (temp5) == SUBREG
888 && (temp5 = SUBREG_REG (temp5),
889 GET_CODE (temp5) == REG))))
890 && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
891 && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
892 && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
893 && ! side_effects_p (SET_SRC (temp1))
894 && ! may_trap_p (SET_SRC (temp1))
895 && rtx_cost (SET_SRC (temp1), SET) < 10
896 && (temp4 = single_set (temp3)) != 0
897 && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
898 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
899 && (! SMALL_REGISTER_CLASSES
900 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
901 && rtx_equal_p (SET_DEST (temp4), temp2)
902 && ! side_effects_p (SET_SRC (temp4))
903 && ! may_trap_p (SET_SRC (temp4))
904 && rtx_cost (SET_SRC (temp4), SET) < 10)
905 {
906 rtx new = gen_reg_rtx (GET_MODE (temp2));
907
908 if ((temp5 = find_insert_position (insn, temp))
909 && (temp6 = find_insert_position (insn, temp3))
910 && validate_change (temp3, &SET_DEST (temp4), new, 0))
911 {
912 /* Use the earliest of temp5 and temp6. */
913 if (temp5 != insn)
914 temp6 = temp5;
915 next = emit_insn_after (gen_move_insn (temp2, new), insn);
916 emit_insn_after_with_line_notes (PATTERN (temp),
917 PREV_INSN (temp6), temp);
918 emit_insn_after_with_line_notes (PATTERN (temp3),
919 PREV_INSN (temp6), temp3);
920 delete_insn (temp);
921 delete_insn (temp3);
922 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
923
924 if (after_regscan)
925 {
926 reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
927 old_max_reg = max_reg_num ();
928 }
929 }
930 }
931 #endif /* ! HAVE_cc0 && ! HAVE_conditional_arithmetic */
932
933 #ifdef HAVE_conditional_arithmetic
934 /* ??? This is disabled in genconfig, as this simple-minded
935 transformation can incredibly lengthen register lifetimes.
936
937 Consider this example from cexp.c's yyparse:
938
939 234 (set (pc)
940 (if_then_else (ne (reg:DI 149) (const_int 0 [0x0]))
941 (label_ref 248) (pc)))
942 237 (set (reg/i:DI 0 $0) (const_int 1 [0x1]))
943 239 (set (pc) (label_ref 2382))
944 248 (code_label ("yybackup"))
945
946 This will be transformed to:
947
948 237 (set (reg/i:DI 0 $0)
949 (if_then_else:DI (eq (reg:DI 149) (const_int 0 [0x0]))
950 (const_int 1 [0x1]) (reg/i:DI 0 $0)))
951 239 (set (pc)
952 (if_then_else (eq (reg:DI 149) (const_int 0 [0x0]))
953 (label_ref 2382) (pc)))
954
955 which, from this narrow viewpoint looks fine. Except that
956 between this and 3 other occurrences of the same pattern, $0
957 is now live for basically the entire function, and we'll
958 get an abort in caller_save.
959
960 Any replacement for this code should recall that a set of
961 a register that is not live need not, and indeed should not,
962 be conditionalized. Either that, or delay the transformation
963 until after register allocation. */
964
965 /* See if this is a conditional jump around a small number of
966 instructions that we can conditionalize. Don't do this before
967 the initial CSE pass or after reload.
968
969 We reject any insns that have side effects or may trap.
970 Strictly speaking, this is not needed since the machine may
971 support conditionalizing these too, but we won't deal with that
972 now. Specifically, this means that we can't conditionalize a
973 CALL_INSN, which some machines, such as the ARC, can do, but
974 this is a very minor optimization. */
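/* Sketch of the rewrite performed below: with the branch condition
   reversed into OURCOND, each simple insn

     (set (reg X) SRC)

   in the range being skipped is changed (via validate_change) into

     (set (reg X) (if_then_else OURCOND SRC (reg X)))

   so X keeps its old value exactly when the original branch would
   have been taken.  */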
975 if (this_is_condjump && ! this_is_simplejump
976 && cse_not_expected && ! reload_completed
977 && BRANCH_COST > 2
978 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (insn)), 0),
979 insn))
980 {
981 rtx ourcond = XEXP (SET_SRC (PATTERN (insn)), 0);
982 int num_insns = 0;
983 char *storage = (char *) oballoc (0);
984 int last_insn = 0, failed = 0;
985 rtx changed_jump = 0;
986
987 ourcond = gen_rtx (reverse_condition (GET_CODE (ourcond)),
988 VOIDmode, XEXP (ourcond, 0),
989 XEXP (ourcond, 1));
990
991 /* Scan forward BRANCH_COST real insns looking for the JUMP_LABEL
992 of this insn. We see if we think we can conditionalize the
993 insns we pass. For now, we only deal with insns that have
994 one SET. We stop after an insn that modifies anything in
995 OURCOND, if we have too many insns, or if we have an insn
996 with a side effect or that may trap. Note that we will
997 be modifying any unconditional jumps we encounter to be
998 conditional; this will have the effect of also doing this
999 optimization on the "else" the next time around. */
1000 for (temp1 = NEXT_INSN (insn);
1001 num_insns <= BRANCH_COST && ! failed && temp1 != 0
1002 && GET_CODE (temp1) != CODE_LABEL;
1003 temp1 = NEXT_INSN (temp1))
1004 {
1005 /* Ignore everything but an active insn. */
1006 if (GET_RTX_CLASS (GET_CODE (temp1)) != 'i'
1007 || GET_CODE (PATTERN (temp1)) == USE
1008 || GET_CODE (PATTERN (temp1)) == CLOBBER)
1009 continue;
1010
1011 /* If this was an unconditional jump, record it since we'll
1012 need to remove the BARRIER if we succeed. We can only
1013 have one such jump since there must be a label after
1014 the BARRIER and it's either ours, in which case it's the
1015 only one, or some other, in which case we'd fail.
1016 Likewise if it's a CALL_INSN followed by a BARRIER. */
1017
1018 if (simplejump_p (temp1)
1019 || (GET_CODE (temp1) == CALL_INSN
1020 && NEXT_INSN (temp1) != 0
1021 && GET_CODE (NEXT_INSN (temp1)) == BARRIER))
1022 {
1023 if (changed_jump == 0)
1024 changed_jump = temp1;
1025 else
1026 changed_jump
1027 = gen_rtx_INSN_LIST (VOIDmode, temp1, changed_jump);
1028 }
1029
1030 /* See if we are allowed another insn and if this insn
1031 is one we think we may be able to handle. */
1032 if (++num_insns > BRANCH_COST
1033 || last_insn
1034 || (((temp2 = single_set (temp1)) == 0
1035 || side_effects_p (SET_SRC (temp2))
1036 || may_trap_p (SET_SRC (temp2)))
1037 && GET_CODE (temp1) != CALL_INSN))
1038 failed = 1;
1039 else if (temp2 != 0)
1040 validate_change (temp1, &SET_SRC (temp2),
1041 gen_rtx_IF_THEN_ELSE
1042 (GET_MODE (SET_DEST (temp2)),
1043 copy_rtx (ourcond),
1044 SET_SRC (temp2), SET_DEST (temp2)),
1045 1);
1046 else
1047 {
1048 /* This is a CALL_INSN that doesn't have a SET. */
1049 rtx *call_loc = &PATTERN (temp1);
1050
1051 if (GET_CODE (*call_loc) == PARALLEL)
1052 call_loc = &XVECEXP (*call_loc, 0, 0);
1053
1054 validate_change (temp1, call_loc,
1055 gen_rtx_IF_THEN_ELSE
1056 (VOIDmode, copy_rtx (ourcond),
1057 *call_loc, const0_rtx),
1058 1);
1059 }
1060
1061
1062 if (modified_in_p (ourcond, temp1))
1063 last_insn = 1;
1064 }
1065
1066 /* If we've reached our jump label, haven't failed, and all
1067 the changes above are valid, we can delete this jump
1068 insn. Also remove a BARRIER after any jump that used
1069 to be unconditional and remove any REG_EQUAL or REG_EQUIV notes
1070 that might have previously been present on insns we
1071 made conditional. */
1072 if (temp1 == JUMP_LABEL (insn) && ! failed
1073 && apply_change_group ())
1074 {
1075 for (temp1 = NEXT_INSN (insn); temp1 != JUMP_LABEL (insn);
1076 temp1 = NEXT_INSN (temp1))
1077 if (GET_RTX_CLASS (GET_CODE (temp1)) == 'i')
1078 for (temp2 = REG_NOTES (temp1); temp2 != 0;
1079 temp2 = XEXP (temp2, 1))
1080 if (REG_NOTE_KIND (temp2) == REG_EQUAL
1081 || REG_NOTE_KIND (temp2) == REG_EQUIV)
1082 remove_note (temp1, temp2);
1083
1084 if (changed_jump != 0)
1085 {
1086 while (GET_CODE (changed_jump) == INSN_LIST)
1087 {
1088 delete_barrier (NEXT_INSN (XEXP (changed_jump, 0)));
1089 changed_jump = XEXP (changed_jump, 1);
1090 }
1091
1092 delete_barrier (NEXT_INSN (changed_jump));
1093 }
1094
1095 delete_insn (insn);
1096 changed = 1;
1097 continue;
1098 }
1099 else
1100 {
1101 cancel_changes (0);
1102 obfree (storage);
1103 }
1104 }
1105 #endif
1106 /* If branches are expensive, convert
1107 if (foo) bar++; to bar += (foo != 0);
1108 and similarly for "bar--;"
1109
1110 INSN is the conditional branch around the arithmetic. We set:
1111
1112 TEMP is the arithmetic insn.
1113 TEMP1 is the SET doing the arithmetic.
1114 TEMP2 is the operand being incremented or decremented.
1115 TEMP3 to the condition being tested.
1116 TEMP4 to the earliest insn used to find the condition. */
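/* For instance, at the source level (illustrative):

     if (a < b) c++;   ==>   c += (a < b);

   built below from emit_store_flag plus an add or subtract.  */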
1117
1118 if ((BRANCH_COST >= 2
1119 #ifdef HAVE_incscc
1120 || HAVE_incscc
1121 #endif
1122 #ifdef HAVE_decscc
1123 || HAVE_decscc
1124 #endif
1125 )
1126 && ! reload_completed
1127 && this_is_condjump && ! this_is_simplejump
1128 && (temp = next_nonnote_insn (insn)) != 0
1129 && (temp1 = single_set (temp)) != 0
1130 && (temp2 = SET_DEST (temp1),
1131 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
1132 && GET_CODE (SET_SRC (temp1)) == PLUS
1133 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
1134 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
1135 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
1136 && ! side_effects_p (temp2)
1137 && ! may_trap_p (temp2)
1138 /* INSN must either branch to the insn after TEMP or the insn
1139 after TEMP must branch to the same place as INSN. */
1140 && (reallabelprev == temp
1141 || ((temp3 = next_active_insn (temp)) != 0
1142 && simplejump_p (temp3)
1143 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
1144 && (temp3 = get_condition (insn, &temp4)) != 0
1145 /* We must be comparing objects whose modes imply the size.
1146 We could handle BLKmode if (1) emit_store_flag could
1147 and (2) we could find the size reliably. */
1148 && GET_MODE (XEXP (temp3, 0)) != BLKmode
1149 && can_reverse_comparison_p (temp3, insn))
1150 {
1151 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
1152 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1153
1154 start_sequence ();
1155
1156 /* It must be the case that TEMP2 is not modified in the range
1157 [TEMP4, INSN). The one exception we make is if the insn
1158 before INSN sets TEMP2 to something which is also unchanged
1159 in that range. In that case, we can move the initialization
1160 into our sequence. */
1161
1162 if ((temp5 = prev_active_insn (insn)) != 0
1163 && no_labels_between_p (temp5, insn)
1164 && GET_CODE (temp5) == INSN
1165 && (temp6 = single_set (temp5)) != 0
1166 && rtx_equal_p (temp2, SET_DEST (temp6))
1167 && (CONSTANT_P (SET_SRC (temp6))
1168 || GET_CODE (SET_SRC (temp6)) == REG
1169 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1170 {
1171 emit_insn (PATTERN (temp5));
1172 init_insn = temp5;
1173 init = SET_SRC (temp6);
1174 }
1175
1176 if (CONSTANT_P (init)
1177 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1178 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1179 XEXP (temp3, 0), XEXP (temp3, 1),
1180 VOIDmode,
1181 (code == LTU || code == LEU
1182 || code == GTU || code == GEU), 1);
1183
1184 /* If we can do the store-flag, do the addition or
1185 subtraction. */
1186
1187 if (target)
1188 target = expand_binop (GET_MODE (temp2),
1189 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1190 ? add_optab : sub_optab),
1191 temp2, target, temp2, 0, OPTAB_WIDEN);
1192
1193 if (target != 0)
1194 {
1195 /* Put the result back in temp2 in case it isn't already.
1196 Then replace the jump, possibly a CC0-setting insn in
1197 front of the jump, and TEMP, with the sequence we have
1198 made. */
1199
1200 if (target != temp2)
1201 emit_move_insn (temp2, target);
1202
1203 seq = get_insns ();
1204 end_sequence ();
1205
1206 emit_insns_before (seq, temp4);
1207 delete_insn (temp);
1208
1209 if (init_insn)
1210 delete_insn (init_insn);
1211
1212 next = NEXT_INSN (insn);
1213 #ifdef HAVE_cc0
1214 delete_insn (prev_nonnote_insn (insn));
1215 #endif
1216 delete_insn (insn);
1217
1218 if (after_regscan)
1219 {
1220 reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
1221 old_max_reg = max_reg_num ();
1222 }
1223
1224 changed = 1;
1225 continue;
1226 }
1227 else
1228 end_sequence ();
1229 }
1230
1231 /* Try to use a conditional move (if the target has them), or a
1232 store-flag insn. If the target has conditional arithmetic as
1233 well as conditional move, the above code will have done something.
1234 Note that we prefer the above code since it is more general: the
1235 code below can make changes that require work to undo.
1236
1237 The general case here is:
1238
1239 1) x = a; if (...) x = b; and
1240 2) if (...) x = b;
1241
1242 If the jump would be faster, the machine should not have defined
1243 the movcc or scc insns! These cases are often made by the
1244 previous optimization.
1245
1246 The second case is treated as x = x; if (...) x = b;.
1247
1248 INSN here is the jump around the store. We set:
1249
1250 TEMP to the "x op= b;" insn.
1251 TEMP1 to X.
1252 TEMP2 to B.
1253 TEMP3 to A (X in the second case).
1254 TEMP4 to the condition being tested.
1255 TEMP5 to the earliest insn used to find the condition.
1256 TEMP6 to the SET of TEMP. */
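/* Source-level sketch of general case 1 above (illustrative):

     x = a; if (cond) x = b;   ==>   x = cond ? b : a;

   realized with emit_conditional_move where the target provides one;
   the store-flag fallback follows.  */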
1257
1258 if (/* We can't do this after reload has completed. */
1259 ! reload_completed
1260 #ifdef HAVE_conditional_arithmetic
1261 /* Defer this until after CSE so the above code gets the
1262 first crack at it. */
1263 && cse_not_expected
1264 #endif
1265 && this_is_condjump && ! this_is_simplejump
1266 /* Set TEMP to the "x = b;" insn. */
1267 && (temp = next_nonnote_insn (insn)) != 0
1268 && GET_CODE (temp) == INSN
1269 && (temp6 = single_set (temp)) != NULL_RTX
1270 && GET_CODE (temp1 = SET_DEST (temp6)) == REG
1271 && (! SMALL_REGISTER_CLASSES
1272 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
1273 && ! side_effects_p (temp2 = SET_SRC (temp6))
1274 && ! may_trap_p (temp2)
1275 /* Allow either form, but prefer the former if both apply.
1276 There is no point in using the old value of TEMP1 if
1277 it is a register, since cse will alias them. It can
1278 lose if the old value were a hard register since CSE
1279 won't replace hard registers. Avoid using TEMP3 if
1280 small register classes and it is a hard register. */
1281 && (((temp3 = reg_set_last (temp1, insn)) != 0
1282 && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
1283 && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
1284 /* Make the latter case look like x = x; if (...) x = b; */
1285 || (temp3 = temp1, 1))
1286 /* INSN must either branch to the insn after TEMP or the insn
1287 after TEMP must branch to the same place as INSN. */
1288 && (reallabelprev == temp
1289 || ((temp4 = next_active_insn (temp)) != 0
1290 && simplejump_p (temp4)
1291 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
1292 && (temp4 = get_condition (insn, &temp5)) != 0
1293 /* We must be comparing objects whose modes imply the size.
1294 We could handle BLKmode if (1) emit_store_flag could
1295 and (2) we could find the size reliably. */
1296 && GET_MODE (XEXP (temp4, 0)) != BLKmode
1297 /* Even if branches are cheap, the store_flag optimization
1298 can win when the operation to be performed can be
1299 expressed directly. */
1300 #ifdef HAVE_cc0
1301 /* If the previous insn sets CC0 and something else, we can't
1302 do this since we are going to delete that insn. */
1303
1304 && ! ((temp6 = prev_nonnote_insn (insn)) != 0
1305 && GET_CODE (temp6) == INSN
1306 && (sets_cc0_p (PATTERN (temp6)) == -1
1307 || (sets_cc0_p (PATTERN (temp6)) == 1
1308 && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
1309 #endif
1310 )
1311 {
1312 #ifdef HAVE_conditional_move
1313 /* First try a conditional move. */
1314 {
1315 enum rtx_code code = GET_CODE (temp4);
1316 rtx var = temp1;
1317 rtx cond0, cond1, aval, bval;
1318 rtx target, new_insn;
1319
1320 /* Copy the compared variables into cond0 and cond1, so that
1321 any side effects performed in or after the old comparison
1322 will not affect our compare, which will come later. */
1323 /* ??? Is it possible to just use the comparison in the jump
1324 insn? After all, we're going to delete it. We'd have
1325 to modify emit_conditional_move to take a comparison rtx
1326 instead or write a new function. */
1327 cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
1328 /* We want the target to be able to simplify comparisons with
1329 zero (and maybe other constants as well), so don't create
1330 pseudos for them. There's no need to either. */
1331 if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
1332 || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
1333 cond1 = XEXP (temp4, 1);
1334 else
1335 cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));
1336
1337 /* Careful about copying these values -- an IOR or the like may
1338 need to do other things, like clobber flags. */
1339 /* ??? Assume for the moment that AVAL is ok. */
1340 aval = temp3;
1341
1342 start_sequence ();
1343
1344 /* We're dealing with a single_set insn with no side effects
1345 on SET_SRC. We do need to be reasonably certain that if
1346 we need to force BVAL into a register that we won't
1347 clobber the flags -- general_operand should suffice. */
1348 if (general_operand (temp2, GET_MODE (var)))
1349 bval = temp2;
1350 else
1351 {
1352 bval = gen_reg_rtx (GET_MODE (var));
1353 new_insn = copy_rtx (temp);
1354 temp6 = single_set (new_insn);
1355 SET_DEST (temp6) = bval;
1356 emit_insn (PATTERN (new_insn));
1357 }
1358
1359 target = emit_conditional_move (var, code,
1360 cond0, cond1, VOIDmode,
1361 aval, bval, GET_MODE (var),
1362 (code == LTU || code == GEU
1363 || code == LEU || code == GTU));
1364
1365 if (target)
1366 {
1367 rtx seq1, seq2, last;
1368 int copy_ok;
1369
1370 /* Save the conditional move sequence but don't emit it
1371 yet. On some machines, like the alpha, it is possible
1372 that temp5 == insn, so next generate the sequence that
1373 saves the compared values and then emit both
1374 sequences ensuring seq1 occurs before seq2. */
1375 seq2 = get_insns ();
1376 end_sequence ();
1377
1378 /* "Now that we can't fail..." Famous last words.
1379 Generate the copy insns that preserve the compared
1380 values. */
1381 start_sequence ();
1382 emit_move_insn (cond0, XEXP (temp4, 0));
1383 if (cond1 != XEXP (temp4, 1))
1384 emit_move_insn (cond1, XEXP (temp4, 1));
1385 seq1 = get_insns ();
1386 end_sequence ();
1387
1388 /* Validate the sequence -- this may be some weird
1389 bit-extract-and-test instruction for which there
1390 exists no complementary bit-extract insn. */
1391 copy_ok = 1;
1392 for (last = seq1; last ; last = NEXT_INSN (last))
1393 if (recog_memoized (last) < 0)
1394 {
1395 copy_ok = 0;
1396 break;
1397 }
1398
1399 if (copy_ok)
1400 {
1401 emit_insns_before (seq1, temp5);
1402
1403 /* Insert conditional move after insn, to be sure
1404 that the jump and a possible compare won't be
1405 separated. */
1406 last = emit_insns_after (seq2, insn);
1407
1408 /* ??? We can also delete the insn that sets X to A.
1409 Flow will do it too though. */
1410 delete_insn (temp);
1411 next = NEXT_INSN (insn);
1412 delete_jump (insn);
1413
1414 if (after_regscan)
1415 {
1416 reg_scan_update (seq1, NEXT_INSN (last),
1417 old_max_reg);
1418 old_max_reg = max_reg_num ();
1419 }
1420
1421 changed = 1;
1422 continue;
1423 }
1424 }
1425 else
1426 end_sequence ();
1427 }
1428 #endif
1429
1430 /* That didn't work, try a store-flag insn.
1431
1432 We further divide the cases into:
1433
1434 1) x = a; if (...) x = b; and either A or B is zero,
1435 2) if (...) x = 0; and jumps are expensive,
1436 3) x = a; if (...) x = b; and A and B are constants where all
1437 the set bits in A are also set in B and jumps are expensive,
1438 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
1439 more expensive, and
1440 5) if (...) x = b; if jumps are even more expensive. */
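/* A worked instance of the general case (a sketch; assumes the flag
   value is normalized to -1 as described below):

     x = a; if (cond) x = b;   ==>   f = -(cond != 0);
                                     x = (b & f) | (a & ~f);

   with the AND/IOR combination built from expand_and and ior_optab.  */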
1441
1442 if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
1443 /* We will be passing this as an operand to expand_and. No
1444 good if it's not valid as an operand. */
1445 && general_operand (temp2, GET_MODE (temp2))
1446 && ((GET_CODE (temp3) == CONST_INT)
1447 /* Make the latter case look like
1448 x = x; if (...) x = 0; */
1449 || (temp3 = temp1,
1450 ((BRANCH_COST >= 2
1451 && temp2 == const0_rtx)
1452 || BRANCH_COST >= 3)))
1453 /* If B is zero, OK; if A is zero, can only do (1) if we
1454 can reverse the condition. See if (3) applies possibly
1455 by reversing the condition. Prefer reversing to (4) when
1456 branches are very expensive. */
1457 && (((BRANCH_COST >= 2
1458 || STORE_FLAG_VALUE == -1
1459 || (STORE_FLAG_VALUE == 1
1460 /* Check that the mask is a power of two,
1461 so that it can probably be generated
1462 with a shift. */
1463 && GET_CODE (temp3) == CONST_INT
1464 && exact_log2 (INTVAL (temp3)) >= 0))
1465 && (reversep = 0, temp2 == const0_rtx))
1466 || ((BRANCH_COST >= 2
1467 || STORE_FLAG_VALUE == -1
1468 || (STORE_FLAG_VALUE == 1
1469 && GET_CODE (temp2) == CONST_INT
1470 && exact_log2 (INTVAL (temp2)) >= 0))
1471 && temp3 == const0_rtx
1472 && (reversep = can_reverse_comparison_p (temp4, insn)))
1473 || (BRANCH_COST >= 2
1474 && GET_CODE (temp2) == CONST_INT
1475 && GET_CODE (temp3) == CONST_INT
1476 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
1477 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
1478 && (reversep = can_reverse_comparison_p (temp4,
1479 insn)))))
1480 || BRANCH_COST >= 3)
1481 )
1482 {
1483 enum rtx_code code = GET_CODE (temp4);
1484 rtx uval, cval, var = temp1;
1485 int normalizep;
1486 rtx target;
1487
1488 /* If necessary, reverse the condition. */
1489 if (reversep)
1490 code = reverse_condition (code), uval = temp2, cval = temp3;
1491 else
1492 uval = temp3, cval = temp2;
1493
1494 /* If CVAL is non-zero, normalize to -1. Otherwise, if UVAL
1495 is the constant 1, it is best to just compute the result
1496 directly. If UVAL is constant and STORE_FLAG_VALUE
1497 includes all of its bits, it is best to compute the flag
1498 value unnormalized and `and' it with UVAL. Otherwise,
1499 normalize to -1 and `and' with UVAL. */
1500 normalizep = (cval != const0_rtx ? -1
1501 : (uval == const1_rtx ? 1
1502 : (GET_CODE (uval) == CONST_INT
1503 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
1504 ? 0 : -1));
1505
1506 /* We will be putting the store-flag insn immediately in
1507 front of the comparison that was originally being done,
1508 so we know all the variables in TEMP4 will be valid.
1509 However, this might be in front of the assignment of
1510 A to VAR. If it is, it would clobber the store-flag
1511 we will be emitting.
1512
1513 Therefore, emit into a temporary which will be copied to
1514 VAR immediately after TEMP. */
1515
1516 start_sequence ();
1517 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
1518 XEXP (temp4, 0), XEXP (temp4, 1),
1519 VOIDmode,
1520 (code == LTU || code == LEU
1521 || code == GEU || code == GTU),
1522 normalizep);
1523 if (target)
1524 {
1525 rtx seq;
1526 rtx before = insn;
1527
1528 seq = get_insns ();
1529 end_sequence ();
1530
1531 /* Put the store-flag insns in front of the first insn
1532 used to compute the condition to ensure that we
1533 use the same values of them as the current
1534 comparison. However, the remainder of the insns we
1535 generate will be placed directly in front of the
1536 jump insn, in case any of the pseudos we use
1537 are modified earlier. */
1538
1539 emit_insns_before (seq, temp5);
1540
1541 start_sequence ();
1542
1543 /* Both CVAL and UVAL are non-zero. */
1544 if (cval != const0_rtx && uval != const0_rtx)
1545 {
1546 rtx tem1, tem2;
1547
1548 tem1 = expand_and (uval, target, NULL_RTX);
1549 if (GET_CODE (cval) == CONST_INT
1550 && GET_CODE (uval) == CONST_INT
1551 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
1552 tem2 = cval;
1553 else
1554 {
1555 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
1556 target, NULL_RTX, 0);
1557 tem2 = expand_and (cval, tem2,
1558 (GET_CODE (tem2) == REG
1559 ? tem2 : 0));
1560 }
1561
1562 /* If we usually make new pseudos, do so here. This
1563 turns out to help machines that have conditional
1564 move insns. */
1565 /* ??? Conditional moves have already been handled.
1566 This may be obsolete. */
1567
1568 if (flag_expensive_optimizations)
1569 target = 0;
1570
1571 target = expand_binop (GET_MODE (var), ior_optab,
1572 tem1, tem2, target,
1573 1, OPTAB_WIDEN);
1574 }
1575 else if (normalizep != 1)
1576 {
1577 /* We know that either CVAL or UVAL is zero. If
1578 UVAL is zero, negate TARGET and `and' with CVAL.
1579 Otherwise, `and' with UVAL. */
1580 if (uval == const0_rtx)
1581 {
1582 target = expand_unop (GET_MODE (var), one_cmpl_optab,
1583 target, NULL_RTX, 0);
1584 uval = cval;
1585 }
1586
1587 target = expand_and (uval, target,
1588 (GET_CODE (target) == REG
1589 && ! preserve_subexpressions_p ()
1590 ? target : NULL_RTX));
1591 }
1592
1593 emit_move_insn (var, target);
1594 seq = get_insns ();
1595 end_sequence ();
1596 #ifdef HAVE_cc0
1597 /* If INSN uses CC0, we must not separate it from the
1598 insn that sets cc0. */
1599 if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
1600 before = prev_nonnote_insn (before);
1601 #endif
1602 emit_insns_before (seq, before);
1603
1604 delete_insn (temp);
1605 next = NEXT_INSN (insn);
1606 delete_jump (insn);
1607
1608 if (after_regscan)
1609 {
1610 reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
1611 old_max_reg = max_reg_num ();
1612 }
1613
1614 changed = 1;
1615 continue;
1616 }
1617 else
1618 end_sequence ();
1619 }
1620 }
1621
1622
1623 /* Simplify if (...) x = 1; else {...} if (x) ...
1624 We recognize this case scanning backwards as well.
1625
1626 TEMP is the assignment to x;
1627 TEMP1 is the label at the head of the second if. */
1628 /* ?? This should call get_condition to find the values being
1629 compared, instead of looking for a COMPARE insn when HAVE_cc0
1630 is not defined. This would allow it to work on the m88k. */
1631 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1632 is not defined and the condition is tested by a separate compare
1633 insn. This is because the code below assumes that the result
1634 of the compare dies in the following branch.
1635
1636 Not only that, but there might be other insns between the
1637 compare and branch whose results are live. Those insns need
1638 to be executed.
1639
1640 A way to fix this is to move the insns at JUMP_LABEL (insn)
1641 to before INSN. If we are running before flow, they will
1642 be deleted if they aren't needed. But this doesn't work
1643 well after flow.
1644
1645 This is really a special-case of jump threading, anyway. The
1646 right thing to do is to replace this and jump threading with
1647 much simpler code in cse.
1648
1649 This code has been turned off in the non-cc0 case in the
1650 meantime. */
1651
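          /* A sketch of the cc0 form handled below (hypothetical insns):

                 x = 1;                 <- TEMP (constant store)
                 goto L1;               <- INSN (simplejump)
                 ...
             L1: tst x                  <- sets cc0 from x
                 jeq L2                 <- conditional jump on that test

             Since x is known to be 1 at INSN, the outcome of the tst/jeq
             pair is known at compile time, so INSN can be redirected to
             whichever place that test must reach.  */
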
1652 #ifdef HAVE_cc0
1653 else if (this_is_simplejump
1654 /* Safe to skip USE and CLOBBER insns here
1655 since they will not be deleted. */
1656 && (temp = prev_active_insn (insn))
1657 && no_labels_between_p (temp, insn)
1658 && GET_CODE (temp) == INSN
1659 && GET_CODE (PATTERN (temp)) == SET
1660 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1661 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1662 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1663 /* If we find that the next value tested is `x'
1664 (TEMP1 is the insn where this happens), win. */
1665 && GET_CODE (temp1) == INSN
1666 && GET_CODE (PATTERN (temp1)) == SET
1667 #ifdef HAVE_cc0
1668 /* Does temp1 `tst' the value of x? */
1669 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1670 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1671 && (temp1 = next_nonnote_insn (temp1))
1672 #else
1673 /* Does temp1 compare the value of x against zero? */
1674 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1675 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1676 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1677 == SET_DEST (PATTERN (temp)))
1678 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1679 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1680 #endif
1681 && condjump_p (temp1))
1682 {
1683 /* Get the if_then_else from the condjump. */
1684 rtx choice = SET_SRC (PATTERN (temp1));
1685 if (GET_CODE (choice) == IF_THEN_ELSE)
1686 {
1687 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1688 rtx val = SET_SRC (PATTERN (temp));
1689 rtx cond
1690 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1691 val, const0_rtx);
1692 rtx ultimate;
1693
1694 if (cond == const_true_rtx)
1695 ultimate = XEXP (choice, 1);
1696 else if (cond == const0_rtx)
1697 ultimate = XEXP (choice, 2);
1698 else
1699 ultimate = 0;
1700
1701 if (ultimate == pc_rtx)
1702 ultimate = get_label_after (temp1);
1703 else if (ultimate && GET_CODE (ultimate) != RETURN)
1704 ultimate = XEXP (ultimate, 0);
1705
              if (ultimate && JUMP_LABEL (insn) != ultimate)
1707 changed |= redirect_jump (insn, ultimate);
1708 }
1709 }
1710 #endif
1711
1712 #if 0
1713 /* @@ This needs a bit of work before it will be right.
1714
1715 Any type of comparison can be accepted for the first and
1716 second compare. When rewriting the first jump, we must
     compute what conditions can reach label3, and use the
1718 appropriate code. We can not simply reverse/swap the code
1719 of the first jump. In some cases, the second jump must be
1720 rewritten also.
1721
1722 For example,
1723 < == converts to > ==
1724 < != converts to == >
1725 etc.
1726
1727 If the code is written to only accept an '==' test for the second
1728 compare, then all that needs to be done is to swap the condition
1729 of the first branch.
1730
     It is questionable whether we want this optimization anyway,
     since if the user wrote code like this because he/she knew that
     the jump to label1 is taken most of the time, then rewriting
     it this way gives slower code.  */
1735 /* @@ This should call get_condition to find the values being
1736 compared, instead of looking for a COMPARE insn when HAVE_cc0
1737 is not defined. This would allow it to work on the m88k. */
1738 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1739 is not defined and the condition is tested by a separate compare
1740 insn. This is because the code below assumes that the result
1741 of the compare dies in the following branch. */
1742
1743 /* Simplify test a ~= b
1744 condjump label1;
1745 test a == b
1746 condjump label2;
1747 jump label3;
1748 label1:
1749
1750 rewriting as
1751 test a ~~= b
1752 condjump label3
1753 test a == b
1754 condjump label2
1755 label1:
1756
1757 where ~= is an inequality, e.g. >, and ~~= is the swapped
1758 inequality, e.g. <.
1759
1760 We recognize this case scanning backwards.
1761
1762 TEMP is the conditional jump to `label2';
1763 TEMP1 is the test for `a == b';
1764 TEMP2 is the conditional jump to `label1';
1765 TEMP3 is the test for `a ~= b'. */
1766 else if (this_is_simplejump
1767 && (temp = prev_active_insn (insn))
1768 && no_labels_between_p (temp, insn)
1769 && condjump_p (temp)
1770 && (temp1 = prev_active_insn (temp))
1771 && no_labels_between_p (temp1, temp)
1772 && GET_CODE (temp1) == INSN
1773 && GET_CODE (PATTERN (temp1)) == SET
1774 #ifdef HAVE_cc0
1775 && sets_cc0_p (PATTERN (temp1)) == 1
1776 #else
1777 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1778 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1779 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1780 #endif
1781 && (temp2 = prev_active_insn (temp1))
1782 && no_labels_between_p (temp2, temp1)
1783 && condjump_p (temp2)
1784 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1785 && (temp3 = prev_active_insn (temp2))
1786 && no_labels_between_p (temp3, temp2)
1787 && GET_CODE (PATTERN (temp3)) == SET
1788 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1789 SET_DEST (PATTERN (temp1)))
1790 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1791 SET_SRC (PATTERN (temp3)))
1792 && ! inequality_comparisons_p (PATTERN (temp))
1793 && inequality_comparisons_p (PATTERN (temp2)))
1794 {
1795 rtx fallthrough_label = JUMP_LABEL (temp2);
1796
1797 ++LABEL_NUSES (fallthrough_label);
1798 if (swap_jump (temp2, JUMP_LABEL (insn)))
1799 {
1800 delete_insn (insn);
1801 changed = 1;
1802 }
1803
1804 if (--LABEL_NUSES (fallthrough_label) == 0)
1805 delete_insn (fallthrough_label);
1806 }
1807 #endif
1808 /* Simplify if (...) {... x = 1;} if (x) ...
1809
     We recognize this case scanning backwards.
1811
1812 TEMP is the test of `x';
1813 TEMP1 is the assignment to `x' at the end of the
1814 previous statement. */
1815 /* @@ This should call get_condition to find the values being
1816 compared, instead of looking for a COMPARE insn when HAVE_cc0
1817 is not defined. This would allow it to work on the m88k. */
1818 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1819 is not defined and the condition is tested by a separate compare
1820 insn. This is because the code below assumes that the result
1821 of the compare dies in the following branch. */
1822
1823 /* ??? This has to be turned off. The problem is that the
1824 unconditional jump might indirectly end up branching to the
1825 label between TEMP1 and TEMP. We can't detect this, in general,
1826 since it may become a jump to there after further optimizations.
     If such a jump is created, it will be deleted, so we will retry
     this optimization in the next pass, looping forever.
1829
1830 The present code prevents this by putting the jump after the
1831 label, but this is not logically correct. */
1832 #if 0
1833 else if (this_is_condjump
1834 /* Safe to skip USE and CLOBBER insns here
1835 since they will not be deleted. */
1836 && (temp = prev_active_insn (insn))
1837 && no_labels_between_p (temp, insn)
1838 && GET_CODE (temp) == INSN
1839 && GET_CODE (PATTERN (temp)) == SET
1840 #ifdef HAVE_cc0
1841 && sets_cc0_p (PATTERN (temp)) == 1
1842 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1843 #else
         /* TEMP must be a compare insn; we can not accept a
            register-to-register move here, since it may not be simply a
            tst insn.  */
1847 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1848 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1849 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1850 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1851 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1852 #endif
1853 /* May skip USE or CLOBBER insns here
1854 for checking for opportunity, since we
1855 take care of them later. */
1856 && (temp1 = prev_active_insn (temp))
1857 && GET_CODE (temp1) == INSN
1858 && GET_CODE (PATTERN (temp1)) == SET
1859 #ifdef HAVE_cc0
1860 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1861 #else
1862 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1863 == SET_DEST (PATTERN (temp1)))
1864 #endif
1865 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1866 /* If this isn't true, cse will do the job. */
1867 && ! no_labels_between_p (temp1, temp))
1868 {
1869 /* Get the if_then_else from the condjump. */
1870 rtx choice = SET_SRC (PATTERN (insn));
1871 if (GET_CODE (choice) == IF_THEN_ELSE
1872 && (GET_CODE (XEXP (choice, 0)) == EQ
1873 || GET_CODE (XEXP (choice, 0)) == NE))
1874 {
1875 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1876 rtx last_insn;
1877 rtx ultimate;
1878 rtx p;
1879
1880 /* Get the place that condjump will jump to
1881 if it is reached from here. */
1882 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1883 == want_nonzero)
1884 ultimate = XEXP (choice, 1);
1885 else
1886 ultimate = XEXP (choice, 2);
1887 /* Get it as a CODE_LABEL. */
1888 if (ultimate == pc_rtx)
1889 ultimate = get_label_after (insn);
1890 else
1891 /* Get the label out of the LABEL_REF. */
1892 ultimate = XEXP (ultimate, 0);
1893
1894 /* Insert the jump immediately before TEMP, specifically
1895 after the label that is between TEMP1 and TEMP. */
1896 last_insn = PREV_INSN (temp);
1897
                  /* If we would be branching to the next insn, the jump
                     would immediately be deleted and then re-inserted in
                     a subsequent pass over the code, so don't do anything
                     in that case.  */
1902 if (next_active_insn (last_insn)
1903 != next_active_insn (ultimate))
1904 {
1905 emit_barrier_after (last_insn);
1906 p = emit_jump_insn_after (gen_jump (ultimate),
1907 last_insn);
1908 JUMP_LABEL (p) = ultimate;
1909 ++LABEL_NUSES (ultimate);
1910 if (INSN_UID (ultimate) < max_jump_chain
1911 && INSN_CODE (p) < max_jump_chain)
1912 {
1913 jump_chain[INSN_UID (p)]
1914 = jump_chain[INSN_UID (ultimate)];
1915 jump_chain[INSN_UID (ultimate)] = p;
1916 }
1917 changed = 1;
1918 continue;
1919 }
1920 }
1921 }
1922 #endif
1923 #ifdef HAVE_trap
1924 /* Detect a conditional jump jumping over an unconditional trap. */
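      /* I.e. (a sketch)

             if (cond) goto L;  trap;  L: ...

         becomes a single conditional trap on the reversed condition:

             trap if ! cond;  ...

         provided the comparison is reversible.  */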
1925 else if (HAVE_trap
1926 && this_is_condjump && ! this_is_simplejump
1927 && reallabelprev != 0
1928 && GET_CODE (reallabelprev) == INSN
1929 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1930 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1931 && prev_active_insn (reallabelprev) == insn
1932 && no_labels_between_p (insn, reallabelprev)
1933 && (temp2 = get_condition (insn, &temp4))
1934 && can_reverse_comparison_p (temp2, insn))
1935 {
1936 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1937 XEXP (temp2, 0), XEXP (temp2, 1),
1938 TRAP_CODE (PATTERN (reallabelprev)));
1939
1940 if (new)
1941 {
1942 emit_insn_before (new, temp4);
1943 delete_insn (reallabelprev);
1944 delete_jump (insn);
1945 changed = 1;
1946 continue;
1947 }
1948 }
1949 /* Detect a jump jumping to an unconditional trap. */
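      /* I.e. (a sketch) an unconditional

             goto L;  ...  L: trap;

         becomes a plain trap; a conditional jump whose condition matches
         the trap's own condition becomes a conditional trap; and a jump
         whose condition excludes the trap's is simply redirected past
         it.  */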
1950 else if (HAVE_trap && this_is_condjump
1951 && (temp = next_active_insn (JUMP_LABEL (insn)))
1952 && GET_CODE (temp) == INSN
1953 && GET_CODE (PATTERN (temp)) == TRAP_IF
1954 && (this_is_simplejump
1955 || (temp2 = get_condition (insn, &temp4))))
1956 {
1957 rtx tc = TRAP_CONDITION (PATTERN (temp));
1958
1959 if (tc == const_true_rtx
1960 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1961 {
1962 rtx new;
1963 /* Replace an unconditional jump to a trap with a trap. */
1964 if (this_is_simplejump)
1965 {
1966 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1967 delete_jump (insn);
1968 changed = 1;
1969 continue;
1970 }
1971 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1972 XEXP (temp2, 1),
1973 TRAP_CODE (PATTERN (temp)));
1974 if (new)
1975 {
1976 emit_insn_before (new, temp4);
1977 delete_jump (insn);
1978 changed = 1;
1979 continue;
1980 }
1981 }
1982 /* If the trap condition and jump condition are mutually
1983 exclusive, redirect the jump to the following insn. */
1984 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1985 && ! this_is_simplejump
1986 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
1987 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
1988 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
1989 && redirect_jump (insn, get_label_after (temp)))
1990 {
1991 changed = 1;
1992 continue;
1993 }
1994 }
1995 #endif
1996 else
1997 {
1998 /* Now that the jump has been tensioned,
1999 try cross jumping: check for identical code
2000 before the jump and before its target label. */
2001
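          /* For example (a sketch): if the same two insns

                 a = b + c;  d = a * 2;

             appear both just before `goto L' and just before the label L
             itself, the copy before the jump is deleted and the jump is
             redirected to a new label in front of the surviving copy.  */
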
2002 /* First, cross jumping of conditional jumps: */
2003
2004 if (cross_jump && condjump_p (insn))
2005 {
2006 rtx newjpos, newlpos;
2007 rtx x = prev_real_insn (JUMP_LABEL (insn));
2008
2009 /* A conditional jump may be crossjumped
2010 only if the place it jumps to follows
2011 an opposing jump that comes back here. */
2012
2013 if (x != 0 && ! jump_back_p (x, insn))
2014 /* We have no opposing jump;
2015 cannot cross jump this insn. */
2016 x = 0;
2017
2018 newjpos = 0;
              /* X is nonzero if it is ok to cross jump to code
                 before X.  If so, see if the sequences match.  */
2021 if (x != 0)
2022 find_cross_jump (insn, x, 2,
2023 &newjpos, &newlpos);
2024
2025 if (newjpos != 0)
2026 {
2027 do_cross_jump (insn, newjpos, newlpos);
2028 /* Make the old conditional jump
2029 into an unconditional one. */
2030 SET_SRC (PATTERN (insn))
2031 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
2032 INSN_CODE (insn) = -1;
2033 emit_barrier_after (insn);
2034 /* Add to jump_chain unless this is a new label
2035 whose UID is too large. */
2036 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
2037 {
2038 jump_chain[INSN_UID (insn)]
2039 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2040 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2041 }
2042 changed = 1;
2043 next = insn;
2044 }
2045 }
2046
2047 /* Cross jumping of unconditional jumps:
2048 a few differences. */
2049
2050 if (cross_jump && simplejump_p (insn))
2051 {
2052 rtx newjpos, newlpos;
2053 rtx target;
2054
2055 newjpos = 0;
2056
              /* See whether the code just before JUMP_LABEL (insn)
                 matches the code just before INSN.  */
2059 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2060 &newjpos, &newlpos);
2061
              /* If we cannot cross jump to code before the label,
                 see if we can cross jump to another jump to
                 the same label.  */
              /* Try each other jump to this label.  */
2066 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2067 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2068 target != 0 && newjpos == 0;
2069 target = jump_chain[INSN_UID (target)])
2070 if (target != insn
2071 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2072 /* Ignore TARGET if it's deleted. */
2073 && ! INSN_DELETED_P (target))
2074 find_cross_jump (insn, target, 2,
2075 &newjpos, &newlpos);
2076
2077 if (newjpos != 0)
2078 {
2079 do_cross_jump (insn, newjpos, newlpos);
2080 changed = 1;
2081 next = insn;
2082 }
2083 }
2084
2085 /* This code was dead in the previous jump.c! */
2086 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2087 {
2088 /* Return insns all "jump to the same place"
2089 so we can cross-jump between any two of them. */
2090
2091 rtx newjpos, newlpos, target;
2092
2093 newjpos = 0;
2094
              /* There is no label to cross jump to here; instead, try
                 to cross jump with each other RETURN insn on the
                 return chain.  */
2099 for (target = jump_chain[0];
2100 target != 0 && newjpos == 0;
2101 target = jump_chain[INSN_UID (target)])
2102 if (target != insn
2103 && ! INSN_DELETED_P (target)
2104 && GET_CODE (PATTERN (target)) == RETURN)
2105 find_cross_jump (insn, target, 2,
2106 &newjpos, &newlpos);
2107
2108 if (newjpos != 0)
2109 {
2110 do_cross_jump (insn, newjpos, newlpos);
2111 changed = 1;
2112 next = insn;
2113 }
2114 }
2115 }
2116 }
2117
2118 first = 0;
2119 }
2120
2121 /* Delete extraneous line number notes.
2122 Note that two consecutive notes for different lines are not really
2123 extraneous. There should be some indication where that line belonged,
2124 even if it became empty. */
2125
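  /* For example, of two consecutive notes both for line 10 of foo.c,
     only the first is kept; a note for line 10 followed by one for
     line 11 is left alone, even if line 10 became empty.  */
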
2126 {
2127 rtx last_note = 0;
2128
2129 for (insn = f; insn; insn = NEXT_INSN (insn))
2130 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2131 {
2132 /* Delete this note if it is identical to previous note. */
2133 if (last_note
2134 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2135 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2136 {
2137 delete_insn (insn);
2138 continue;
2139 }
2140
2141 last_note = insn;
2142 }
2143 }
2144
2145 #ifdef HAVE_return
2146 if (HAVE_return)
2147 {
2148 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2149 in front of it. If the machine allows it at this point (we might be
2150 after reload for a leaf routine), it will improve optimization for it
2151 to be there. We do this both here and at the start of this pass since
2152 the RETURN might have been deleted by some of our optimizations. */
2153 insn = get_last_insn ();
2154 while (insn && GET_CODE (insn) == NOTE)
2155 insn = PREV_INSN (insn);
2156
2157 if (insn && GET_CODE (insn) != BARRIER)
2158 {
2159 emit_jump_insn (gen_return ());
2160 emit_barrier ();
2161 }
2162 }
2163 #endif
2164
  /* CAN_REACH_END is persistent for each function.  Once set, it should
     not be cleared by this pass; this matters especially in the case
     where we delete the NOTE_FUNCTION_END note.  It is reset by the
     front-end before compiling each function.  */
2169 if (calculate_can_reach_end (last_insn, optimize != 0))
2170 can_reach_end = 1;
2171
2172 end:
2173 /* Clean up. */
2174 free (jump_chain);
2175 jump_chain = 0;
2176 }
2177 \f
2178 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2179 notes whose labels don't occur in the insn any more. Returns the
2180 largest INSN_UID found. */
2181 static int
2182 init_label_info (f)
2183 rtx f;
2184 {
2185 int largest_uid = 0;
2186 rtx insn;
2187
2188 for (insn = f; insn; insn = NEXT_INSN (insn))
2189 {
2190 if (GET_CODE (insn) == CODE_LABEL)
2191 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2192 else if (GET_CODE (insn) == JUMP_INSN)
2193 JUMP_LABEL (insn) = 0;
2194 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2195 {
2196 rtx note, next;
2197
2198 for (note = REG_NOTES (insn); note; note = next)
2199 {
2200 next = XEXP (note, 1);
2201 if (REG_NOTE_KIND (note) == REG_LABEL
2202 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2203 remove_note (insn, note);
2204 }
2205 }
2206 if (INSN_UID (insn) > largest_uid)
2207 largest_uid = INSN_UID (insn);
2208 }
2209
2210 return largest_uid;
2211 }
2212
2213 /* Delete insns following barriers, up to next label.
2214
2215 Also delete no-op jumps created by gcse. */
2216 static void
2217 delete_barrier_successors (f)
2218 rtx f;
2219 {
2220 rtx insn;
2221
2222 for (insn = f; insn;)
2223 {
2224 if (GET_CODE (insn) == BARRIER)
2225 {
2226 insn = NEXT_INSN (insn);
2227
2228 never_reached_warning (insn);
2229
2230 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2231 {
2232 if (GET_CODE (insn) == NOTE
2233 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2234 insn = NEXT_INSN (insn);
2235 else
2236 insn = delete_insn (insn);
2237 }
2238 /* INSN is now the code_label. */
2239 }
2240 /* Also remove (set (pc) (pc)) insns which can be created by
2241 gcse. We eliminate such insns now to avoid having them
2242 cause problems later. */
2243 else if (GET_CODE (insn) == JUMP_INSN
2244 && GET_CODE (PATTERN (insn)) == SET
2245 && SET_SRC (PATTERN (insn)) == pc_rtx
2246 && SET_DEST (PATTERN (insn)) == pc_rtx)
2247 insn = delete_insn (insn);
2248
2249 else
2250 insn = NEXT_INSN (insn);
2251 }
2252 }
2253
2254 /* Mark the label each jump jumps to.
2255 Combine consecutive labels, and count uses of labels.
2256
2257 For each label, make a chain (using `jump_chain')
2258 of all the *unconditional* jumps that jump to it;
2259 also make a chain of all returns.
2260
2261 CROSS_JUMP indicates whether we are doing cross jumping
2262 and if we are whether we will be paying attention to
2263 death notes or not. */
2264
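/* A sketch of the encoding used by the code below: for a CODE_LABEL L,
   jump_chain[INSN_UID (L)] is the first simple jump to L, and
   jump_chain[INSN_UID (jump)] links each such jump to the next one;
   the chain of RETURN insns is headed by jump_chain[0].  */
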
2265 static void
2266 mark_all_labels (f, cross_jump)
2267 rtx f;
2268 int cross_jump;
2269 {
2270 rtx insn;
2271
2272 for (insn = f; insn; insn = NEXT_INSN (insn))
2273 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2274 {
2275 mark_jump_label (PATTERN (insn), insn, cross_jump);
2276 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2277 {
2278 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2279 {
2280 jump_chain[INSN_UID (insn)]
2281 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2282 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2283 }
2284 if (GET_CODE (PATTERN (insn)) == RETURN)
2285 {
2286 jump_chain[INSN_UID (insn)] = jump_chain[0];
2287 jump_chain[0] = insn;
2288 }
2289 }
2290 }
2291 }
2292
2293 /* Delete all labels already not referenced.
2294 Also find and return the last insn. */
2295
2296 static rtx
2297 delete_unreferenced_labels (f)
2298 rtx f;
2299 {
2300 rtx final = NULL_RTX;
2301 rtx insn;
2302
2303 for (insn = f; insn; )
2304 {
2305 if (GET_CODE (insn) == CODE_LABEL
2306 && LABEL_NUSES (insn) == 0
2307 && LABEL_ALTERNATE_NAME (insn) == NULL)
2308 insn = delete_insn (insn);
2309 else
2310 {
2311 final = insn;
2312 insn = NEXT_INSN (insn);
2313 }
2314 }
2315
2316 return final;
2317 }
2318
2319 /* Delete various simple forms of moves which have no necessary
2320 side effect. */
2321
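/* E.g. (set (reg i) (reg i)), a MEM-to-MEM move of one non-volatile
   location onto itself, a PARALLEL all of whose parts are such sets
   or USEs/CLOBBERs, and (on little-endian targets) a bit-field store
   that puts a value back into the place it came from.  */
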
2322 static void
2323 delete_noop_moves (f)
2324 rtx f;
2325 {
2326 rtx insn, next;
2327
2328 for (insn = f; insn; )
2329 {
2330 next = NEXT_INSN (insn);
2331
2332 if (GET_CODE (insn) == INSN)
2333 {
2334 register rtx body = PATTERN (insn);
2335
2336 /* Combine stack_adjusts with following push_insns. */
2337 #ifdef PUSH_ROUNDING
2338 if (GET_CODE (body) == SET
2339 && SET_DEST (body) == stack_pointer_rtx
2340 && GET_CODE (SET_SRC (body)) == PLUS
2341 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
2342 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
2343 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
2344 {
2345 rtx p;
2346 rtx stack_adjust_insn = insn;
2347 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
2348 int total_pushed = 0;
2349 int pushes = 0;
2350
2351 /* Find all successive push insns. */
2352 p = insn;
2353 /* Don't convert more than three pushes;
2354 that starts adding too many displaced addresses
2355 and the whole thing starts becoming a losing
2356 proposition. */
2357 while (pushes < 3)
2358 {
2359 rtx pbody, dest;
2360 p = next_nonnote_insn (p);
2361 if (p == 0 || GET_CODE (p) != INSN)
2362 break;
2363 pbody = PATTERN (p);
2364 if (GET_CODE (pbody) != SET)
2365 break;
2366 dest = SET_DEST (pbody);
2367 /* Allow a no-op move between the adjust and the push. */
2368 if (GET_CODE (dest) == REG
2369 && GET_CODE (SET_SRC (pbody)) == REG
2370 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2371 continue;
2372 if (! (GET_CODE (dest) == MEM
2373 && GET_CODE (XEXP (dest, 0)) == POST_INC
2374 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2375 break;
2376 pushes++;
2377 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
2378 > stack_adjust_amount)
2379 break;
2380 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2381 }
2382
2383 /* Discard the amount pushed from the stack adjust;
2384 maybe eliminate it entirely. */
2385 if (total_pushed >= stack_adjust_amount)
2386 {
2387 delete_computation (stack_adjust_insn);
2388 total_pushed = stack_adjust_amount;
2389 }
2390 else
2391 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
2392 = GEN_INT (stack_adjust_amount - total_pushed);
2393
2394 /* Change the appropriate push insns to ordinary stores. */
2395 p = insn;
2396 while (total_pushed > 0)
2397 {
2398 rtx pbody, dest;
2399 p = next_nonnote_insn (p);
2400 if (GET_CODE (p) != INSN)
2401 break;
2402 pbody = PATTERN (p);
2403 if (GET_CODE (pbody) != SET)
2404 break;
2405 dest = SET_DEST (pbody);
2406 /* Allow a no-op move between the adjust and the push. */
2407 if (GET_CODE (dest) == REG
2408 && GET_CODE (SET_SRC (pbody)) == REG
2409 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2410 continue;
2411 if (! (GET_CODE (dest) == MEM
2412 && GET_CODE (XEXP (dest, 0)) == POST_INC
2413 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2414 break;
2415 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2416 /* If this push doesn't fully fit in the space
2417 of the stack adjust that we deleted,
2418 make another stack adjust here for what we
2419 didn't use up. There should be peepholes
2420 to recognize the resulting sequence of insns. */
2421 if (total_pushed < 0)
2422 {
2423 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
2424 GEN_INT (- total_pushed)),
2425 p);
2426 break;
2427 }
2428 XEXP (dest, 0)
2429 = plus_constant (stack_pointer_rtx, total_pushed);
2430 }
2431 }
2432 #endif
2433
2434 /* Detect and delete no-op move instructions
2435 resulting from not allocating a parameter in a register. */
2436
2437 if (GET_CODE (body) == SET
2438 && (SET_DEST (body) == SET_SRC (body)
2439 || (GET_CODE (SET_DEST (body)) == MEM
2440 && GET_CODE (SET_SRC (body)) == MEM
2441 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2442 && ! (GET_CODE (SET_DEST (body)) == MEM
2443 && MEM_VOLATILE_P (SET_DEST (body)))
2444 && ! (GET_CODE (SET_SRC (body)) == MEM
2445 && MEM_VOLATILE_P (SET_SRC (body))))
2446 delete_computation (insn);
2447
2448 /* Detect and ignore no-op move instructions
2449 resulting from smart or fortuitous register allocation. */
2450
2451 else if (GET_CODE (body) == SET)
2452 {
2453 int sreg = true_regnum (SET_SRC (body));
2454 int dreg = true_regnum (SET_DEST (body));
2455
2456 if (sreg == dreg && sreg >= 0)
2457 delete_insn (insn);
2458 else if (sreg >= 0 && dreg >= 0)
2459 {
2460 rtx trial;
2461 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2462 sreg, NULL_PTR, dreg,
2463 GET_MODE (SET_SRC (body)));
2464
2465 if (tem != 0
2466 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2467 {
2468 /* DREG may have been the target of a REG_DEAD note in
2469 the insn which makes INSN redundant. If so, reorg
2470 would still think it is dead. So search for such a
2471 note and delete it if we find it. */
2472 if (! find_regno_note (insn, REG_UNUSED, dreg))
2473 for (trial = prev_nonnote_insn (insn);
2474 trial && GET_CODE (trial) != CODE_LABEL;
2475 trial = prev_nonnote_insn (trial))
2476 if (find_regno_note (trial, REG_DEAD, dreg))
2477 {
2478 remove_death (dreg, trial);
2479 break;
2480 }
2481
2482 /* Deleting insn could lose a death-note for SREG. */
2483 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2484 {
2485 /* Change this into a USE so that we won't emit
2486 code for it, but still can keep the note. */
2487 PATTERN (insn)
2488 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2489 INSN_CODE (insn) = -1;
2490 /* Remove all reg notes but the REG_DEAD one. */
2491 REG_NOTES (insn) = trial;
2492 XEXP (trial, 1) = NULL_RTX;
2493 }
2494 else
2495 delete_insn (insn);
2496 }
2497 }
2498 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2499 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2500 NULL_PTR, 0,
2501 GET_MODE (SET_DEST (body))))
2502 {
2503 /* This handles the case where we have two consecutive
2504 assignments of the same constant to pseudos that didn't
2505 get a hard reg. Each SET from the constant will be
2506 converted into a SET of the spill register and an
2507 output reload will be made following it. This produces
2508 two loads of the same constant into the same spill
2509 register. */
2510
2511 rtx in_insn = insn;
2512
2513 /* Look back for a death note for the first reg.
2514 If there is one, it is no longer accurate. */
2515 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2516 {
2517 if ((GET_CODE (in_insn) == INSN
2518 || GET_CODE (in_insn) == JUMP_INSN)
2519 && find_regno_note (in_insn, REG_DEAD, dreg))
2520 {
2521 remove_death (dreg, in_insn);
2522 break;
2523 }
2524 in_insn = PREV_INSN (in_insn);
2525 }
2526
2527 /* Delete the second load of the value. */
2528 delete_insn (insn);
2529 }
2530 }
2531 else if (GET_CODE (body) == PARALLEL)
2532 {
2533 /* If each part is a set between two identical registers or
2534 a USE or CLOBBER, delete the insn. */
2535 int i, sreg, dreg;
2536 rtx tem;
2537
2538 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2539 {
2540 tem = XVECEXP (body, 0, i);
2541 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2542 continue;
2543
2544 if (GET_CODE (tem) != SET
2545 || (sreg = true_regnum (SET_SRC (tem))) < 0
2546 || (dreg = true_regnum (SET_DEST (tem))) < 0
2547 || dreg != sreg)
2548 break;
2549 }
2550
2551 if (i < 0)
2552 delete_insn (insn);
2553 }
2554 /* Also delete insns to store bit fields if they are no-ops. */
2555 /* Not worth the hair to detect this in the big-endian case. */
2556 else if (! BYTES_BIG_ENDIAN
2557 && GET_CODE (body) == SET
2558 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2559 && XEXP (SET_DEST (body), 2) == const0_rtx
2560 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2561 && ! (GET_CODE (SET_SRC (body)) == MEM
2562 && MEM_VOLATILE_P (SET_SRC (body))))
2563 delete_insn (insn);
2564 }
2565 insn = next;
2566 }
2567 }
2568
2569 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2570 If so indicate that this function can drop off the end by returning
2571 1, else return 0.
2572
2576 DELETE_FINAL_NOTE indicates whether we should delete the note
2577 if we find it. */
2578
2579 static int
2580 calculate_can_reach_end (last, delete_final_note)
2581 rtx last;
2582 int delete_final_note;
2583 {
2584 rtx insn = last;
2585 int n_labels = 1;
2586
2587 while (insn != NULL_RTX)
2588 {
2589 int ok = 0;
2590
2591 /* One label can follow the end-note: the return label. */
2592 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2593 ok = 1;
2594 /* Ordinary insns can follow it if returning a structure. */
2595 else if (GET_CODE (insn) == INSN)
2596 ok = 1;
2597 /* If machine uses explicit RETURN insns, no epilogue,
2598 then one of them follows the note. */
2599 else if (GET_CODE (insn) == JUMP_INSN
2600 && GET_CODE (PATTERN (insn)) == RETURN)
2601 ok = 1;
2602 /* A barrier can follow the return insn. */
2603 else if (GET_CODE (insn) == BARRIER)
2604 ok = 1;
2605 /* Other kinds of notes can follow also. */
2606 else if (GET_CODE (insn) == NOTE
2607 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2608 ok = 1;
2609
2610 if (ok != 1)
2611 break;
2612
2613 insn = PREV_INSN (insn);
2614 }
2615
2616 /* See if we backed up to the appropriate type of note. */
2617 if (insn != NULL_RTX
2618 && GET_CODE (insn) == NOTE
2619 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
2620 {
2621 if (delete_final_note)
2622 delete_insn (insn);
2623 return 1;
2624 }
2625
2626 return 0;
2627 }
2628
2629 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2630 jump. Assume that this unconditional jump is to the exit test code. If
2631 the code is sufficiently simple, make a copy of it before INSN,
2632 followed by a jump to the exit of the loop. Then delete the unconditional
2633 jump after INSN.
2634
2635 Return 1 if we made the change, else 0.
2636
2637 This is only safe immediately after a regscan pass because it uses the
2638 values of regno_first_uid and regno_last_uid. */
2639
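/* Schematically (a sketch): a loop entered through

       NOTE_INSN_LOOP_BEG;  goto TEST;  TOP: body;  TEST: exit test;
       if (ok) goto TOP;

   becomes one guarded by a duplicated test:

       copy of exit test (jumping past the loop on failure);
       NOTE_INSN_LOOP_BEG;  TOP: body;  TEST: exit test;
       if (ok) goto TOP;

   so the loop body is no longer entered by a jump.  */
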
2640 static int
2641 duplicate_loop_exit_test (loop_start)
2642 rtx loop_start;
2643 {
2644 rtx insn, set, reg, p, link;
2645 rtx copy = 0, first_copy = 0;
2646 int num_insns = 0;
2647 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2648 rtx lastexit;
2649 int max_reg = max_reg_num ();
2650 rtx *reg_map = 0;
2651
2652 /* Scan the exit code. We do not perform this optimization if any insn:
2653
2654 is a CALL_INSN
2655 is a CODE_LABEL
2656 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2657 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2658 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2659 is not valid.
2660
2661 We also do not do this if we find an insn with ASM_OPERANDS. While
2662 this restriction should not be necessary, copying an insn with
2663 ASM_OPERANDS can confuse asm_noperands in some cases.
2664
2665 Also, don't do this if the exit code is more than 20 insns. */
2666
2667 for (insn = exitcode;
2668 insn
2669 && ! (GET_CODE (insn) == NOTE
2670 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2671 insn = NEXT_INSN (insn))
2672 {
2673 switch (GET_CODE (insn))
2674 {
2675 case CODE_LABEL:
2676 case CALL_INSN:
2677 return 0;
2678 case NOTE:
2679 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2680 a jump immediately after the loop start that branches outside
2681 the loop but within an outer loop, near the exit test.
             If we copied this exit test and created a phony
             NOTE_INSN_LOOP_VTOP, this could make instructions immediately
             before the exit test look as if they could safely be moved
             out of the loop even though they might never actually be
             executed.  This can be avoided by checking here for
             NOTE_INSN_LOOP_CONT.  */
2687
2688 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2689 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2690 return 0;
2691
2692 if (optimize < 2
2693 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2694 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2695 /* If we were to duplicate this code, we would not move
2696 the BLOCK notes, and so debugging the moved code would
2697 be difficult. Thus, we only move the code with -O2 or
2698 higher. */
2699 return 0;
2700
2701 break;
2702 case JUMP_INSN:
2703 case INSN:
2704 /* The code below would grossly mishandle REG_WAS_0 notes,
2705 so get rid of them here. */
2706 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2707 remove_note (insn, p);
2708 if (++num_insns > 20
2709 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2710 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2711 return 0;
2712 break;
2713 default:
2714 break;
2715 }
2716 }
2717
2718 /* Unless INSN is zero, we can do the optimization. */
2719 if (insn == 0)
2720 return 0;
2721
2722 lastexit = insn;
2723
2724 /* See if any insn sets a register only used in the loop exit code and
2725 not a user variable. If so, replace it with a new register. */
2726 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2727 if (GET_CODE (insn) == INSN
2728 && (set = single_set (insn)) != 0
2729 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2730 || (GET_CODE (reg) == SUBREG
2731 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2732 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2733 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
2734 {
2735 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2736 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2737 break;
2738
2739 if (p != lastexit)
2740 {
2741 /* We can do the replacement. Allocate reg_map if this is the
2742 first replacement we found. */
2743 if (reg_map == 0)
2744 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
2745
2746 REG_LOOP_TEST_P (reg) = 1;
2747
2748 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2749 }
2750 }
2751
2752 /* Now copy each insn. */
2753 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2754 {
2755 switch (GET_CODE (insn))
2756 {
2757 case BARRIER:
2758 copy = emit_barrier_before (loop_start);
2759 break;
2760 case NOTE:
2761 /* Only copy line-number notes. */
2762 if (NOTE_LINE_NUMBER (insn) >= 0)
2763 {
2764 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2765 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2766 }
2767 break;
2768
2769 case INSN:
2770 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
2771 if (reg_map)
2772 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2773
2774 mark_jump_label (PATTERN (copy), copy, 0);
2775
2776 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2777 make them. */
2778 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2779 if (REG_NOTE_KIND (link) != REG_LABEL)
2780 REG_NOTES (copy)
2781 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
2782 XEXP (link, 0),
2783 REG_NOTES (copy)));
2784 if (reg_map && REG_NOTES (copy))
2785 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2786 break;
2787
2788 case JUMP_INSN:
2789 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)), loop_start);
2790 if (reg_map)
2791 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2792 mark_jump_label (PATTERN (copy), copy, 0);
2793 if (REG_NOTES (insn))
2794 {
2795 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
2796 if (reg_map)
2797 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2798 }
2799
2800 /* If this is a simple jump, add it to the jump chain. */
2801
2802 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2803 && simplejump_p (copy))
2804 {
2805 jump_chain[INSN_UID (copy)]
2806 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2807 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2808 }
2809 break;
2810
2811 default:
2812 abort ();
2813 }
2814
2815 /* Record the first insn we copied. We need it so that we can
2816 scan the copied insns for new pseudo registers. */
2817 if (! first_copy)
2818 first_copy = copy;
2819 }
2820
2821 /* Now clean up by emitting a jump to the end label and deleting the jump
2822 at the start of the loop. */
2823 if (! copy || GET_CODE (copy) != BARRIER)
2824 {
2825 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2826 loop_start);
2827
2828 /* Record the first insn we copied. We need it so that we can
2829 scan the copied insns for new pseudo registers. This may not
2830 be strictly necessary since we should have copied at least one
2831 insn above. But I am going to be safe. */
2832 if (! first_copy)
2833 first_copy = copy;
2834
2835 mark_jump_label (PATTERN (copy), copy, 0);
2836 if (INSN_UID (copy) < max_jump_chain
2837 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2838 {
2839 jump_chain[INSN_UID (copy)]
2840 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2841 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2842 }
2843 emit_barrier_before (loop_start);
2844 }
2845
2846 /* Now scan from the first insn we copied to the last insn we copied
2847 (copy) for new pseudo registers. Do this after the code to jump to
2848 the end label since that might create a new pseudo too. */
2849 reg_scan_update (first_copy, copy, max_reg);
2850
2851 /* Mark the exit code as the virtual top of the converted loop. */
2852 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2853
2854 delete_insn (next_nonnote_insn (loop_start));
2855
2856 /* Clean up. */
2857 if (reg_map)
2858 free (reg_map);
2859
2860 return 1;
2861 }
2862 \f
2863 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2864 loop-end notes between START and END out before START. Assume that
2865 END is not such a note. START may be such a note. Returns the value
2866 of the new starting insn, which may be different if the original start
2867 was such a note. */
2868
2869 rtx
2870 squeeze_notes (start, end)
2871 rtx start, end;
2872 {
2873 rtx insn;
2874 rtx next;
2875
2876 for (insn = start; insn != end; insn = next)
2877 {
2878 next = NEXT_INSN (insn);
2879 if (GET_CODE (insn) == NOTE
2880 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2881 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2882 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2883 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2884 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2885 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2886 {
2887 if (insn == start)
2888 start = next;
2889 else
2890 {
2891 rtx prev = PREV_INSN (insn);
2892 PREV_INSN (insn) = PREV_INSN (start);
2893 NEXT_INSN (insn) = start;
2894 NEXT_INSN (PREV_INSN (insn)) = insn;
2895 PREV_INSN (NEXT_INSN (insn)) = insn;
2896 NEXT_INSN (prev) = next;
2897 PREV_INSN (next) = prev;
2898 }
2899 }
2900 }
2901
2902 return start;
2903 }
2904 \f
2905 /* Compare the instructions before insn E1 with those before E2
2906 to find an opportunity for cross jumping.
2907 (This means detecting identical sequences of insns followed by
2908 jumps to the same place, or followed by a label and a jump
2909 to that label, and replacing one with a jump to the other.)
2910
2911 Assume E1 is a jump that jumps to label E2
2912 (that is not always true but it might as well be).
2913 Find the longest possible equivalent sequences
2914 and store the first insns of those sequences into *F1 and *F2.
2915 Store zero there if no equivalent preceding instructions are found.
2916
2917 We give up if we find a label in stream 1.
2918 Actually we could transfer that label into stream 2. */
2919
2920 static void
2921 find_cross_jump (e1, e2, minimum, f1, f2)
2922 rtx e1, e2;
2923 int minimum;
2924 rtx *f1, *f2;
2925 {
2926 register rtx i1 = e1, i2 = e2;
2927 register rtx p1, p2;
2928 int lose = 0;
2929
2930 rtx last1 = 0, last2 = 0;
2931 rtx afterlast1 = 0, afterlast2 = 0;
2932
2933 *f1 = 0;
2934 *f2 = 0;
2935
2936 while (1)
2937 {
2938 i1 = prev_nonnote_insn (i1);
2939
2940 i2 = PREV_INSN (i2);
2941 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2942 i2 = PREV_INSN (i2);
2943
2944 if (i1 == 0)
2945 break;
2946
2947 /* Don't allow the range of insns preceding E1 or E2
2948 to include the other (E2 or E1). */
2949 if (i2 == e1 || i1 == e2)
2950 break;
2951
2952 /* If we will get to this code by jumping, those jumps will be
2953 tensioned to go directly to the new label (before I2),
2954 so this cross-jumping won't cost extra. So reduce the minimum. */
2955 if (GET_CODE (i1) == CODE_LABEL)
2956 {
2957 --minimum;
2958 break;
2959 }
2960
2961 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2962 break;
2963
2964 /* Avoid moving insns across EH regions if either of the insns
2965 can throw. */
2966 if (flag_exceptions
2967 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
2968 && !in_same_eh_region (i1, i2))
2969 break;
2970
2971 p1 = PATTERN (i1);
2972 p2 = PATTERN (i2);
2973
2974 /* If this is a CALL_INSN, compare register usage information.
2975 If we don't check this on stack register machines, the two
2976 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2977 numbers of stack registers in the same basic block.
2978 If we don't check this on machines with delay slots, a delay slot may
2979 be filled that clobbers a parameter expected by the subroutine.
2980
2981 ??? We take the simple route for now and assume that if they're
2982 equal, they were constructed identically. */
2983
2984 if (GET_CODE (i1) == CALL_INSN
2985 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2986 CALL_INSN_FUNCTION_USAGE (i2)))
2987 lose = 1;
2988
2989 #ifdef STACK_REGS
2990 /* If cross_jump_death_matters is not 0, the insn's mode
2991 indicates whether or not the insn contains any stack-like
2992 regs. */
2993
2994 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
2995 {
2996 /* If register stack conversion has already been done, then
2997 death notes must also be compared before it is certain that
2998 the two instruction streams match. */
2999
3000 rtx note;
3001 HARD_REG_SET i1_regset, i2_regset;
3002
3003 CLEAR_HARD_REG_SET (i1_regset);
3004 CLEAR_HARD_REG_SET (i2_regset);
3005
3006 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3007 if (REG_NOTE_KIND (note) == REG_DEAD
3008 && STACK_REG_P (XEXP (note, 0)))
3009 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3010
3011 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3012 if (REG_NOTE_KIND (note) == REG_DEAD
3013 && STACK_REG_P (XEXP (note, 0)))
3014 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3015
3016 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3017
3018 lose = 1;
3019
3020 done:
3021 ;
3022 }
3023 #endif
3024
3025 /* Don't allow old-style asm or volatile extended asms to be accepted
3026 for cross jumping purposes. It is conceptually correct to allow
3027 them, since cross-jumping preserves the dynamic instruction order
3028 even though it is changing the static instruction order. However,
3029 if an asm is being used to emit an assembler pseudo-op, such as
3030 the MIPS `.set reorder' pseudo-op, then the static instruction order
3031 matters and it must be preserved. */
3032 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
3033 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
3034 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
3035 lose = 1;
3036
3037 if (lose || GET_CODE (p1) != GET_CODE (p2)
3038 || ! rtx_renumbered_equal_p (p1, p2))
3039 {
3040 /* The following code helps take care of G++ cleanups. */
3041 rtx equiv1;
3042 rtx equiv2;
3043
3044 if (!lose && GET_CODE (p1) == GET_CODE (p2)
3045 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
3046 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
3047 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
3048 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
3049 /* If the equivalences are not to a constant, they may
3050 reference pseudos that no longer exist, so we can't
3051 use them. */
3052 && CONSTANT_P (XEXP (equiv1, 0))
3053 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3054 {
3055 rtx s1 = single_set (i1);
3056 rtx s2 = single_set (i2);
3057 if (s1 != 0 && s2 != 0
3058 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3059 {
3060 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3061 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3062 if (! rtx_renumbered_equal_p (p1, p2))
3063 cancel_changes (0);
3064 else if (apply_change_group ())
3065 goto win;
3066 }
3067 }
3068
3069 /* Insns fail to match; cross jumping is limited to the following
3070 insns. */
3071
3072 #ifdef HAVE_cc0
3073 /* Don't allow the insn after a compare to be shared by
3074 cross-jumping unless the compare is also shared.
3075 Here, if either of these non-matching insns is a compare,
3076 exclude the following insn from possible cross-jumping. */
3077 if (sets_cc0_p (p1) || sets_cc0_p (p2))
3078 last1 = afterlast1, last2 = afterlast2, ++minimum;
3079 #endif
3080
3081 /* If cross-jumping here will feed a jump-around-jump
3082 optimization, this jump won't cost extra, so reduce
3083 the minimum. */
3084 if (GET_CODE (i1) == JUMP_INSN
3085 && JUMP_LABEL (i1)
3086 && prev_real_insn (JUMP_LABEL (i1)) == e1)
3087 --minimum;
3088 break;
3089 }
3090
3091 win:
3092 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3093 {
3094 /* Ok, this insn is potentially includable in a cross-jump here. */
3095 afterlast1 = last1, afterlast2 = last2;
3096 last1 = i1, last2 = i2, --minimum;
3097 }
3098 }
3099
3100 if (minimum <= 0 && last1 != 0 && last1 != e1)
3101 *f1 = last1, *f2 = last2;
3102 }
3103
3104 static void
3105 do_cross_jump (insn, newjpos, newlpos)
3106 rtx insn, newjpos, newlpos;
3107 {
3108 /* Find an existing label at this point
3109 or make a new one if there is none. */
3110 register rtx label = get_label_before (newlpos);
3111
3112 /* Make the same jump insn jump to the new point. */
3113 if (GET_CODE (PATTERN (insn)) == RETURN)
3114 {
3115 /* Remove from jump chain of returns. */
3116 delete_from_jump_chain (insn);
3117 /* Change the insn. */
3118 PATTERN (insn) = gen_jump (label);
3119 INSN_CODE (insn) = -1;
3120 JUMP_LABEL (insn) = label;
3121 LABEL_NUSES (label)++;
      /* Add to the jump chain of the new label.  */
3123 if (INSN_UID (label) < max_jump_chain
3124 && INSN_UID (insn) < max_jump_chain)
3125 {
3126 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
3127 jump_chain[INSN_UID (label)] = insn;
3128 }
3129 }
3130 else
3131 redirect_jump (insn, label);
3132
3133 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3134 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3135 the NEWJPOS stream. */
3136
3137 while (newjpos != insn)
3138 {
3139 rtx lnote;
3140
3141 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
3142 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
3143 || REG_NOTE_KIND (lnote) == REG_EQUIV)
3144 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
3145 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
3146 remove_note (newlpos, lnote);
3147
3148 delete_insn (newjpos);
3149 newjpos = next_real_insn (newjpos);
3150 newlpos = next_real_insn (newlpos);
3151 }
3152 }
3153 \f
3154 /* Return the label before INSN, or put a new label there. */
3155
3156 rtx
3157 get_label_before (insn)
3158 rtx insn;
3159 {
3160 rtx label;
3161
3162 /* Find an existing label at this point
3163 or make a new one if there is none. */
3164 label = prev_nonnote_insn (insn);
3165
3166 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3167 {
3168 rtx prev = PREV_INSN (insn);
3169
3170 label = gen_label_rtx ();
3171 emit_label_after (label, prev);
3172 LABEL_NUSES (label) = 0;
3173 }
3174 return label;
3175 }
3176
3177 /* Return the label after INSN, or put a new label there. */
3178
3179 rtx
3180 get_label_after (insn)
3181 rtx insn;
3182 {
3183 rtx label;
3184
3185 /* Find an existing label at this point
3186 or make a new one if there is none. */
3187 label = next_nonnote_insn (insn);
3188
3189 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3190 {
3191 label = gen_label_rtx ();
3192 emit_label_after (label, insn);
3193 LABEL_NUSES (label) = 0;
3194 }
3195 return label;
3196 }
3197 \f
3198 /* Return 1 if INSN is a jump that jumps to right after TARGET
3199 only on the condition that TARGET itself would drop through.
3200 Assumes that TARGET is a conditional jump. */
3201
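/* For example (a sketch):

       TARGET:  if (a == b) goto M;
       L:       ...                     <- L immediately follows TARGET
       ...
       INSN:    if (a != b) goto L;

   INSN reaches L exactly when TARGET would have dropped through to it,
   so the two jumps are "opposing".  */
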
3202 static int
3203 jump_back_p (insn, target)
3204 rtx insn, target;
3205 {
3206 rtx cinsn, ctarget;
3207 enum rtx_code codei, codet;
3208
3209 if (simplejump_p (insn) || ! condjump_p (insn)
3210 || simplejump_p (target)
3211 || target != prev_real_insn (JUMP_LABEL (insn)))
3212 return 0;
3213
3214 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3215 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3216
3217 codei = GET_CODE (cinsn);
3218 codet = GET_CODE (ctarget);
3219
3220 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3221 {
3222 if (! can_reverse_comparison_p (cinsn, insn))
3223 return 0;
3224 codei = reverse_condition (codei);
3225 }
3226
3227 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3228 {
3229 if (! can_reverse_comparison_p (ctarget, target))
3230 return 0;
3231 codet = reverse_condition (codet);
3232 }
3233
3234 return (codei == codet
3235 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3236 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3237 }
3238 \f
3239 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3240 return non-zero if it is safe to reverse this comparison. It is if our
3241 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3242 this is known to be an integer comparison. */
3243
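/* For instance, (gt x y) on IEEE floats must not be rewritten as
   (le x y) -- both are false when either operand is a NaN -- whereas
   (eq x y), (ne x y), and all integer comparisons reverse safely.  */
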
3244 int
3245 can_reverse_comparison_p (comparison, insn)
3246 rtx comparison;
3247 rtx insn;
3248 {
3249 rtx arg0;
3250
3251 /* If this is not actually a comparison, we can't reverse it. */
3252 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3253 return 0;
3254
3255 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3256 /* If this is an NE comparison, it is safe to reverse it to an EQ
3257 comparison and vice versa, even for floating point. If no operands
3258 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3259 always false and NE is always true, so the reversal is also valid. */
3260 || flag_fast_math
3261 || GET_CODE (comparison) == NE
3262 || GET_CODE (comparison) == EQ)
3263 return 1;
3264
3265 arg0 = XEXP (comparison, 0);
3266
3267 /* Make sure ARG0 is one of the actual objects being compared. If we
3268 can't do this, we can't be sure the comparison can be reversed.
3269
3270 Handle cc0 and a MODE_CC register. */
3271 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3272 #ifdef HAVE_cc0
3273 || arg0 == cc0_rtx
3274 #endif
3275 )
3276 {
      rtx prev;
3278 rtx set;
3279
      /* First see if the condition code mode alone is enough to say we can
         reverse the condition.  If not, then search backwards for a set of
         ARG0.  We do not need to check for an insn clobbering it since valid
         code will contain a set with no intervening clobber.  But
         stop when we reach a label.  */
3285 #ifdef REVERSIBLE_CC_MODE
3286 if (GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC
3287 && REVERSIBLE_CC_MODE (GET_MODE (arg0)))
3288 return 1;
3289 #endif
3290
3291 for (prev = prev_nonnote_insn (insn);
3292 prev != 0 && GET_CODE (prev) != CODE_LABEL;
3293 prev = prev_nonnote_insn (prev))
3294 if ((set = single_set (prev)) != 0
3295 && rtx_equal_p (SET_DEST (set), arg0))
3296 {
3297 arg0 = SET_SRC (set);
3298
3299 if (GET_CODE (arg0) == COMPARE)
3300 arg0 = XEXP (arg0, 0);
3301 break;
3302 }
3303 }
3304
3305 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3306 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3307 return (GET_CODE (arg0) == CONST_INT
3308 || (GET_MODE (arg0) != VOIDmode
3309 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3310 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3311 }
3312
3313 /* Given an rtx-code for a comparison, return the code for the negated
3314 comparison. If no such code exists, return UNKNOWN.
3315
3316 WATCH OUT! reverse_condition is not safe to use on a jump that might
3317 be acting on the results of an IEEE floating point comparison, because
3318 of the special treatment of non-signaling nans in comparisons.
3319 Use can_reverse_comparison_p to be sure. */
3320
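/* E.g. reverse_condition (GT) == LE and reverse_condition (LTU) == GEU,
   while the unordered-aware codes UNLT, UNLE, UNGT, UNGE, UNEQ and LTGT
   simply yield UNKNOWN here.  */
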
3321 enum rtx_code
3322 reverse_condition (code)
3323 enum rtx_code code;
3324 {
3325 switch (code)
3326 {
3327 case EQ:
3328 return NE;
3329 case NE:
3330 return EQ;
3331 case GT:
3332 return LE;
3333 case GE:
3334 return LT;
3335 case LT:
3336 return GE;
3337 case LE:
3338 return GT;
3339 case GTU:
3340 return LEU;
3341 case GEU:
3342 return LTU;
3343 case LTU:
3344 return GEU;
3345 case LEU:
3346 return GTU;
3347 case UNORDERED:
3348 return ORDERED;
3349 case ORDERED:
3350 return UNORDERED;
3351
3352 case UNLT:
3353 case UNLE:
3354 case UNGT:
3355 case UNGE:
3356 case UNEQ:
3357 case LTGT:
3358 return UNKNOWN;
3359
3360 default:
3361 abort ();
3362 }
3363 }
3364
3365 /* Similar, but we're allowed to generate unordered comparisons, which
3366 makes it safe for IEEE floating-point. Of course, we have to recognize
3367 that the target will support them too... */
3368
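/* E.g. the negation of (gt x y) here is (unle x y) -- less, equal, or
   unordered -- which remains correct when a NaN operand makes GT
   false.  */
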
3369 enum rtx_code
3370 reverse_condition_maybe_unordered (code)
3371 enum rtx_code code;
3372 {
3373 /* Non-IEEE formats don't have unordered conditions. */
3374 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
3375 return reverse_condition (code);
3376
3377 switch (code)
3378 {
3379 case EQ:
3380 return NE;
3381 case NE:
3382 return EQ;
3383 case GT:
3384 return UNLE;
3385 case GE:
3386 return UNLT;
3387 case LT:
3388 return UNGE;
3389 case LE:
3390 return UNGT;
3391 case LTGT:
3392 return UNEQ;
3393 case GTU:
3394 return LEU;
3395 case GEU:
3396 return LTU;
3397 case LTU:
3398 return GEU;
3399 case LEU:
3400 return GTU;
3401 case UNORDERED:
3402 return ORDERED;
3403 case ORDERED:
3404 return UNORDERED;
3405 case UNLT:
3406 return GE;
3407 case UNLE:
3408 return GT;
3409 case UNGT:
3410 return LE;
3411 case UNGE:
3412 return LT;
3413 case UNEQ:
3414 return LTGT;
3415
3416 default:
3417 abort ();
3418 }
3419 }
3420
3421 /* Similar, but return the code when two operands of a comparison are swapped.
3422 This IS safe for IEEE floating-point. */
3423
3424 enum rtx_code
3425 swap_condition (code)
3426 enum rtx_code code;
3427 {
3428 switch (code)
3429 {
3430 case EQ:
3431 case NE:
3432 case UNORDERED:
3433 case ORDERED:
3434 case UNEQ:
3435 case LTGT:
3436 return code;
3437
3438 case GT:
3439 return LT;
3440 case GE:
3441 return LE;
3442 case LT:
3443 return GT;
3444 case LE:
3445 return GE;
3446 case GTU:
3447 return LTU;
3448 case GEU:
3449 return LEU;
3450 case LTU:
3451 return GTU;
3452 case LEU:
3453 return GEU;
3454 case UNLT:
3455 return UNGT;
3456 case UNLE:
3457 return UNGE;
3458 case UNGT:
3459 return UNLT;
3460 case UNGE:
3461 return UNLE;
3462
3463 default:
3464 abort ();
3465 }
3466 }
3467
3468 /* Given a comparison CODE, return the corresponding unsigned comparison.
3469 If CODE is an equality comparison or already an unsigned comparison,
3470 CODE is returned. */
3471
3472 enum rtx_code
3473 unsigned_condition (code)
3474 enum rtx_code code;
3475 {
3476 switch (code)
3477 {
3478 case EQ:
3479 case NE:
3480 case GTU:
3481 case GEU:
3482 case LTU:
3483 case LEU:
3484 return code;
3485
3486 case GT:
3487 return GTU;
3488 case GE:
3489 return GEU;
3490 case LT:
3491 return LTU;
3492 case LE:
3493 return LEU;
3494
3495 default:
3496 abort ();
3497 }
3498 }
3499
3500 /* Similarly, return the signed version of a comparison. */
3501
3502 enum rtx_code
3503 signed_condition (code)
3504 enum rtx_code code;
3505 {
3506 switch (code)
3507 {
3508 case EQ:
3509 case NE:
3510 case GT:
3511 case GE:
3512 case LT:
3513 case LE:
3514 return code;
3515
3516 case GTU:
3517 return GT;
3518 case GEU:
3519 return GE;
3520 case LTU:
3521 return LT;
3522 case LEU:
3523 return LE;
3524
3525 default:
3526 abort ();
3527 }
3528 }
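
/* For example, with 32-bit operands (const_int -1) is less than
   (const_int 1) under LT but greater under GTU, since -1 reads as
   0xffffffff when unsigned.  Equality is unaffected by signedness,
   which is why EQ and NE pass through both functions unchanged.  */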
3529 \f
3530 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3531 truth of CODE1 implies the truth of CODE2. */
3532
3533 int
3534 comparison_dominates_p (code1, code2)
3535 enum rtx_code code1, code2;
3536 {
3537 if (code1 == code2)
3538 return 1;
3539
3540 switch (code1)
3541 {
3542 case EQ:
3543 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
3544 || code2 == ORDERED)
3545 return 1;
3546 break;
3547
3548 case LT:
3549 if (code2 == LE || code2 == NE || code2 == ORDERED)
3550 return 1;
3551 break;
3552
3553 case GT:
3554 if (code2 == GE || code2 == NE || code2 == ORDERED)
3555 return 1;
3556 break;
3557
3558 case GE:
3559 case LE:
3560 if (code2 == ORDERED)
3561 return 1;
3562 break;
3563
3564 case LTGT:
3565 if (code2 == NE || code2 == ORDERED)
3566 return 1;
3567 break;
3568
3569 case LTU:
3570 if (code2 == LEU || code2 == NE)
3571 return 1;
3572 break;
3573
3574 case GTU:
3575 if (code2 == GEU || code2 == NE)
3576 return 1;
3577 break;
3578
3579 case UNORDERED:
3580 if (code2 == NE)
3581 return 1;
3582 break;
3583
3584 default:
3585 break;
3586 }
3587
3588 return 0;
3589 }
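
/* For example, comparison_dominates_p (EQ, GE) is 1, since x == y
   implies x >= y: once a branch on EQ is known to be taken, a later
   branch on GE over the same operands must be taken too.
   thread_jumps, below, relies on exactly this.  */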
3590 \f
3591 /* Return 1 if INSN is an unconditional jump and nothing else. */
3592
3593 int
3594 simplejump_p (insn)
3595 rtx insn;
3596 {
3597 return (GET_CODE (insn) == JUMP_INSN
3598 && GET_CODE (PATTERN (insn)) == SET
3599 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3600 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3601 }
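
/* For example, simplejump_p accepts

     (jump_insn (set (pc) (label_ref L)))

   but rejects conditional jumps, tablejumps, and jumps wrapped in a
   PARALLEL (for which see condjump_in_parallel_p below).  */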
3602
3603 /* Return nonzero if INSN is a (possibly) conditional jump
3604 and nothing more. */
3605
3606 int
3607 condjump_p (insn)
3608 rtx insn;
3609 {
3610 register rtx x = PATTERN (insn);
3611
3612 if (GET_CODE (x) != SET
3613 || GET_CODE (SET_DEST (x)) != PC)
3614 return 0;
3615
3616 x = SET_SRC (x);
3617 if (GET_CODE (x) == LABEL_REF)
3618 return 1;
3619   return (GET_CODE (x) == IF_THEN_ELSE
3620 	  && ((GET_CODE (XEXP (x, 2)) == PC
3621 	       && (GET_CODE (XEXP (x, 1)) == LABEL_REF
3622 		   || GET_CODE (XEXP (x, 1)) == RETURN))
3623 	      || (GET_CODE (XEXP (x, 1)) == PC
3624 		  && (GET_CODE (XEXP (x, 2)) == LABEL_REF
3625 		      || GET_CODE (XEXP (x, 2)) == RETURN))));
3628 }
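
/* For example, condjump_p accepts either arm order:

     (set (pc) (if_then_else (lt (reg) (const_int 0))
			     (label_ref L) (pc)))
     (set (pc) (if_then_else (lt (reg) (const_int 0))
			     (pc) (label_ref L)))

   the second form being a branch that is taken when the condition
   is false.  */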
3629
3630 /* Return nonzero if INSN is a (possibly) conditional jump inside a
3631 PARALLEL. */
3632
3633 int
3634 condjump_in_parallel_p (insn)
3635 rtx insn;
3636 {
3637 register rtx x = PATTERN (insn);
3638
3639 if (GET_CODE (x) != PARALLEL)
3640 return 0;
3641 else
3642 x = XVECEXP (x, 0, 0);
3643
3644 if (GET_CODE (x) != SET)
3645 return 0;
3646 if (GET_CODE (SET_DEST (x)) != PC)
3647 return 0;
3648 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3649 return 1;
3650 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3651 return 0;
3652 if (XEXP (SET_SRC (x), 2) == pc_rtx
3653 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3654 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3655 return 1;
3656 if (XEXP (SET_SRC (x), 1) == pc_rtx
3657 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3658 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3659 return 1;
3660 return 0;
3661 }
3662
3663 /* Return the label of a conditional jump. */
3664
3665 rtx
3666 condjump_label (insn)
3667 rtx insn;
3668 {
3669 register rtx x = PATTERN (insn);
3670
3671 if (GET_CODE (x) == PARALLEL)
3672 x = XVECEXP (x, 0, 0);
3673 if (GET_CODE (x) != SET)
3674 return NULL_RTX;
3675 if (GET_CODE (SET_DEST (x)) != PC)
3676 return NULL_RTX;
3677 x = SET_SRC (x);
3678 if (GET_CODE (x) == LABEL_REF)
3679 return x;
3680 if (GET_CODE (x) != IF_THEN_ELSE)
3681 return NULL_RTX;
3682 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
3683 return XEXP (x, 1);
3684 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
3685 return XEXP (x, 2);
3686 return NULL_RTX;
3687 }
3688
3689 /* Return true if INSN is a (possibly conditional) return insn. */
3690
3691 static int
3692 returnjump_p_1 (loc, data)
3693 rtx *loc;
3694 void *data ATTRIBUTE_UNUSED;
3695 {
3696 rtx x = *loc;
3697 return x && GET_CODE (x) == RETURN;
3698 }
3699
3700 int
3701 returnjump_p (insn)
3702 rtx insn;
3703 {
3704 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
3705 }
3706
3707 /* Return true if INSN is a jump that only transfers control and
3708 nothing more. */
3709
3710 int
3711 onlyjump_p (insn)
3712 rtx insn;
3713 {
3714 rtx set;
3715
3716 if (GET_CODE (insn) != JUMP_INSN)
3717 return 0;
3718
3719 set = single_set (insn);
3720 if (set == NULL)
3721 return 0;
3722 if (GET_CODE (SET_DEST (set)) != PC)
3723 return 0;
3724 if (side_effects_p (SET_SRC (set)))
3725 return 0;
3726
3727 return 1;
3728 }
3729
3730 #ifdef HAVE_cc0
3731
3732 /* Return 1 if X is an RTX that does nothing but set the condition codes
3733 and CLOBBER or USE registers.
3734    Return -1 if X explicitly sets the condition codes,
3735    but also does other things.  */
3736
3737 int
3738 sets_cc0_p (x)
3739 rtx x ATTRIBUTE_UNUSED;
3740 {
3741 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3742 return 1;
3743 if (GET_CODE (x) == PARALLEL)
3744 {
3745 int i;
3746 int sets_cc0 = 0;
3747 int other_things = 0;
3748 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3749 {
3750 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3751 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3752 sets_cc0 = 1;
3753 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3754 other_things = 1;
3755 }
3756 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3757 }
3758 return 0;
3759 }
3760 #endif
3761 \f
3762 /* Follow any unconditional jump at LABEL;
3763 return the ultimate label reached by any such chain of jumps.
3764 If LABEL is not followed by a jump, return LABEL.
3765    If the chain loops or we can't find the end, return LABEL,
3766    since that tells the caller to avoid changing the insn.
3767
3768 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3769 a USE or CLOBBER. */
3770
3771 rtx
3772 follow_jumps (label)
3773 rtx label;
3774 {
3775 register rtx insn;
3776 register rtx next;
3777 register rtx value = label;
3778 register int depth;
3779
3780 for (depth = 0;
3781 (depth < 10
3782 && (insn = next_active_insn (value)) != 0
3783 && GET_CODE (insn) == JUMP_INSN
3784 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3785 || GET_CODE (PATTERN (insn)) == RETURN)
3786 && (next = NEXT_INSN (insn))
3787 && GET_CODE (next) == BARRIER);
3788 depth++)
3789 {
3790 /* Don't chain through the insn that jumps into a loop
3791 from outside the loop,
3792 since that would create multiple loop entry jumps
3793 and prevent loop optimization. */
3794 rtx tem;
3795 if (!reload_completed)
3796 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3797 if (GET_CODE (tem) == NOTE
3798 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
3799 /* ??? Optional. Disables some optimizations, but makes
3800 gcov output more accurate with -O. */
3801 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
3802 return value;
3803
3804 /* If we have found a cycle, make the insn jump to itself. */
3805 if (JUMP_LABEL (insn) == label)
3806 return label;
3807
3808 tem = next_active_insn (JUMP_LABEL (insn));
3809 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3810 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3811 break;
3812
3813 value = JUMP_LABEL (insn);
3814 }
3815 if (depth == 10)
3816 return label;
3817 return value;
3818 }
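
/* For example, if label L1 is followed only by "jump L2" and L2 only
   by "jump L3", follow_jumps (L1) returns L3.  The depth limit of 10
   and the JUMP_LABEL (insn) == label test keep a cycle such as
   "L1: jump L1" from looping forever.  */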
3819
3820 /* Assuming that field IDX of X is a vector of label_refs,
3821 replace each of them by the ultimate label reached by it.
3822 Return nonzero if a change is made.
3823    Before reload, follow_jumps will not chain across a NOTE_INSN_LOOP_BEG.  */
3824
3825 static int
3826 tension_vector_labels (x, idx)
3827 register rtx x;
3828 register int idx;
3829 {
3830 int changed = 0;
3831 register int i;
3832 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3833 {
3834 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3835 register rtx nlabel = follow_jumps (olabel);
3836 if (nlabel && nlabel != olabel)
3837 {
3838 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3839 ++LABEL_NUSES (nlabel);
3840 if (--LABEL_NUSES (olabel) == 0)
3841 delete_insn (olabel);
3842 changed = 1;
3843 }
3844 }
3845 return changed;
3846 }
3847 \f
3848 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3849 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3850 in INSN, then store one of them in JUMP_LABEL (INSN).
3851 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3852 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3853 Also, when there are consecutive labels, canonicalize on the last of them.
3854
3855 Note that two labels separated by a loop-beginning note
3856 must be kept distinct if we have not yet done loop-optimization,
3857 because the gap between them is where loop-optimize
3858    will want to move invariant code to.  A nonzero CROSS_JUMP tells us
3859    that loop-optimization has already been done.
3860
3861 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3862 two labels distinct if they are separated by only USE or CLOBBER insns. */
3863
3864 static void
3865 mark_jump_label (x, insn, cross_jump)
3866 register rtx x;
3867 rtx insn;
3868 int cross_jump;
3869 {
3870 register RTX_CODE code = GET_CODE (x);
3871 register int i;
3872 register const char *fmt;
3873
3874 switch (code)
3875 {
3876 case PC:
3877 case CC0:
3878 case REG:
3879 case SUBREG:
3880 case CONST_INT:
3881 case SYMBOL_REF:
3882 case CONST_DOUBLE:
3883 case CLOBBER:
3884 case CALL:
3885 return;
3886
3887 case MEM:
3888 /* If this is a constant-pool reference, see if it is a label. */
3889 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3890 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3891 mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
3892 break;
3893
3894 case LABEL_REF:
3895 {
3896 rtx label = XEXP (x, 0);
3897 rtx olabel = label;
3898 rtx note;
3899 rtx next;
3900
3901 if (GET_CODE (label) != CODE_LABEL)
3902 abort ();
3903
3904 /* Ignore references to labels of containing functions. */
3905 if (LABEL_REF_NONLOCAL_P (x))
3906 break;
3907
3908 /* If there are other labels following this one,
3909 replace it with the last of the consecutive labels. */
3910 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3911 {
3912 if (GET_CODE (next) == CODE_LABEL)
3913 label = next;
3914 else if (cross_jump && GET_CODE (next) == INSN
3915 && (GET_CODE (PATTERN (next)) == USE
3916 || GET_CODE (PATTERN (next)) == CLOBBER))
3917 continue;
3918 else if (GET_CODE (next) != NOTE)
3919 break;
3920 else if (! cross_jump
3921 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3922 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
3923 /* ??? Optional. Disables some optimizations, but
3924 makes gcov output more accurate with -O. */
3925 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
3926 break;
3927 }
3928
3929 XEXP (x, 0) = label;
3930 if (! insn || ! INSN_DELETED_P (insn))
3931 ++LABEL_NUSES (label);
3932
3933 if (insn)
3934 {
3935 if (GET_CODE (insn) == JUMP_INSN)
3936 JUMP_LABEL (insn) = label;
3937
3938 /* If we've changed OLABEL and we had a REG_LABEL note
3939 for it, update it as well. */
3940 else if (label != olabel
3941 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3942 XEXP (note, 0) = label;
3943
3944 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3945 is one. */
3946 else if (! find_reg_note (insn, REG_LABEL, label))
3947 {
3948 		      /* This code used to ignore labels which referred to dispatch
3949 			 tables to avoid flow.c generating worse code.
3950 
3951 			 However, in the presence of global optimizations like
3952 gcse which call find_basic_blocks without calling
3953 life_analysis, not recording such labels will lead
3954 to compiler aborts because of inconsistencies in the
3955 flow graph. So we go ahead and record the label.
3956
3957 It may also be the case that the optimization argument
3958 is no longer valid because of the more accurate cfg
3959 we build in find_basic_blocks -- it no longer pessimizes
3960 code when it finds a REG_LABEL note. */
3961 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
3962 REG_NOTES (insn));
3963 }
3964 }
3965 return;
3966 }
3967
3968 /* Do walk the labels in a vector, but not the first operand of an
3969 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3970 case ADDR_VEC:
3971 case ADDR_DIFF_VEC:
3972 if (! INSN_DELETED_P (insn))
3973 {
3974 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
3975
3976 for (i = 0; i < XVECLEN (x, eltnum); i++)
3977 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
3978 }
3979 return;
3980
3981 default:
3982 break;
3983 }
3984
3985 fmt = GET_RTX_FORMAT (code);
3986 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3987 {
3988 if (fmt[i] == 'e')
3989 mark_jump_label (XEXP (x, i), insn, cross_jump);
3990 else if (fmt[i] == 'E')
3991 {
3992 register int j;
3993 for (j = 0; j < XVECLEN (x, i); j++)
3994 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
3995 }
3996 }
3997 }
3998
3999 /* If all INSN does is set the pc, delete it,
4000 and delete the insn that set the condition codes for it
4001    if that is all the previous insn did.  */
4002
4003 void
4004 delete_jump (insn)
4005 rtx insn;
4006 {
4007 register rtx set = single_set (insn);
4008
4009 if (set && GET_CODE (SET_DEST (set)) == PC)
4010 delete_computation (insn);
4011 }
4012
4013 /* Verify INSN is a BARRIER and delete it. */
4014
4015 void
4016 delete_barrier (insn)
4017 rtx insn;
4018 {
4019 if (GET_CODE (insn) != BARRIER)
4020 abort ();
4021
4022 delete_insn (insn);
4023 }
4024
4025 /* Recursively delete prior insns that compute the value (used only by INSN
4026 which the caller is deleting) stored in the register mentioned by NOTE
4027 which is a REG_DEAD note associated with INSN. */
4028
4029 static void
4030 delete_prior_computation (note, insn)
4031 rtx note;
4032 rtx insn;
4033 {
4034 rtx our_prev;
4035 rtx reg = XEXP (note, 0);
4036
4037 for (our_prev = prev_nonnote_insn (insn);
4038 our_prev && (GET_CODE (our_prev) == INSN
4039 || GET_CODE (our_prev) == CALL_INSN);
4040 our_prev = prev_nonnote_insn (our_prev))
4041 {
4042 rtx pat = PATTERN (our_prev);
4043
4044 /* If we reach a CALL which is not calling a const function
4045 or the callee pops the arguments, then give up. */
4046 if (GET_CODE (our_prev) == CALL_INSN
4047 && (! CONST_CALL_P (our_prev)
4048 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
4049 break;
4050
4051 /* If we reach a SEQUENCE, it is too complex to try to
4052 do anything with it, so give up. */
4053 if (GET_CODE (pat) == SEQUENCE)
4054 break;
4055
4056 if (GET_CODE (pat) == USE
4057 && GET_CODE (XEXP (pat, 0)) == INSN)
4058 /* reorg creates USEs that look like this. We leave them
4059 alone because reorg needs them for its own purposes. */
4060 break;
4061
4062 if (reg_set_p (reg, pat))
4063 {
4064 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
4065 break;
4066
4067 if (GET_CODE (pat) == PARALLEL)
4068 {
4069 /* If we find a SET of something else, we can't
4070 delete the insn. */
4071
4072 int i;
4073
4074 for (i = 0; i < XVECLEN (pat, 0); i++)
4075 {
4076 rtx part = XVECEXP (pat, 0, i);
4077
4078 if (GET_CODE (part) == SET
4079 && SET_DEST (part) != reg)
4080 break;
4081 }
4082
4083 if (i == XVECLEN (pat, 0))
4084 delete_computation (our_prev);
4085 }
4086 else if (GET_CODE (pat) == SET
4087 && GET_CODE (SET_DEST (pat)) == REG)
4088 {
4089 int dest_regno = REGNO (SET_DEST (pat));
4090 int dest_endregno
4091 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
4092 ? HARD_REGNO_NREGS (dest_regno,
4093 GET_MODE (SET_DEST (pat))) : 1);
4094 int regno = REGNO (reg);
4095 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
4096 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
4097
4098 if (dest_regno >= regno
4099 && dest_endregno <= endregno)
4100 delete_computation (our_prev);
4101
4102 /* We may have a multi-word hard register and some, but not
4103 all, of the words of the register are needed in subsequent
4104 insns. Write REG_UNUSED notes for those parts that were not
4105 needed. */
4106 else if (dest_regno <= regno
4107 && dest_endregno >= endregno)
4108 {
4109 int i;
4110
4111 REG_NOTES (our_prev)
4112 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (our_prev));
4113
4114 for (i = dest_regno; i < dest_endregno; i++)
4115 if (! find_regno_note (our_prev, REG_UNUSED, i))
4116 break;
4117
4118 if (i == dest_endregno)
4119 delete_computation (our_prev);
4120 }
4121 }
4122
4123 break;
4124 }
4125
4126 /* If PAT references the register that dies here, it is an
4127 additional use. Hence any prior SET isn't dead. However, this
4128 insn becomes the new place for the REG_DEAD note. */
4129 if (reg_overlap_mentioned_p (reg, pat))
4130 {
4131 XEXP (note, 1) = REG_NOTES (our_prev);
4132 REG_NOTES (our_prev) = note;
4133 break;
4134 }
4135 }
4136 }
4137
4138 /* Delete INSN and recursively delete insns that compute values used only
4139 by INSN. This uses the REG_DEAD notes computed during flow analysis.
4140 If we are running before flow.c, we need do nothing since flow.c will
4141 delete dead code. We also can't know if the registers being used are
4142 dead or not at this point.
4143
4144 Otherwise, look at all our REG_DEAD notes. If a previous insn does
4145 nothing other than set a register that dies in this insn, we can delete
4146 that insn as well.
4147
4148 On machines with CC0, if CC0 is used in this insn, we may be able to
4149 delete the insn that set it. */
4150
4151 static void
4152 delete_computation (insn)
4153 rtx insn;
4154 {
4155 rtx note, next;
4156 rtx set;
4157
4158 #ifdef HAVE_cc0
4159 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
4160 {
4161 rtx prev = prev_nonnote_insn (insn);
4162 /* We assume that at this stage
4163 CC's are always set explicitly
4164 and always immediately before the jump that
4165 will use them. So if the previous insn
4166 exists to set the CC's, delete it
4167 (unless it performs auto-increments, etc.). */
4168 if (prev && GET_CODE (prev) == INSN
4169 && sets_cc0_p (PATTERN (prev)))
4170 {
4171 if (sets_cc0_p (PATTERN (prev)) > 0
4172 && ! side_effects_p (PATTERN (prev)))
4173 delete_computation (prev);
4174 else
4175 /* Otherwise, show that cc0 won't be used. */
4176 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
4177 cc0_rtx, REG_NOTES (prev));
4178 }
4179 }
4180 #endif
4181
4182 #ifdef INSN_SCHEDULING
4183 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
4184 reload has completed. The schedulers need to be fixed. Until
4185 they are, we must not rely on the death notes here. */
4186 if (reload_completed && flag_schedule_insns_after_reload)
4187 {
4188 delete_insn (insn);
4189 return;
4190 }
4191 #endif
4192
4193 /* The REG_DEAD note may have been omitted for a register
4194 which is both set and used by the insn. */
4195 set = single_set (insn);
4196 if (set && GET_CODE (SET_DEST (set)) == REG)
4197 {
4198 int dest_regno = REGNO (SET_DEST (set));
4199 int dest_endregno
4200 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
4201 ? HARD_REGNO_NREGS (dest_regno,
4202 GET_MODE (SET_DEST (set))) : 1);
4203 int i;
4204
4205 for (i = dest_regno; i < dest_endregno; i++)
4206 {
4207 if (! refers_to_regno_p (i, i + 1, SET_SRC (set), NULL_PTR)
4208 || find_regno_note (insn, REG_DEAD, i))
4209 continue;
4210
4211 note = gen_rtx_EXPR_LIST (REG_DEAD, (i < FIRST_PSEUDO_REGISTER
4212 ? gen_rtx_REG (reg_raw_mode[i], i)
4213 : SET_DEST (set)), NULL_RTX);
4214 delete_prior_computation (note, insn);
4215 }
4216 }
4217
4218 for (note = REG_NOTES (insn); note; note = next)
4219 {
4220 next = XEXP (note, 1);
4221
4222 if (REG_NOTE_KIND (note) != REG_DEAD
4223 /* Verify that the REG_NOTE is legitimate. */
4224 || GET_CODE (XEXP (note, 0)) != REG)
4225 continue;
4226
4227 delete_prior_computation (note, insn);
4228 }
4229
4230 delete_insn (insn);
4231 }
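
/* For example, if INSN is a branch and the register it tests dies in
   INSN (a REG_DEAD note), delete_prior_computation walks back to the
   insn that computed that register and, if it did nothing else,
   deletes it too -- recursively, so an entire dead computation chain
   can disappear along with the branch.  */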
4232 \f
4233 /* Delete insn INSN from the chain of insns and update label ref counts.
4234 May delete some following insns as a consequence; may even delete
4235 a label elsewhere and insns that follow it.
4236
4237 Returns the first insn after INSN that was not deleted. */
4238
4239 rtx
4240 delete_insn (insn)
4241 register rtx insn;
4242 {
4243 register rtx next = NEXT_INSN (insn);
4244 register rtx prev = PREV_INSN (insn);
4245 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
4246 register int dont_really_delete = 0;
4247
4248 while (next && INSN_DELETED_P (next))
4249 next = NEXT_INSN (next);
4250
4251 /* This insn is already deleted => return first following nondeleted. */
4252 if (INSN_DELETED_P (insn))
4253 return next;
4254
4255 if (was_code_label)
4256 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
4257
4258 /* Don't delete user-declared labels. Convert them to special NOTEs
4259 instead. */
4260 if (was_code_label && LABEL_NAME (insn) != 0
4261 && optimize && ! dont_really_delete)
4262 {
4263 PUT_CODE (insn, NOTE);
4264 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
4265 NOTE_SOURCE_FILE (insn) = 0;
4266 dont_really_delete = 1;
4267 }
4268 else
4269 /* Mark this insn as deleted. */
4270 INSN_DELETED_P (insn) = 1;
4271
4272 /* If this is an unconditional jump, delete it from the jump chain. */
4273 if (simplejump_p (insn))
4274 delete_from_jump_chain (insn);
4275
4276 /* If instruction is followed by a barrier,
4277 delete the barrier too. */
4278
4279 if (next != 0 && GET_CODE (next) == BARRIER)
4280 {
4281 INSN_DELETED_P (next) = 1;
4282 next = NEXT_INSN (next);
4283 }
4284
4285   /* Patch out INSN (and the barrier if any).  */
4286
4287 if (! dont_really_delete)
4288 {
4289 if (prev)
4290 {
4291 NEXT_INSN (prev) = next;
4292 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
4293 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
4294 XVECLEN (PATTERN (prev), 0) - 1)) = next;
4295 }
4296
4297 if (next)
4298 {
4299 PREV_INSN (next) = prev;
4300 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
4301 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4302 }
4303
4304 if (prev && NEXT_INSN (prev) == 0)
4305 set_last_insn (prev);
4306 }
4307
4308 /* If deleting a jump, decrement the count of the label,
4309 and delete the label if it is now unused. */
4310
4311 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
4312 {
4313 rtx lab = JUMP_LABEL (insn), lab_next;
4314
4315 if (--LABEL_NUSES (lab) == 0)
4316 {
4317 /* This can delete NEXT or PREV,
4318 either directly if NEXT is JUMP_LABEL (INSN),
4319 or indirectly through more levels of jumps. */
4320 delete_insn (lab);
4321
4322 /* I feel a little doubtful about this loop,
4323 but I see no clean and sure alternative way
4324 to find the first insn after INSN that is not now deleted.
4325 I hope this works. */
4326 while (next && INSN_DELETED_P (next))
4327 next = NEXT_INSN (next);
4328 return next;
4329 }
4330 else if ((lab_next = next_nonnote_insn (lab)) != NULL
4331 && GET_CODE (lab_next) == JUMP_INSN
4332 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
4333 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
4334 {
4335 /* If we're deleting the tablejump, delete the dispatch table.
4336 	     We may not be able to kill the label immediately preceding
4337 just yet, as it might be referenced in code leading up to
4338 the tablejump. */
4339 delete_insn (lab_next);
4340 }
4341 }
4342
4343 /* Likewise if we're deleting a dispatch table. */
4344
4345 if (GET_CODE (insn) == JUMP_INSN
4346 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4347 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4348 {
4349 rtx pat = PATTERN (insn);
4350 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4351 int len = XVECLEN (pat, diff_vec_p);
4352
4353 for (i = 0; i < len; i++)
4354 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
4355 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
4356 while (next && INSN_DELETED_P (next))
4357 next = NEXT_INSN (next);
4358 return next;
4359 }
4360
4361 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
4362 prev = PREV_INSN (prev);
4363
4364 /* If INSN was a label and a dispatch table follows it,
4365 delete the dispatch table. The tablejump must have gone already.
4366 It isn't useful to fall through into a table. */
4367
4368 if (was_code_label
4369 && NEXT_INSN (insn) != 0
4370 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4371 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
4372 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
4373 next = delete_insn (NEXT_INSN (insn));
4374
4375 /* If INSN was a label, delete insns following it if now unreachable. */
4376
4377 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
4378 {
4379 register RTX_CODE code;
4380 while (next != 0
4381 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
4382 || code == NOTE || code == BARRIER
4383 || (code == CODE_LABEL && INSN_DELETED_P (next))))
4384 {
4385 if (code == NOTE
4386 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
4387 next = NEXT_INSN (next);
4388 /* Keep going past other deleted labels to delete what follows. */
4389 else if (code == CODE_LABEL && INSN_DELETED_P (next))
4390 next = NEXT_INSN (next);
4391 else
4392 /* Note: if this deletes a jump, it can cause more
4393 deletion of unreachable code, after a different label.
4394 As long as the value from this recursive call is correct,
4395 this invocation functions correctly. */
4396 next = delete_insn (next);
4397 }
4398 }
4399
4400 return next;
4401 }
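
/* For example, deleting the only jump to label L makes
   LABEL_NUSES (L) drop to zero, so L itself is deleted; and if a
   BARRIER precedes L, everything after L up to the next kept label
   is now unreachable and is deleted as well.  */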
4402
4403 /* Advance from INSN till reaching something not deleted
4404 then return that. May return INSN itself. */
4405
4406 rtx
4407 next_nondeleted_insn (insn)
4408 rtx insn;
4409 {
4410 while (INSN_DELETED_P (insn))
4411 insn = NEXT_INSN (insn);
4412 return insn;
4413 }
4414 \f
4415 /* Delete a range of insns from FROM to TO, inclusive.
4416 This is for the sake of peephole optimization, so assume
4417 that whatever these insns do will still be done by a new
4418 peephole insn that will replace them. */
4419
4420 void
4421 delete_for_peephole (from, to)
4422 register rtx from, to;
4423 {
4424 register rtx insn = from;
4425
4426 while (1)
4427 {
4428 register rtx next = NEXT_INSN (insn);
4429 register rtx prev = PREV_INSN (insn);
4430
4431 if (GET_CODE (insn) != NOTE)
4432 {
4433 INSN_DELETED_P (insn) = 1;
4434
4435 /* Patch this insn out of the chain. */
4436 /* We don't do this all at once, because we
4437 must preserve all NOTEs. */
4438 if (prev)
4439 NEXT_INSN (prev) = next;
4440
4441 if (next)
4442 PREV_INSN (next) = prev;
4443 }
4444
4445 if (insn == to)
4446 break;
4447 insn = next;
4448 }
4449
4450 /* Note that if TO is an unconditional jump
4451 we *do not* delete the BARRIER that follows,
4452 since the peephole that replaces this sequence
4453 is also an unconditional jump in that case. */
4454 }
4455 \f
4456 /* We have determined that INSN is never reached, and are about to
4457 delete it. Print a warning if the user asked for one.
4458
4459 To try to make this warning more useful, this should only be called
4460 once per basic block not reached, and it only warns when the basic
4461 block contains more than one line from the current function, and
4462 contains at least one operation. CSE and inlining can duplicate insns,
4463 so it's possible to get spurious warnings from this. */
4464
4465 void
4466 never_reached_warning (avoided_insn)
4467 rtx avoided_insn;
4468 {
4469 rtx insn;
4470 rtx a_line_note = NULL;
4471 int two_avoided_lines = 0;
4472 int contains_insn = 0;
4473
4474 if (! warn_notreached)
4475 return;
4476
4477 /* Scan forwards, looking at LINE_NUMBER notes, until
4478 we hit a LABEL or we run out of insns. */
4479
4480 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
4481 {
4482 if (GET_CODE (insn) == CODE_LABEL)
4483 break;
4484 else if (GET_CODE (insn) == NOTE /* A line number note? */
4485 && NOTE_LINE_NUMBER (insn) >= 0)
4486 {
4487 if (a_line_note == NULL)
4488 a_line_note = insn;
4489 else
4490 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
4491 != NOTE_LINE_NUMBER (insn));
4492 }
4493 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4494 contains_insn = 1;
4495 }
4496 if (two_avoided_lines && contains_insn)
4497 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
4498 NOTE_LINE_NUMBER (a_line_note),
4499 "will never be executed");
4500 }
4501 \f
4502 /* Invert the condition of the jump JUMP, and make it jump
4503 to label NLABEL instead of where it jumps now. */
4504
4505 int
4506 invert_jump (jump, nlabel)
4507 rtx jump, nlabel;
4508 {
4509 /* We have to either invert the condition and change the label or
4510 do neither. Either operation could fail. We first try to invert
4511 the jump. If that succeeds, we try changing the label. If that fails,
4512 we invert the jump back to what it was. */
4513
4514 if (! invert_exp (PATTERN (jump), jump))
4515 return 0;
4516
4517 if (redirect_jump (jump, nlabel))
4518 {
4519 if (flag_branch_probabilities)
4520 {
4521 rtx note = find_reg_note (jump, REG_BR_PROB, 0);
4522
4523 /* An inverted jump means that a probability taken becomes a
4524 probability not taken. Subtract the branch probability from the
4525 probability base to convert it back to a taken probability.
4526 	     (We don't flip the probability on a branch that's never taken.)  */
4527 if (note && XINT (XEXP (note, 0), 0) >= 0)
4528 XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
4529 }
4530
4531 return 1;
4532 }
4533
4534 if (! invert_exp (PATTERN (jump), jump))
4535 /* This should just be putting it back the way it was. */
4536 abort ();
4537
4538 return 0;
4539 }
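
/* For example, inverting "branch if EQ to L1" while redirecting it
   to L2 yields "branch if NE to L2"; a REG_BR_PROB note recording a
   taken probability P becomes REG_BR_PROB_BASE - P, since the
   inverted branch is taken exactly when the original was not.  */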
4540
4541 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4542
4543 Return 1 if we can do so, 0 if we cannot find a way to do so that
4544 matches a pattern. */
4545
4546 int
4547 invert_exp (x, insn)
4548 rtx x;
4549 rtx insn;
4550 {
4551 register RTX_CODE code;
4552 register int i;
4553 register const char *fmt;
4554
4555 code = GET_CODE (x);
4556
4557 if (code == IF_THEN_ELSE)
4558 {
4559 register rtx comp = XEXP (x, 0);
4560 register rtx tem;
4561
4562       /* We can do this in two ways:  The preferable way, which works
4563 	 unless this might be an IEEE floating-point comparison, is to
4564 	 reverse the comparison code.  Otherwise, swap the THEN-part and
4565 	 ELSE-part of the IF_THEN_ELSE.  If we can't do either, fail.  */
4566
4567 if (can_reverse_comparison_p (comp, insn)
4568 && validate_change (insn, &XEXP (x, 0),
4569 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
4570 GET_MODE (comp), XEXP (comp, 0),
4571 XEXP (comp, 1)), 0))
4572 return 1;
4573
4574 tem = XEXP (x, 1);
4575 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
4576 validate_change (insn, &XEXP (x, 2), tem, 1);
4577 return apply_change_group ();
4578 }
4579
4580 fmt = GET_RTX_FORMAT (code);
4581 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4582 {
4583 if (fmt[i] == 'e')
4584 {
4585 if (! invert_exp (XEXP (x, i), insn))
4586 return 0;
4587 }
4588 else if (fmt[i] == 'E')
4589 {
4590 register int j;
4591 for (j = 0; j < XVECLEN (x, i); j++)
4592 if (!invert_exp (XVECEXP (x, i, j), insn))
4593 return 0;
4594 }
4595 }
4596
4597 return 1;
4598 }
4599 \f
4600 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4601 If the old jump target label is unused as a result,
4602 it and the code following it may be deleted.
4603
4604 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4605 RETURN insn.
4606
4607 The return value will be 1 if the change was made, 0 if it wasn't (this
4608 can only occur for NLABEL == 0). */
4609
4610 int
4611 redirect_jump (jump, nlabel)
4612 rtx jump, nlabel;
4613 {
4614 register rtx olabel = JUMP_LABEL (jump);
4615
4616 if (nlabel == olabel)
4617 return 1;
4618
4619 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
4620 return 0;
4621
4622 /* If this is an unconditional branch, delete it from the jump_chain of
4623 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4624 have UID's in range and JUMP_CHAIN is valid). */
4625 if (jump_chain && (simplejump_p (jump)
4626 || GET_CODE (PATTERN (jump)) == RETURN))
4627 {
4628 int label_index = nlabel ? INSN_UID (nlabel) : 0;
4629
4630 delete_from_jump_chain (jump);
4631 if (label_index < max_jump_chain
4632 && INSN_UID (jump) < max_jump_chain)
4633 {
4634 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
4635 jump_chain[label_index] = jump;
4636 }
4637 }
4638
4639 JUMP_LABEL (jump) = nlabel;
4640 if (nlabel)
4641 ++LABEL_NUSES (nlabel);
4642
4643 /* If we're eliding the jump over exception cleanups at the end of a
4644 function, move the function end note so that -Wreturn-type works. */
4645 if (olabel && NEXT_INSN (olabel)
4646 && GET_CODE (NEXT_INSN (olabel)) == NOTE
4647 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
4648 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
4649
4650 if (olabel && --LABEL_NUSES (olabel) == 0)
4651 delete_insn (olabel);
4652
4653 return 1;
4654 }
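
/* For example, redirecting (set (pc) (label_ref OLD)) to a new label
   just rewrites the LABEL_REF, while redirecting it to NLABEL == 0
   asks validate_change to accept (return) instead -- the one case
   that can fail, e.g. on a target with no matching return pattern.  */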
4655
4656 /* Delete the instruction JUMP from any jump chain it might be on. */
4657
4658 static void
4659 delete_from_jump_chain (jump)
4660 rtx jump;
4661 {
4662 int index;
4663 rtx olabel = JUMP_LABEL (jump);
4664
4665 /* Handle unconditional jumps. */
4666 if (jump_chain && olabel != 0
4667 && INSN_UID (olabel) < max_jump_chain
4668 && simplejump_p (jump))
4669 index = INSN_UID (olabel);
4670 /* Handle return insns. */
4671 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4672 index = 0;
4673 else return;
4674
4675 if (jump_chain[index] == jump)
4676 jump_chain[index] = jump_chain[INSN_UID (jump)];
4677 else
4678 {
4679 rtx insn;
4680
4681 for (insn = jump_chain[index];
4682 insn != 0;
4683 insn = jump_chain[INSN_UID (insn)])
4684 if (jump_chain[INSN_UID (insn)] == jump)
4685 {
4686 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4687 break;
4688 }
4689 }
4690 }
4691
4692 /* Throughout the rtx at LOC:  if NLABEL is nonzero, alter
4693    (LABEL_REF OLABEL) to (LABEL_REF NLABEL), and if OLABEL is
4694    zero, alter (RETURN) to (LABEL_REF NLABEL).
4695
4696 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
4697 validity with validate_change. Convert (set (pc) (label_ref olabel))
4698 to (return).
4699
4700 Return 0 if we found a change we would like to make but it is invalid.
4701 Otherwise, return 1. */
4702
4703 int
4704 redirect_exp (loc, olabel, nlabel, insn)
4705 rtx *loc;
4706 rtx olabel, nlabel;
4707 rtx insn;
4708 {
4709 register rtx x = *loc;
4710 register RTX_CODE code = GET_CODE (x);
4711 register int i;
4712 register const char *fmt;
4713
4714 if (code == LABEL_REF)
4715 {
4716 if (XEXP (x, 0) == olabel)
4717 {
4718 if (nlabel)
4719 XEXP (x, 0) = nlabel;
4720 else
4721 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4722 return 1;
4723 }
4724 }
4725 else if (code == RETURN && olabel == 0)
4726 {
4727 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
4728 if (loc == &PATTERN (insn))
4729 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
4730 return validate_change (insn, loc, x, 0);
4731 }
4732
4733 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
4734 && GET_CODE (SET_SRC (x)) == LABEL_REF
4735 && XEXP (SET_SRC (x), 0) == olabel)
4736 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4737
4738 fmt = GET_RTX_FORMAT (code);
4739 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4740 {
4741 if (fmt[i] == 'e')
4742 {
4743 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
4744 return 0;
4745 }
4746 else if (fmt[i] == 'E')
4747 {
4748 register int j;
4749 for (j = 0; j < XVECLEN (x, i); j++)
4750 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
4751 return 0;
4752 }
4753 }
4754
4755 return 1;
4756 }
4757 \f
4758 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4759
4760 If the old jump target label (before the dispatch table) becomes unused,
4761    it and the dispatch table may be deleted.  In that case, find the insn
4762    that references that label before the jump, and delete it and its
4763    logical successors too.  */
4764
4765 static void
4766 redirect_tablejump (jump, nlabel)
4767 rtx jump, nlabel;
4768 {
4769 register rtx olabel = JUMP_LABEL (jump);
4770
4771 /* Add this jump to the jump_chain of NLABEL. */
4772 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4773 && INSN_UID (jump) < max_jump_chain)
4774 {
4775 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4776 jump_chain[INSN_UID (nlabel)] = jump;
4777 }
4778
4779 PATTERN (jump) = gen_jump (nlabel);
4780 JUMP_LABEL (jump) = nlabel;
4781 ++LABEL_NUSES (nlabel);
4782 INSN_CODE (jump) = -1;
4783
4784 if (--LABEL_NUSES (olabel) == 0)
4785 {
4786 delete_labelref_insn (jump, olabel, 0);
4787 delete_insn (olabel);
4788 }
4789 }
4790
4791 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4792 If we found one, delete it and then delete this insn if DELETE_THIS is
4793 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4794
4795 static int
4796 delete_labelref_insn (insn, label, delete_this)
4797 rtx insn, label;
4798 int delete_this;
4799 {
4800 int deleted = 0;
4801 rtx link;
4802
4803 if (GET_CODE (insn) != NOTE
4804 && reg_mentioned_p (label, PATTERN (insn)))
4805 {
4806 if (delete_this)
4807 {
4808 delete_insn (insn);
4809 deleted = 1;
4810 }
4811 else
4812 return 1;
4813 }
4814
4815 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4816 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4817 {
4818 if (delete_this)
4819 {
4820 delete_insn (insn);
4821 deleted = 1;
4822 }
4823 else
4824 return 1;
4825 }
4826
4827 return deleted;
4828 }
4829 \f
4830 /* Like rtx_equal_p except that it considers two REGs as equal
4831 if they renumber to the same value and considers two commutative
4832 operations to be the same if the order of the operands has been
4833 reversed.
4834
4835 ??? Addition is not commutative on the PA due to the weird implicit
4836 space register selection rules for memory addresses. Therefore, we
4837 don't consider a + b == b + a.
4838
4839 We could/should make this test a little tighter. Possibly only
4840 disabling it on the PA via some backend macro or only disabling this
4841 case when the PLUS is inside a MEM. */
4842
4843 int
4844 rtx_renumbered_equal_p (x, y)
4845 rtx x, y;
4846 {
4847 register int i;
4848 register RTX_CODE code = GET_CODE (x);
4849 register const char *fmt;
4850
4851 if (x == y)
4852 return 1;
4853
4854 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4855 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4856 && GET_CODE (SUBREG_REG (y)) == REG)))
4857 {
4858 int reg_x = -1, reg_y = -1;
4859 int word_x = 0, word_y = 0;
4860
4861 if (GET_MODE (x) != GET_MODE (y))
4862 return 0;
4863
4864 /* If we haven't done any renumbering, don't
4865 make any assumptions. */
4866 if (reg_renumber == 0)
4867 return rtx_equal_p (x, y);
4868
4869 if (code == SUBREG)
4870 {
4871 reg_x = REGNO (SUBREG_REG (x));
4872 word_x = SUBREG_WORD (x);
4873
4874 if (reg_renumber[reg_x] >= 0)
4875 {
4876 reg_x = reg_renumber[reg_x] + word_x;
4877 word_x = 0;
4878 }
4879 }
4880
4881 else
4882 {
4883 reg_x = REGNO (x);
4884 if (reg_renumber[reg_x] >= 0)
4885 reg_x = reg_renumber[reg_x];
4886 }
4887
4888 if (GET_CODE (y) == SUBREG)
4889 {
4890 reg_y = REGNO (SUBREG_REG (y));
4891 word_y = SUBREG_WORD (y);
4892
4893 if (reg_renumber[reg_y] >= 0)
4894 {
4895 reg_y = reg_renumber[reg_y];
4896 word_y = 0;
4897 }
4898 }
4899
4900 else
4901 {
4902 reg_y = REGNO (y);
4903 if (reg_renumber[reg_y] >= 0)
4904 reg_y = reg_renumber[reg_y];
4905 }
4906
4907 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4908 }
4909
4910 /* Now we have disposed of all the cases
4911 in which different rtx codes can match. */
4912 if (code != GET_CODE (y))
4913 return 0;
4914
4915 switch (code)
4916 {
4917 case PC:
4918 case CC0:
4919 case ADDR_VEC:
4920 case ADDR_DIFF_VEC:
4921 return 0;
4922
4923 case CONST_INT:
4924 return INTVAL (x) == INTVAL (y);
4925
4926 case LABEL_REF:
4927 /* We can't assume nonlocal labels have their following insns yet. */
4928 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4929 return XEXP (x, 0) == XEXP (y, 0);
4930
4931 /* Two label-refs are equivalent if they point at labels
4932 in the same position in the instruction stream. */
4933 return (next_real_insn (XEXP (x, 0))
4934 == next_real_insn (XEXP (y, 0)));
4935
4936 case SYMBOL_REF:
4937 return XSTR (x, 0) == XSTR (y, 0);
4938
4939 case CODE_LABEL:
4940 /* If we didn't match EQ equality above, they aren't the same. */
4941 return 0;
4942
4943 default:
4944 break;
4945 }
4946
4947 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4948
4949 if (GET_MODE (x) != GET_MODE (y))
4950 return 0;
4951
4952   /* For commutative operations, the RTXs match if the operands match in
4953      either order.  Also handle the simple binary and unary cases without a loop.
4954
4955 ??? Don't consider PLUS a commutative operator; see comments above. */
4956 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4957 && code != PLUS)
4958 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4959 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4960 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4961 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4962 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4963 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4964 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4965 else if (GET_RTX_CLASS (code) == '1')
4966 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4967
4968   /* Compare the elements.  If any pair of corresponding elements
4969      fails to match, return 0 for the whole thing.  */
4970
4971 fmt = GET_RTX_FORMAT (code);
4972 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4973 {
4974 register int j;
4975 switch (fmt[i])
4976 {
4977 case 'w':
4978 if (XWINT (x, i) != XWINT (y, i))
4979 return 0;
4980 break;
4981
4982 case 'i':
4983 if (XINT (x, i) != XINT (y, i))
4984 return 0;
4985 break;
4986
4987 case 's':
4988 if (strcmp (XSTR (x, i), XSTR (y, i)))
4989 return 0;
4990 break;
4991
4992 case 'e':
4993 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4994 return 0;
4995 break;
4996
4997 case 'u':
4998 if (XEXP (x, i) != XEXP (y, i))
4999 return 0;
5000 /* fall through. */
5001 case '0':
5002 break;
5003
5004 case 'E':
5005 if (XVECLEN (x, i) != XVECLEN (y, i))
5006 return 0;
5007 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5008 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
5009 return 0;
5010 break;
5011
5012 default:
5013 abort ();
5014 }
5015 }
5016 return 1;
5017 }
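
/* For example, once reg_renumber has assigned pseudo 101 to hard
   register 3, (reg:SI 101) and (reg:SI 3) compare equal here, where
   plain rtx_equal_p would say they differ.  */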
5018 \f
5019 /* If X is a hard register or equivalent to one or a subregister of one,
5020 return the hard register number. If X is a pseudo register that was not
5021 assigned a hard register, return the pseudo register number. Otherwise,
5022 return -1. Any rtx is valid for X. */
5023
5024 int
5025 true_regnum (x)
5026 rtx x;
5027 {
5028 if (GET_CODE (x) == REG)
5029 {
5030 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
5031 return reg_renumber[REGNO (x)];
5032 return REGNO (x);
5033 }
5034 if (GET_CODE (x) == SUBREG)
5035 {
5036 int base = true_regnum (SUBREG_REG (x));
5037 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
5038 return SUBREG_WORD (x) + base;
5039 }
5040 return -1;
5041 }
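
/* For example, if pseudo 100 was assigned hard register 2, then
   true_regnum ((reg:DI 100)) is 2, and on a 32-bit-word machine
   true_regnum ((subreg:SI (reg:DI 100) 1)) is 2 + 1 = 3, the hard
   register holding the subreg's word.  */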
5042 \f
5043 /* Optimize code of the form:
5044
5045 for (x = a[i]; x; ...)
5046 ...
5047 for (x = a[i]; x; ...)
5048 ...
5049 foo:
5050
5051 Loop optimize will change the above code into
5052
5053 if (x = a[i])
5054 for (;;)
5055 { ...; if (! (x = ...)) break; }
5056 if (x = a[i])
5057 for (;;)
5058 { ...; if (! (x = ...)) break; }
5059 foo:
5060
5061 In general, if the first test fails, the program can branch
5062 directly to `foo' and skip the second try which is doomed to fail.
5063 We run this after loop optimization and before flow analysis. */
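
/* In the simplest case, given

     b1:  if (x == 0) goto L
	  ...
     L:   ...
     b2:  if (x == 0) goto M

   with nothing between L and b2 modifying x, knowing that b1
   branched tells us that b2 must branch too (EQ dominates EQ), so
   b1 can be redirected straight to M.  */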
5064
5065 /* When comparing the insn patterns, we track the fact that different
5066 pseudo-register numbers may have been used in each computation.
5067 The following array stores an equivalence -- same_regs[I] == J means
5068 that pseudo register I was used in the first set of tests in a context
5069    where J was used in the second set.  We also count the number of such
5070    pending equivalences.  If that count is nonzero when we finish comparing,
5071    the expressions really aren't the same.  */
5072
5073 static int *same_regs;
5074
5075 static int num_same_regs;
5076
5077 /* Track any registers modified between the target of the first jump and
5078 the second jump. They never compare equal. */
5079
5080 static char *modified_regs;
5081
5082 /* Record if memory was modified. */
5083
5084 static int modified_mem;
5085
5086 /* Called via note_stores on each insn between the target of the first
5087 branch and the second branch. It marks any changed registers. */
5088
5089 static void
5090 mark_modified_reg (dest, x, data)
5091 rtx dest;
5092 rtx x ATTRIBUTE_UNUSED;
5093 void *data ATTRIBUTE_UNUSED;
5094 {
5095 int regno, i;
5096
5097 if (GET_CODE (dest) == SUBREG)
5098 dest = SUBREG_REG (dest);
5099
5100 if (GET_CODE (dest) == MEM)
5101 modified_mem = 1;
5102
5103 if (GET_CODE (dest) != REG)
5104 return;
5105
5106 regno = REGNO (dest);
5107 if (regno >= FIRST_PSEUDO_REGISTER)
5108 modified_regs[regno] = 1;
5109 else
5110 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
5111 modified_regs[regno + i] = 1;
5112 }
5113
5114 /* F is the first insn in the chain of insns. */
5115
5116 void
5117 thread_jumps (f, max_reg, flag_before_loop)
5118 rtx f;
5119 int max_reg;
5120 int flag_before_loop;
5121 {
5122 /* Basic algorithm is to find a conditional branch,
5123 the label it may branch to, and the branch after
5124 that label. If the two branches test the same condition,
5125 walk back from both branch paths until the insn patterns
5126 differ, or code labels are hit. If we make it back to
5127 the target of the first branch, then we know that the first branch
5128 will either always succeed or always fail depending on the relative
5129 senses of the two branches. So adjust the first branch accordingly
5130 in this case. */
5131
5132 rtx label, b1, b2, t1, t2;
5133 enum rtx_code code1, code2;
5134 rtx b1op0, b1op1, b2op0, b2op1;
5135 int changed = 1;
5136 int i;
5137 int *all_reset;
5138
5139 /* Allocate register tables and quick-reset table. */
5140 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
5141 same_regs = (int *) xmalloc (max_reg * sizeof (int));
5142 all_reset = (int *) xmalloc (max_reg * sizeof (int));
5143 for (i = 0; i < max_reg; i++)
5144 all_reset[i] = -1;
5145
5146 while (changed)
5147 {
5148 changed = 0;
5149
5150 for (b1 = f; b1; b1 = NEXT_INSN (b1))
5151 {
5152 /* Get to a candidate branch insn. */
5153 if (GET_CODE (b1) != JUMP_INSN
5154 || ! condjump_p (b1) || simplejump_p (b1)
5155 || JUMP_LABEL (b1) == 0)
5156 continue;
5157
5158 bzero (modified_regs, max_reg * sizeof (char));
5159 modified_mem = 0;
5160
5161 bcopy ((char *) all_reset, (char *) same_regs,
5162 max_reg * sizeof (int));
5163 num_same_regs = 0;
5164
5165 label = JUMP_LABEL (b1);
5166
5167 /* Look for a branch after the target. Record any registers and
5168 memory modified between the target and the branch. Stop when we
5169 get to a label since we can't know what was changed there. */
5170 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
5171 {
5172 if (GET_CODE (b2) == CODE_LABEL)
5173 break;
5174
5175 else if (GET_CODE (b2) == JUMP_INSN)
5176 {
5177 /* If this is an unconditional jump and is the only use of
5178 its target label, we can follow it. */
5179 if (simplejump_p (b2)
5180 && JUMP_LABEL (b2) != 0
5181 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
5182 {
5183 b2 = JUMP_LABEL (b2);
5184 continue;
5185 }
5186 else
5187 break;
5188 }
5189
5190 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
5191 continue;
5192
5193 if (GET_CODE (b2) == CALL_INSN)
5194 {
5195 modified_mem = 1;
5196 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5197 if (call_used_regs[i] && ! fixed_regs[i]
5198 && i != STACK_POINTER_REGNUM
5199 && i != FRAME_POINTER_REGNUM
5200 && i != HARD_FRAME_POINTER_REGNUM
5201 && i != ARG_POINTER_REGNUM)
5202 modified_regs[i] = 1;
5203 }
5204
5205 note_stores (PATTERN (b2), mark_modified_reg, NULL);
5206 }
5207
5208 /* Check the next candidate branch insn from the label
5209 of the first. */
5210 if (b2 == 0
5211 || GET_CODE (b2) != JUMP_INSN
5212 || b2 == b1
5213 || ! condjump_p (b2)
5214 || simplejump_p (b2))
5215 continue;
5216
5217 /* Get the comparison codes and operands, reversing the
5218 codes if appropriate. If we don't have comparison codes,
5219 we can't do anything. */
5220 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
5221 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
5222 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
5223 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
5224 code1 = reverse_condition (code1);
5225
5226 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
5227 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
5228 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
5229 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
5230 code2 = reverse_condition (code2);
5231
5232 /* If they test the same things and knowing that B1 branches
5233 tells us whether or not B2 branches, check if we
5234 can thread the branch. */
5235 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
5236 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
5237 && (comparison_dominates_p (code1, code2)
5238 || (can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
5239 0),
5240 b1)
5241 && comparison_dominates_p (code1, reverse_condition (code2)))))
5242
5243 {
5244 t1 = prev_nonnote_insn (b1);
5245 t2 = prev_nonnote_insn (b2);
5246
5247 while (t1 != 0 && t2 != 0)
5248 {
5249 if (t2 == label)
5250 {
5251 /* We have reached the target of the first branch.
5252 If there are no pending register equivalents,
5253 we know that this branch will either always
5254 succeed (if the senses of the two branches are
5255 the same) or always fail (if not). */
5256 rtx new_label;
5257
5258 if (num_same_regs != 0)
5259 break;
5260
5261 if (comparison_dominates_p (code1, code2))
5262 new_label = JUMP_LABEL (b2);
5263 else
5264 new_label = get_label_after (b2);
5265
5266 if (JUMP_LABEL (b1) != new_label)
5267 {
5268 rtx prev = PREV_INSN (new_label);
5269
5270 if (flag_before_loop
5271 && GET_CODE (prev) == NOTE
5272 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
5273 {
5274 /* Don't thread to the loop label. If a loop
5275 label is reused, loop optimization will
5276 be disabled for that loop. */
5277 new_label = gen_label_rtx ();
5278 emit_label_after (new_label, PREV_INSN (prev));
5279 }
5280 changed |= redirect_jump (b1, new_label);
5281 }
5282 break;
5283 }
5284
5285 /* If either of these is not a normal insn (it might be
5286 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
5287 have already been skipped above.) Similarly, fail
5288 if the insns are different. */
5289 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
5290 || recog_memoized (t1) != recog_memoized (t2)
5291 || ! rtx_equal_for_thread_p (PATTERN (t1),
5292 PATTERN (t2), t2))
5293 break;
5294
5295 t1 = prev_nonnote_insn (t1);
5296 t2 = prev_nonnote_insn (t2);
5297 }
5298 }
5299 }
5300 }
5301
5302 /* Clean up. */
5303 free (modified_regs);
5304 free (same_regs);
5305 free (all_reset);
5306 }
5307 \f
5308 /* This is like RTX_EQUAL_P except that it knows about our handling of
5309 possibly equivalent registers and knows to consider volatile and
5310 modified objects as not equal.
5311
5312 YINSN is the insn containing Y. */
5313
5314 int
5315 rtx_equal_for_thread_p (x, y, yinsn)
5316 rtx x, y;
5317 rtx yinsn;
5318 {
5319 register int i;
5320 register int j;
5321 register enum rtx_code code;
5322 register const char *fmt;
5323
5324 code = GET_CODE (x);
5325 /* Rtx's of different codes cannot be equal. */
5326 if (code != GET_CODE (y))
5327 return 0;
5328
5329 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
5330 (REG:SI x) and (REG:HI x) are NOT equivalent. */
5331
5332 if (GET_MODE (x) != GET_MODE (y))
5333 return 0;
5334
5335 /* For floating-point, consider everything unequal. This is a bit
5336 pessimistic, but this pass would only rarely do anything for FP
5337 anyway. */
5338 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
5339 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
5340 return 0;
5341
5342   /* For commutative operations, the RTXs match if the operands match in
5343      either order.  Also handle the simple binary and unary cases without a loop.  */
5344 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5345 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5346 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
5347 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
5348 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
5349 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
5350 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5351 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
5352 else if (GET_RTX_CLASS (code) == '1')
5353 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5354
5355 /* Handle special-cases first. */
5356 switch (code)
5357 {
5358 case REG:
5359 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
5360 return 1;
5361
5362       /* If neither is a user variable nor a hard register, check for
5363 	 possible equivalence.  */
5364 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
5365 || REGNO (x) < FIRST_PSEUDO_REGISTER
5366 || REGNO (y) < FIRST_PSEUDO_REGISTER)
5367 return 0;
5368
5369 if (same_regs[REGNO (x)] == -1)
5370 {
5371 same_regs[REGNO (x)] = REGNO (y);
5372 num_same_regs++;
5373
5374 /* If this is the first time we are seeing a register on the `Y'
5375 side, see if it is the last use. If not, we can't thread the
5376 jump, so mark it as not equivalent. */
5377 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
5378 return 0;
5379
5380 return 1;
5381 }
5382 else
5383 return (same_regs[REGNO (x)] == REGNO (y));
5384
5385 break;
5386
5387 case MEM:
5388 /* If memory modified or either volatile, not equivalent.
5389 Else, check address. */
5390 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5391 return 0;
5392
5393 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5394
5395 case ASM_INPUT:
5396 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5397 return 0;
5398
5399 break;
5400
5401 case SET:
5402 /* Cancel a pending `same_regs' if setting equivalenced registers.
5403 Then process source. */
5404 if (GET_CODE (SET_DEST (x)) == REG
5405 && GET_CODE (SET_DEST (y)) == REG)
5406 {
5407 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
5408 {
5409 same_regs[REGNO (SET_DEST (x))] = -1;
5410 num_same_regs--;
5411 }
5412 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
5413 return 0;
5414 }
5415 else
5416 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
5417 return 0;
5418
5419 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
5420
5421 case LABEL_REF:
5422 return XEXP (x, 0) == XEXP (y, 0);
5423
5424 case SYMBOL_REF:
5425 return XSTR (x, 0) == XSTR (y, 0);
5426
5427 default:
5428 break;
5429 }
5430
5431 if (x == y)
5432 return 1;
5433
5434 fmt = GET_RTX_FORMAT (code);
5435 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5436 {
5437 switch (fmt[i])
5438 {
5439 case 'w':
5440 if (XWINT (x, i) != XWINT (y, i))
5441 return 0;
5442 break;
5443
5444 case 'n':
5445 case 'i':
5446 if (XINT (x, i) != XINT (y, i))
5447 return 0;
5448 break;
5449
5450 case 'V':
5451 case 'E':
5452 /* Two vectors must have the same length. */
5453 if (XVECLEN (x, i) != XVECLEN (y, i))
5454 return 0;
5455
5456 /* And the corresponding elements must match. */
5457 for (j = 0; j < XVECLEN (x, i); j++)
5458 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
5459 XVECEXP (y, i, j), yinsn) == 0)
5460 return 0;
5461 break;
5462
5463 case 'e':
5464 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
5465 return 0;
5466 break;
5467
5468 case 'S':
5469 case 's':
5470 if (strcmp (XSTR (x, i), XSTR (y, i)))
5471 return 0;
5472 break;
5473
5474 case 'u':
5475 /* These are just backpointers, so they don't matter. */
5476 break;
5477
5478 case '0':
5479 case 't':
5480 break;
5481
5482 /* It is believed that rtx's at this level will never
5483 contain anything but integers and other rtx's,
5484 except for within LABEL_REFs and SYMBOL_REFs. */
5485 default:
5486 abort ();
5487 }
5488 }
5489 return 1;
5490 }
5491 \f
5492
5493 #if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)
5494 /* Return the insn in front of which NEW can safely be inserted, starting
5495    the search at the jump insn INSN.  Return 0 if it is not safe to do this
5496    jump optimization.  Note that NEW must contain a single set.  */
5497
5498 static rtx
5499 find_insert_position (insn, new)
5500 rtx insn;
5501 rtx new;
5502 {
5503 int i;
5504 rtx prev;
5505
5506 /* If NEW does not clobber, it is safe to insert NEW before INSN. */
5507 if (GET_CODE (PATTERN (new)) != PARALLEL)
5508 return insn;
5509
5510 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5511 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5512 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5513 insn))
5514 break;
5515
5516 if (i < 0)
5517 return insn;
5518
5519 /* There is a good chance that the previous insn PREV sets the thing
5520 being clobbered (often the CC in a hard reg). If PREV does not
5521 use what NEW sets, we can insert NEW before PREV. */
5522
5523 prev = prev_active_insn (insn);
5524 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5525 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5526 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5527 insn)
5528 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5529 prev))
5530 return 0;
5531
5532 return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
5533 }
5534 #endif /* !HAVE_cc0 && !HAVE_conditional_arithmetic */