/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This is the jump-optimization pass of the compiler.
   It is run two or three times: once before cse, sometimes once after cse,
   and once after reload (before final).

   jump_optimize deletes unreachable code and labels that are not used.
   It also deletes jumps that jump to the following insn,
   and simplifies jumps around unconditional jumps and jumps
   to unconditional jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   Optionally, cross-jumping can be done.  Currently it is done
   only the last time (when after reload and before final).
   In fact, the code for cross-jumping now assumes that register
   allocation has been done, since it uses `rtx_renumbered_equal_p'.

   Jump optimization is done after cse when cse's constant-propagation
   causes jumps to become unconditional or to be deleted.

   Unreachable loops are not detected here, because the labels
   have references and the insns appear reachable from the labels.
   find_basic_blocks in flow.c finds and deletes such loops.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"

/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */

static rtx *jump_chain;

/* Maximum index in jump_chain.  */

static int max_jump_chain;
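
/* Illustrative sketch (not part of the original code): every
   unconditional jump to a label LAB can be visited by following the
   uid-indexed chain described above, roughly

	for (x = jump_chain[INSN_UID (lab)]; x;
	     x = jump_chain[INSN_UID (x)])
	  ...;

   and jump_chain[0] heads the analogous chain of return insns.  */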

/* Set nonzero by jump_optimize if control can fall through
   to the end of the function.  */
int can_reach_end;

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C,
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;

static int init_label_info PROTO((rtx));
static void delete_barrier_successors PROTO((rtx));
static void mark_all_labels PROTO((rtx, int));
static rtx delete_unreferenced_labels PROTO((rtx));
static void delete_noop_moves PROTO((rtx));
static int calculate_can_reach_end PROTO((rtx, int, int));
static int duplicate_loop_exit_test PROTO((rtx));
static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump PROTO((rtx, rtx, rtx));
static int jump_back_p PROTO((rtx, rtx));
static int tension_vector_labels PROTO((rtx, int));
static void mark_jump_label PROTO((rtx, rtx, int));
static void delete_computation PROTO((rtx));
static void delete_from_jump_chain PROTO((rtx));
static int delete_labelref_insn PROTO((rtx, rtx, int));
static void mark_modified_reg PROTO((rtx, rtx, void *));
static void redirect_tablejump PROTO((rtx, rtx));
static void jump_optimize_1 PROTO ((rtx, int, int, int, int));
#if ! defined(HAVE_cc0) && ! defined(HAVE_conditional_arithmetic)
static rtx find_insert_position PROTO((rtx, rtx));
#endif
static int returnjump_p_1 PROTO((rtx *, void *));
static void delete_prior_computation PROTO((rtx, rtx));

/* Main external entry point into the jump optimizer.  See comments before
   jump_optimize_1 for descriptions of the arguments.  */
void
jump_optimize (f, cross_jump, noop_moves, after_regscan)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
{
  jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0);
}

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 1);
}

\f
/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is 1, detect matching code
   before a jump and its destination and unify them.
   If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

   If NOOP_MOVES is nonzero, delete no-op move insns.

   If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
   after regscan, and it is safe to use regno_first_uid and regno_last_uid.

   If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
   and JUMP_LABEL field for jumping insns.

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.  */

static void
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
     int mark_labels_only;
{
  register rtx insn, next;
  int changed;
  int old_max_reg;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;

  cross_jump_death_matters = (cross_jump == 2);
  max_uid = init_label_info (f) + 1;

  /* If we are performing cross jump optimizations, then initialize
     tables mapping UIDs to EH regions to avoid incorrect movement
     of insns from one EH region to another.  */
  if (flag_exceptions && cross_jump)
    init_insn_eh_region (f, max_uid);

  delete_barrier_successors (f);

  /* Leave some extra room for labels and duplicate exit test insns
     we make.  */
  max_jump_chain = max_uid * 14 / 10;
  jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));

  mark_all_labels (f, cross_jump);

  /* Keep track of labels used from static data;
     they cannot ever be deleted.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  check_exception_handler_labels ();

  /* Keep track of labels used for marking handlers for exception
     regions; they cannot usually be deleted.  */

  for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
     notes and recompute LABEL_NUSES.  */
  if (mark_labels_only)
    goto end;

  exception_optimize ();

  last_insn = delete_unreferenced_labels (f);

  if (optimize == 0)
    {
      /* CAN_REACH_END is persistent for each function.  Once set it should
	 not be cleared.  This is especially true for the case where we
	 delete the NOTE_FUNCTION_END note.  CAN_REACH_END is cleared by
	 the front-end before compiling each function.  */
      if (calculate_can_reach_end (last_insn, 1, 0))
	can_reach_end = 1;

      /* Zero the "deleted" flag of all the "deleted" insns.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
	INSN_DELETED_P (insn) = 0;

      goto end;
    }

#ifdef HAVE_return
  if (HAVE_return)
    {
      /* If we fall through to the epilogue, see if we can insert a RETURN insn
	 in front of it.  If the machine allows it at this point (we might be
	 after reload for a leaf routine), it will improve optimization for it
	 to be there.  */
      insn = get_last_insn ();
      while (insn && GET_CODE (insn) == NOTE)
	insn = PREV_INSN (insn);

      if (insn && GET_CODE (insn) != BARRIER)
	{
	  emit_jump_insn (gen_return ());
	  emit_barrier ();
	}
    }
#endif

  if (noop_moves)
    delete_noop_moves (f);

  /* If we haven't yet gotten to reload and we have just run regscan,
     delete any insn that sets a register that isn't used elsewhere.
     This helps some of the optimizations below by having fewer insns
     being jumped around.  */

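  /* E.g. (illustrative): for an insn like

	 (set (reg 100) (plus (reg 101) (reg 102)))

     where reg 100's first and last recorded uids both point back at
     this insn, the register is referenced nowhere else, so the set is
     deleted below.  */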
  if (! reload_completed && after_regscan)
    for (insn = f; insn; insn = next)
      {
	rtx set = single_set (insn);

	next = NEXT_INSN (insn);

	if (set && GET_CODE (SET_DEST (set)) == REG
	    && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
	    && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    /* We use regno_last_note_uid so as not to delete the setting
	       of a reg that's used in notes.  A subsequent optimization
	       might arrange to use that reg for real.  */
	    && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    && ! side_effects_p (SET_SRC (set))
	    && ! find_reg_note (insn, REG_RETVAL, 0)
	    /* An ADDRESSOF expression can turn into a use of the internal arg
	       pointer, so do not delete the initialization of the internal
	       arg pointer yet.  If it is truly dead, flow will delete the
	       initializing insn.  */
	    && SET_DEST (set) != current_function_internal_arg_pointer)
	  delete_insn (insn);
      }

  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  changed = 1;
  old_max_reg = max_reg_num ();
  while (changed)
    {
      changed = 0;

      for (insn = f; insn; insn = next)
	{
	  rtx reallabelprev;
	  rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
	  rtx nlabel;
	  int this_is_simplejump, this_is_condjump, reversep = 0;
	  int this_is_condjump_in_parallel;

	  next = NEXT_INSN (insn);

	  /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
	     jump.  Try to optimize by duplicating the loop exit test if so.
	     This is only safe immediately after regscan, because it uses
	     the values of regno_first_uid and regno_last_uid.  Don't do this
	     if optimizing for size.  */

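	  /* Illustrative sketch of the transformation: a loop compiled as

		 goto test;
	     body:  ...
	     test:  if (cond) goto body;

	     has its exit test duplicated at the entry,

		 if (cond) { body: ...  if (cond) goto body; }

	     so no unconditional jump is executed on the way in.  */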
	  if (! optimize_size
	      && after_regscan
	      && GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      && (temp1 = next_nonnote_insn (insn)) != 0
	      && simplejump_p (temp1))
	    {
	      temp = PREV_INSN (insn);
	      if (duplicate_loop_exit_test (insn))
		{
		  changed = 1;
		  next = NEXT_INSN (temp);
		  continue;
		}
	    }

	  if (GET_CODE (insn) != JUMP_INSN)
	    continue;

	  this_is_simplejump = simplejump_p (insn);
	  this_is_condjump = condjump_p (insn);
	  this_is_condjump_in_parallel = condjump_in_parallel_p (insn);

	  /* Tension the labels in dispatch tables.  */

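	  /* That is (sketch): a table entry targeting a label that holds
	     only "goto L2" is rewritten to target L2 directly, shortening
	     the dispatch path.  */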
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 0);
	  if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 1);

	  /* See if this jump goes to another jump and redirect if so.  */
	  nlabel = follow_jumps (JUMP_LABEL (insn));
	  if (nlabel != JUMP_LABEL (insn))
	    changed |= redirect_jump (insn, nlabel);

	  /* If a dispatch table always goes to the same place,
	     get rid of it and replace the insn that uses it.  */

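	  /* E.g. (sketch): when every entry of the table targets the same
	     label L, the tablejump that dispatches through the table is
	     redirected straight to L below.  */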
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      int i;
	      rtx pat = PATTERN (insn);
	      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
	      int len = XVECLEN (pat, diff_vec_p);
	      rtx dispatch = prev_real_insn (insn);
	      rtx set;

	      for (i = 0; i < len; i++)
		if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
		    != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
		  break;

	      if (i == len
		  && dispatch != 0
		  && GET_CODE (dispatch) == JUMP_INSN
		  && JUMP_LABEL (dispatch) != 0
		  /* Don't mess with a casesi insn.
		     XXX according to the comment before computed_jump_p(),
		     all casesi insns should be a parallel of the jump
		     and a USE of a LABEL_REF.  */
		  && ! ((set = single_set (dispatch)) != NULL
			&& (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
		  && next_real_insn (JUMP_LABEL (dispatch)) == insn)
		{
		  redirect_tablejump (dispatch,
				      XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
		  changed = 1;
		}
	    }

	  /* If a jump references the end of the function, try to turn
	     it into a RETURN insn, possibly a conditional one.  */
	  if (JUMP_LABEL (insn) != 0
	      && (next_active_insn (JUMP_LABEL (insn)) == 0
		  || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
		     == RETURN))
	    changed |= redirect_jump (insn, NULL_RTX);

	  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

	  /* Detect jump to following insn.  */
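	  /* E.g. (sketch): for "if (c) goto L;  L: ..." control reaches L
	     whether or not the branch is taken, so the jump is deleted.  */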
	  if (reallabelprev == insn && this_is_condjump)
	    {
	      next = next_real_insn (JUMP_LABEL (insn));
	      delete_jump (insn);
	      changed = 1;
	      continue;
	    }

	  /* Detect a conditional jump going to the same place
	     as an immediately following unconditional jump.  */
	  else if (this_is_condjump
		   && (temp = next_active_insn (insn)) != 0
		   && simplejump_p (temp)
		   && (next_active_insn (JUMP_LABEL (insn))
		       == next_active_insn (JUMP_LABEL (temp))))
	    {
	      /* Don't mess up test coverage analysis.  */
	      temp2 = temp;
	      if (flag_test_coverage && !reload_completed)
		for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
		  if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
		    break;

	      if (temp2 == temp)
		{
		  delete_jump (insn);
		  changed = 1;
		  continue;
		}
	    }

	  /* Detect a conditional jump jumping over an unconditional jump.  */

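	  /* E.g. (sketch):

		 if (c) goto L1;  goto L2;  L1: ...

	     is inverted below into

		 if (!c) goto L2;  L1: ...  */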
	  else if ((this_is_condjump || this_is_condjump_in_parallel)
		   && ! this_is_simplejump
		   && reallabelprev != 0
		   && GET_CODE (reallabelprev) == JUMP_INSN
		   && prev_active_insn (reallabelprev) == insn
		   && no_labels_between_p (insn, reallabelprev)
		   && simplejump_p (reallabelprev))
	    {
	      /* When we invert the unconditional jump, we will be
		 decrementing the usage count of its old label.
		 Make sure that we don't delete it now because that
		 might cause the following code to be deleted.  */
	      rtx prev_uses = prev_nonnote_insn (reallabelprev);
	      rtx prev_label = JUMP_LABEL (insn);

	      if (prev_label)
		++LABEL_NUSES (prev_label);

	      if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
		{
		  /* It is very likely that if there are USE insns before
		     this jump, they hold REG_DEAD notes.  These REG_DEAD
		     notes are no longer valid due to this optimization,
		     and will cause the life analysis done by following
		     passes (notably delayed-branch scheduling) to think
		     that these registers are dead when they are not.

		     To prevent this trouble, we just remove the USE insns
		     from the insn chain.  */

		  while (prev_uses && GET_CODE (prev_uses) == INSN
			 && GET_CODE (PATTERN (prev_uses)) == USE)
		    {
		      rtx useless = prev_uses;
		      prev_uses = prev_nonnote_insn (prev_uses);
		      delete_insn (useless);
		    }

		  delete_insn (reallabelprev);
		  changed = 1;
		}

	      /* We can now safely delete the label if it is unreferenced
		 since the delete_insn above has deleted the BARRIER.  */
	      if (prev_label && --LABEL_NUSES (prev_label) == 0)
		delete_insn (prev_label);

	      next = NEXT_INSN (insn);
	    }

	  /* If we have an unconditional jump preceded by a USE, try to put
	     the USE before the target and jump there.  This simplifies many
	     of the optimizations below since we don't have to worry about
	     dealing with these USE insns.  We only do this if the label
	     being branched to already has the identical USE or if code
	     never falls through to that label.  */

	  else if (this_is_simplejump
		   && (temp = prev_nonnote_insn (insn)) != 0
		   && GET_CODE (temp) == INSN
		   && GET_CODE (PATTERN (temp)) == USE
		   && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
		   && (GET_CODE (temp1) == BARRIER
		       || (GET_CODE (temp1) == INSN
			   && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
		   /* Don't do this optimization if we have a loop containing
		      only the USE instruction, and the loop start label has
		      a usage count of 1.  This is because we will redo this
		      optimization every time through the outer loop, and jump
		      opt will never exit.  */
		   && ! ((temp2 = prev_nonnote_insn (temp)) != 0
			 && temp2 == JUMP_LABEL (insn)
			 && LABEL_NUSES (temp2) == 1))
	    {
	      if (GET_CODE (temp1) == BARRIER)
		{
		  emit_insn_after (PATTERN (temp), temp1);
		  temp1 = NEXT_INSN (temp1);
		}

	      delete_insn (temp);
	      redirect_jump (insn, get_label_before (temp1));
	      reallabelprev = prev_real_insn (temp1);
	      changed = 1;
	      next = NEXT_INSN (insn);
	    }

	  /* Simplify   if (...) x = a; else x = b; by converting it
	     to         x = b; if (...) x = a;
	     if B is sufficiently simple, the test doesn't involve X,
	     and nothing in the test modifies B or X.

	     If we have small register classes, we also can't do this if X
	     is a hard register.

	     If the "x = b;" insn has any REG_NOTES, we don't do this because
	     of the possibility that we are running after CSE and there is a
	     REG_EQUAL note that is only valid if the branch has already been
	     taken.  If we move the insn with the REG_EQUAL note, we may
	     fold the comparison to always be false in a later CSE pass.
	     (We could also delete the REG_NOTES when moving the insn, but it
	     seems simpler to not move it.)  An exception is that we can move
	     the insn if the only note is a REG_EQUAL or REG_EQUIV whose
	     value is the same as "b".

	     INSN is the branch over the `else' part.

	     We set:

	     TEMP to the jump insn preceding "x = a;"
	     TEMP1 to X
	     TEMP2 to the insn that sets "x = b;"
	     TEMP3 to the insn that sets "x = a;"
	     TEMP4 to the set of "x = b";  */

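	  /* Source-level sketch:

		 if (c) x = a; else x = b;

	     becomes

		 x = b;  if (c) x = a;

	     removing the unconditional jump over the else-arm.  */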
	  if (this_is_simplejump
	      && (temp3 = prev_active_insn (insn)) != 0
	      && GET_CODE (temp3) == INSN
	      && (temp4 = single_set (temp3)) != 0
	      && GET_CODE (temp1 = SET_DEST (temp4)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && (temp2 = next_active_insn (insn)) != 0
	      && GET_CODE (temp2) == INSN
	      && (temp4 = single_set (temp2)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp1)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && (REG_NOTES (temp2) == 0
		  || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
		       || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
		      && XEXP (REG_NOTES (temp2), 1) == 0
		      && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
				      SET_SRC (temp4))))
	      && (temp = prev_active_insn (temp3)) != 0
	      && condjump_p (temp) && ! simplejump_p (temp)
	      /* TEMP must skip over the "x = a;" insn */
	      && prev_real_insn (JUMP_LABEL (temp)) == insn
	      && no_labels_between_p (insn, JUMP_LABEL (temp))
	      /* There must be no other entries to the "x = b;" insn.  */
	      && no_labels_between_p (JUMP_LABEL (temp), temp2)
	      /* INSN must either branch to the insn after TEMP2 or the insn
		 after TEMP2 must branch to the same place as INSN.  */
	      && (reallabelprev == temp2
		  || ((temp5 = next_active_insn (temp2)) != 0
		      && simplejump_p (temp5)
		      && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
	    {
	      /* The test expression, X, may be a complicated test with
		 multiple branches.  See if we can find all the uses of
		 the label that TEMP branches to without hitting a CALL_INSN
		 or a jump to somewhere else.  */
	      rtx target = JUMP_LABEL (temp);
	      int nuses = LABEL_NUSES (target);
	      rtx p;
#ifdef HAVE_cc0
	      rtx q;
#endif

	      /* Set P to the first jump insn that goes around "x = a;".  */
	      for (p = temp; nuses && p; p = prev_nonnote_insn (p))
		{
		  if (GET_CODE (p) == JUMP_INSN)
		    {
		      if (condjump_p (p) && ! simplejump_p (p)
			  && JUMP_LABEL (p) == target)
			{
			  nuses--;
			  if (nuses == 0)
			    break;
			}
		      else
			break;
		    }
		  else if (GET_CODE (p) == CALL_INSN)
		    break;
		}

#ifdef HAVE_cc0
	      /* We cannot insert anything between a set of cc and its use
		 so if P uses cc0, we must back up to the previous insn.  */
	      q = prev_nonnote_insn (p);
	      if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
		  && sets_cc0_p (PATTERN (q)))
		p = q;
#endif

	      if (p)
		p = PREV_INSN (p);

	      /* If we found all the uses and there was no data conflict, we
		 can move the assignment unless we can branch into the middle
		 from somewhere.  */
	      if (nuses == 0 && p
		  && no_labels_between_p (p, insn)
		  && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
		  && ! reg_set_between_p (temp1, p, temp3)
		  && (GET_CODE (SET_SRC (temp4)) == CONST_INT
		      || ! modified_between_p (SET_SRC (temp4), p, temp2))
		  /* Verify that registers used by the jump are not clobbered
		     by the instruction being moved.  */
		  && ! regs_set_between_p (PATTERN (temp),
					   PREV_INSN (temp2),
					   NEXT_INSN (temp2)))
		{
		  emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
		  delete_insn (temp2);

		  /* Set NEXT to an insn that we know won't go away.  */
		  next = next_active_insn (insn);

		  /* Delete the jump around the set.  Note that we must do
		     this before we redirect the test jumps so that it won't
		     delete the code immediately following the assignment
		     we moved (which might be a jump).  */

		  delete_insn (insn);

		  /* We either have two consecutive labels or a jump to
		     a jump, so adjust all the JUMP_INSNs to branch to where
		     INSN branches to.  */
		  for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
		    if (GET_CODE (p) == JUMP_INSN)
		      redirect_jump (p, target);

		  changed = 1;
		  next = NEXT_INSN (insn);
		  continue;
		}
	    }

	  /* Simplify   if (...) { x = a; goto l; } x = b; by converting it
	     to         x = a; if (...) goto l; x = b;
	     if A is sufficiently simple, the test doesn't involve X,
	     and nothing in the test modifies A or X.

	     If we have small register classes, we also can't do this if X
	     is a hard register.

	     If the "x = a;" insn has any REG_NOTES, we don't do this because
	     of the possibility that we are running after CSE and there is a
	     REG_EQUAL note that is only valid if the branch has already been
	     taken.  If we move the insn with the REG_EQUAL note, we may
	     fold the comparison to always be false in a later CSE pass.
	     (We could also delete the REG_NOTES when moving the insn, but it
	     seems simpler to not move it.)  An exception is that we can move
	     the insn if the only note is a REG_EQUAL or REG_EQUIV whose
	     value is the same as "a".

	     INSN is the goto.

	     We set:

	     TEMP to the jump insn preceding "x = a;"
	     TEMP1 to X
	     TEMP2 to the insn that sets "x = b;"
	     TEMP3 to the insn that sets "x = a;"
	     TEMP4 to the set of "x = a";  */

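	  /* Source-level sketch:

		 if (c) { x = a; goto l; }  x = b;

	     becomes

		 x = a;  if (c) goto l;  x = b;

	     so the conditional branch no longer jumps over "x = a;".  */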
	  if (this_is_simplejump
	      && (temp2 = next_active_insn (insn)) != 0
	      && GET_CODE (temp2) == INSN
	      && (temp4 = single_set (temp2)) != 0
	      && GET_CODE (temp1 = SET_DEST (temp4)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && (temp3 = prev_active_insn (insn)) != 0
	      && GET_CODE (temp3) == INSN
	      && (temp4 = single_set (temp3)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp1)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && (REG_NOTES (temp3) == 0
		  || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
		       || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
		      && XEXP (REG_NOTES (temp3), 1) == 0
		      && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
				      SET_SRC (temp4))))
	      && (temp = prev_active_insn (temp3)) != 0
	      && condjump_p (temp) && ! simplejump_p (temp)
	      /* TEMP must skip over the "x = a;" insn */
	      && prev_real_insn (JUMP_LABEL (temp)) == insn
	      && no_labels_between_p (temp, insn))
	    {
	      rtx prev_label = JUMP_LABEL (temp);
	      rtx insert_after = prev_nonnote_insn (temp);

#ifdef HAVE_cc0
	      /* We cannot insert anything between a set of cc and its use.  */
	      if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
		  && sets_cc0_p (PATTERN (insert_after)))
		insert_after = prev_nonnote_insn (insert_after);
#endif
	      ++LABEL_NUSES (prev_label);

	      if (insert_after
		  && no_labels_between_p (insert_after, temp)
		  && ! reg_referenced_between_p (temp1, insert_after, temp3)
		  && ! reg_referenced_between_p (temp1, temp3,
						 NEXT_INSN (temp2))
		  && ! reg_set_between_p (temp1, insert_after, temp)
		  && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
		  /* Verify that registers used by the jump are not clobbered
		     by the instruction being moved.  */
		  && ! regs_set_between_p (PATTERN (temp),
					   PREV_INSN (temp3),
					   NEXT_INSN (temp3))
		  && invert_jump (temp, JUMP_LABEL (insn)))
		{
		  emit_insn_after_with_line_notes (PATTERN (temp3),
						   insert_after, temp3);
		  delete_insn (temp3);
		  delete_insn (insn);
		  /* Set NEXT to an insn that we know won't go away.  */
		  next = temp2;
		  changed = 1;
		}
	      if (prev_label && --LABEL_NUSES (prev_label) == 0)
		delete_insn (prev_label);
	      if (changed)
		continue;
	    }

#if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)

	  /* If we have if (...) x = exp;  and branches are expensive,
	     EXP is a single insn, does not have any side effects, cannot
	     trap, and is not too costly, convert this to
	     t = exp; if (...) x = t;

	     Don't do this when we have CC0 because it is unlikely to help
	     and we'd need to worry about where to place the new insn and
	     the potential for conflicts.  We also can't do this when we have
	     notes on the insn for the same reason as above.

	     If we have conditional arithmetic, this will make this
	     harder to optimize later and isn't needed, so don't do it
	     in that case either.

	     We set:

	     TEMP to the "x = exp;" insn.
	     TEMP1 to the single set in the "x = exp;" insn.
	     TEMP2 to "x".  */

	  if (! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 3
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (reallabelprev == temp
		  || ((temp2 = next_active_insn (temp)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && GET_CODE (SET_SRC (temp1)) != REG
	      && GET_CODE (SET_SRC (temp1)) != SUBREG
	      && GET_CODE (SET_SRC (temp1)) != CONST_INT
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp3 = find_insert_position (insn, temp))
		  && validate_change (temp, &SET_DEST (temp1), new, 0))
		{
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp3), temp);
		  delete_insn (temp);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }

	  /* Similarly, if it takes two insns to compute EXP but they
	     have the same destination.  Here TEMP3 will be the second
	     insn and TEMP4 the SET from that insn.  */

	  if (! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 4
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (temp3 = next_nonnote_insn (temp)) != 0
	      && GET_CODE (temp3) == INSN
	      && REG_NOTES (temp3) == 0
	      && (reallabelprev == temp3
		  || ((temp2 = next_active_insn (temp3)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
	      && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10
	      && (temp4 = single_set (temp3)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp2)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && rtx_cost (SET_SRC (temp4), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp5 = find_insert_position (insn, temp))
		  && (temp6 = find_insert_position (insn, temp3))
		  && validate_change (temp, &SET_DEST (temp1), new, 0))
		{
		  /* Use the earliest of temp5 and temp6.  */
		  if (temp5 != insn)
		    temp6 = temp5;
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp6), temp);
		  emit_insn_after_with_line_notes
		    (replace_rtx (PATTERN (temp3), temp2, new),
		     PREV_INSN (temp6), temp3);
		  delete_insn (temp);
		  delete_insn (temp3);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }

	  /* Finally, handle the case where two insns are used to
	     compute EXP but a temporary register is used.  Here we must
	     ensure that the temporary register is not used anywhere else.  */

	  if (! reload_completed
	      && after_regscan
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 4
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (temp3 = next_nonnote_insn (temp)) != 0
	      && GET_CODE (temp3) == INSN
	      && REG_NOTES (temp3) == 0
	      && (reallabelprev == temp3
		  || ((temp2 = next_active_insn (temp3)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp5 = SET_DEST (temp1),
		  (GET_CODE (temp5) == REG
		   || (GET_CODE (temp5) == SUBREG
		       && (temp5 = SUBREG_REG (temp5),
			   GET_CODE (temp5) == REG))))
	      && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
	      && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
	      && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10
	      && (temp4 = single_set (temp3)) != 0
	      && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
	      && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && rtx_equal_p (SET_DEST (temp4), temp2)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && rtx_cost (SET_SRC (temp4), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp5 = find_insert_position (insn, temp))
		  && (temp6 = find_insert_position (insn, temp3))
		  && validate_change (temp3, &SET_DEST (temp4), new, 0))
		{
		  /* Use the earliest of temp5 and temp6.  */
		  if (temp5 != insn)
		    temp6 = temp5;
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp6), temp);
		  emit_insn_after_with_line_notes (PATTERN (temp3),
						   PREV_INSN (temp6), temp3);
		  delete_insn (temp);
		  delete_insn (temp3);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }
#endif /* ! HAVE_cc0 && ! HAVE_conditional_arithmetic */

#ifdef HAVE_conditional_arithmetic
	  /* ??? This is disabled in genconfig, as this simple-minded
	     transformation can incredibly lengthen register lifetimes.

	     Consider this example from cexp.c's yyparse:

		234 (set (pc)
		      (if_then_else (ne (reg:DI 149) (const_int 0 [0x0]))
			(label_ref 248) (pc)))
		237 (set (reg/i:DI 0 $0) (const_int 1 [0x1]))
		239 (set (pc) (label_ref 2382))
		248 (code_label ("yybackup"))

	     This will be transformed to:

		237 (set (reg/i:DI 0 $0)
		      (if_then_else:DI (eq (reg:DI 149) (const_int 0 [0x0]))
			(const_int 1 [0x1]) (reg/i:DI 0 $0)))
		239 (set (pc)
		      (if_then_else (eq (reg:DI 149) (const_int 0 [0x0]))
			(label_ref 2382) (pc)))

	     which, from this narrow viewpoint looks fine.  Except that
	     between this and 3 other occurrences of the same pattern, $0
	     is now live for basically the entire function, and we'll
	     get an abort in caller_save.

	     Any replacement for this code should recall that a set of
	     a register that is not live need not, and indeed should not,
	     be conditionalized.  Either that, or delay the transformation
	     until after register allocation.  */

	  /* See if this is a conditional jump around a small number of
	     instructions that we can conditionalize.  Don't do this before
	     the initial CSE pass or after reload.

	     We reject any insns that have side effects or may trap.
	     Strictly speaking, this is not needed since the machine may
	     support conditionalizing these too, but we won't deal with that
	     now.  Specifically, this means that we can't conditionalize a
	     CALL_INSN, which some machines, such as the ARC, can do, but
	     this is a very minor optimization.  */
	  if (this_is_condjump && ! this_is_simplejump
	      && cse_not_expected && optimize > 0 && ! reload_completed
	      && BRANCH_COST > 2
	      && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (insn)), 0),
					   insn))
	    {
	      rtx ourcond = XEXP (SET_SRC (PATTERN (insn)), 0);
	      int num_insns = 0;
	      char *storage = (char *) oballoc (0);
	      int last_insn = 0, failed = 0;
	      rtx changed_jump = 0;

	      ourcond = gen_rtx (reverse_condition (GET_CODE (ourcond)),
				 VOIDmode, XEXP (ourcond, 0),
				 XEXP (ourcond, 1));

	      /* Scan forward BRANCH_COST real insns looking for the JUMP_LABEL
		 of this insn.  We see if we think we can conditionalize the
		 insns we pass.  For now, we only deal with insns that have
		 one SET.  We stop after an insn that modifies anything in
		 OURCOND, if we have too many insns, or if we have an insn
		 with a side effect or that may trap.  Note that we will
		 be modifying any unconditional jumps we encounter to be
		 conditional; this will have the effect of also doing this
		 optimization on the "else" the next time around.  */
	      for (temp1 = NEXT_INSN (insn);
		   num_insns <= BRANCH_COST && ! failed && temp1 != 0
		   && GET_CODE (temp1) != CODE_LABEL;
		   temp1 = NEXT_INSN (temp1))
		{
		  /* Ignore everything but an active insn.  */
		  if (GET_RTX_CLASS (GET_CODE (temp1)) != 'i'
		      || GET_CODE (PATTERN (temp1)) == USE
		      || GET_CODE (PATTERN (temp1)) == CLOBBER)
		    continue;

		  /* If this was an unconditional jump, record it since we'll
		     need to remove the BARRIER if we succeed.  We can only
		     have one such jump since there must be a label after
		     the BARRIER and it's either ours, in which case it's the
		     only one or some other, in which case we'd fail.
		     Likewise if it's a CALL_INSN followed by a BARRIER.  */

		  if (simplejump_p (temp1)
		      || (GET_CODE (temp1) == CALL_INSN
			  && NEXT_INSN (temp1) != 0
			  && GET_CODE (NEXT_INSN (temp1)) == BARRIER))
		    {
		      if (changed_jump == 0)
			changed_jump = temp1;
		      else
			changed_jump
			  = gen_rtx_INSN_LIST (VOIDmode, temp1, changed_jump);
		    }

		  /* See if we are allowed another insn and if this insn
		     is one we think we may be able to handle.  */
		  if (++num_insns > BRANCH_COST
		      || last_insn
		      || (((temp2 = single_set (temp1)) == 0
			   || side_effects_p (SET_SRC (temp2))
			   || may_trap_p (SET_SRC (temp2)))
			  && GET_CODE (temp1) != CALL_INSN))
		    failed = 1;
		  else if (temp2 != 0)
		    validate_change (temp1, &SET_SRC (temp2),
				     gen_rtx_IF_THEN_ELSE
				     (GET_MODE (SET_DEST (temp2)),
				      copy_rtx (ourcond),
				      SET_SRC (temp2), SET_DEST (temp2)),
				     1);
		  else
		    {
		      /* This is a CALL_INSN that doesn't have a SET.  */
		      rtx *call_loc = &PATTERN (temp1);

		      if (GET_CODE (*call_loc) == PARALLEL)
			call_loc = &XVECEXP (*call_loc, 0, 0);

		      validate_change (temp1, call_loc,
				       gen_rtx_IF_THEN_ELSE
				       (VOIDmode, copy_rtx (ourcond),
					*call_loc, const0_rtx),
				       1);
		    }


		  if (modified_in_p (ourcond, temp1))
		    last_insn = 1;
		}

	      /* If we've reached our jump label, haven't failed, and all
		 the changes above are valid, we can delete this jump
		 insn.  Also remove a BARRIER after any jump that used
		 to be unconditional and remove any REG_EQUAL or REG_EQUIV
		 that might have previously been present on insns we
		 made conditional.  */
	      if (temp1 == JUMP_LABEL (insn) && ! failed
		  && apply_change_group ())
		{
		  for (temp1 = NEXT_INSN (insn); temp1 != JUMP_LABEL (insn);
		       temp1 = NEXT_INSN (temp1))
		    if (GET_RTX_CLASS (GET_CODE (temp1)) == 'i')
		      for (temp2 = REG_NOTES (temp1); temp2 != 0;
			   temp2 = XEXP (temp2, 1))
			if (REG_NOTE_KIND (temp2) == REG_EQUAL
			    || REG_NOTE_KIND (temp2) == REG_EQUIV)
			  remove_note (temp1, temp2);

		  if (changed_jump != 0)
		    {
		      while (GET_CODE (changed_jump) == INSN_LIST)
			{
			  delete_barrier (NEXT_INSN (XEXP (changed_jump, 0)));
			  changed_jump = XEXP (changed_jump, 1);
			}

		      delete_barrier (NEXT_INSN (changed_jump));
		    }

		  delete_insn (insn);
		  changed = 1;
		  continue;
		}
	      else
		{
		  cancel_changes (0);
		  obfree (storage);
		}
	    }
#endif
	  /* If branches are expensive, convert
		if (foo) bar++;    to    bar += (foo != 0);
	     and similarly for "bar--;"

	     INSN is the conditional branch around the arithmetic.  We set:

	     TEMP is the arithmetic insn.
	     TEMP1 is the SET doing the arithmetic.
	     TEMP2 is the operand being incremented or decremented.
	     TEMP3 to the condition being tested.
	     TEMP4 to the earliest insn used to find the condition.  */

	  if ((BRANCH_COST >= 2
#ifdef HAVE_incscc
	       || HAVE_incscc
#endif
#ifdef HAVE_decscc
	       || HAVE_decscc
#endif
	       )
	      && ! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && (temp = next_nonnote_insn (insn)) != 0
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1),
		  GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
	      && GET_CODE (SET_SRC (temp1)) == PLUS
	      && (XEXP (SET_SRC (temp1), 1) == const1_rtx
		  || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
	      && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
	      && ! side_effects_p (temp2)
	      && ! may_trap_p (temp2)
	      /* INSN must either branch to the insn after TEMP or the insn
		 after TEMP must branch to the same place as INSN.  */
	      && (reallabelprev == temp
		  || ((temp3 = next_active_insn (temp)) != 0
		      && simplejump_p (temp3)
		      && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
	      && (temp3 = get_condition (insn, &temp4)) != 0
	      /* We must be comparing objects whose modes imply the size.
		 We could handle BLKmode if (1) emit_store_flag could
		 and (2) we could find the size reliably.  */
	      && GET_MODE (XEXP (temp3, 0)) != BLKmode
	      && can_reverse_comparison_p (temp3, insn))
	    {
	      rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
	      enum rtx_code code = reverse_condition (GET_CODE (temp3));

	      start_sequence ();

	      /* It must be the case that TEMP2 is not modified in the range
		 [TEMP4, INSN).  The one exception we make is if the insn
		 before INSN sets TEMP2 to something which is also unchanged
		 in that range.  In that case, we can move the initialization
		 into our sequence.  */

	      if ((temp5 = prev_active_insn (insn)) != 0
		  && no_labels_between_p (temp5, insn)
		  && GET_CODE (temp5) == INSN
		  && (temp6 = single_set (temp5)) != 0
		  && rtx_equal_p (temp2, SET_DEST (temp6))
		  && (CONSTANT_P (SET_SRC (temp6))
		      || GET_CODE (SET_SRC (temp6)) == REG
		      || GET_CODE (SET_SRC (temp6)) == SUBREG))
		{
		  emit_insn (PATTERN (temp5));
		  init_insn = temp5;
		  init = SET_SRC (temp6);
		}

	      if (CONSTANT_P (init)
		  || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
		target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
					  XEXP (temp3, 0), XEXP (temp3, 1),
					  VOIDmode,
					  (code == LTU || code == LEU
					   || code == GTU || code == GEU), 1);

	      /* If we can do the store-flag, do the addition or
		 subtraction.  */

	      if (target)
		target = expand_binop (GET_MODE (temp2),
				       (XEXP (SET_SRC (temp1), 1) == const1_rtx
					? add_optab : sub_optab),
				       temp2, target, temp2, 0, OPTAB_WIDEN);

	      if (target != 0)
		{
		  /* Put the result back in temp2 in case it isn't already.
		     Then replace the jump, possibly a CC0-setting insn in
		     front of the jump, and TEMP, with the sequence we have
		     made.  */

		  if (target != temp2)
		    emit_move_insn (temp2, target);

		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, temp4);
		  delete_insn (temp);

		  if (init_insn)
		    delete_insn (init_insn);

		  next = NEXT_INSN (insn);
#ifdef HAVE_cc0
		  delete_insn (prev_nonnote_insn (insn));
#endif
		  delete_insn (insn);

		  if (after_regscan)
		    {
		      reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }

		  changed = 1;
		  continue;
		}
	      else
		end_sequence ();
	    }

	  /* Try to use a conditional move (if the target has them), or a
	     store-flag insn.  If the target has conditional arithmetic as
	     well as conditional move, the above code will have done something.
	     Note that we prefer the above code since it is more general: the
	     code below can make changes that require work to undo.

	     The general case here is:

	     1) x = a; if (...) x = b; and
	     2) if (...) x = b;

	     If the jump would be faster, the machine should not have defined
	     the movcc or scc insns!  These cases are often made by the
	     previous optimization.

	     The second case is treated as  x = x; if (...) x = b;.

	     INSN here is the jump around the store.  We set:

	     TEMP to the "x op= b;" insn.
	     TEMP1 to X.
	     TEMP2 to B.
	     TEMP3 to A (X in the second case).
	     TEMP4 to the condition being tested.
	     TEMP5 to the earliest insn used to find the condition.
	     TEMP6 to the SET of TEMP.  */

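	  /* Sketch of case 1 via conditional move: for

		 x = a;  if (c) x = b;

	     the code below ends up computing, roughly,

		 x = c ? b : a;

	     with the comparison operands first copied into fresh pseudos
	     so later side effects cannot disturb them.  */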
	  if (/* We can't do this after reload has completed.  */
	      ! reload_completed
#ifdef HAVE_conditional_arithmetic
	      /* Defer this until after CSE so the above code gets the
		 first crack at it.  */
	      && cse_not_expected
#endif
	      && this_is_condjump && ! this_is_simplejump
	      /* Set TEMP to the "x = b;" insn.  */
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && (temp6 = single_set (temp)) != NULL_RTX
	      && GET_CODE (temp1 = SET_DEST (temp6)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && ! side_effects_p (temp2 = SET_SRC (temp6))
	      && ! may_trap_p (temp2)
	      /* Allow either form, but prefer the former if both apply.
		 There is no point in using the old value of TEMP1 if
		 it is a register, since cse will alias them.  It can
		 lose if the old value were a hard register since CSE
		 won't replace hard registers.  Avoid using TEMP3 if
		 small register classes and it is a hard register.  */
	      && (((temp3 = reg_set_last (temp1, insn)) != 0
		   && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
			 && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
		  /* Make the latter case look like  x = x; if (...) x = b;  */
		  || (temp3 = temp1, 1))
	      /* INSN must either branch to the insn after TEMP or the insn
		 after TEMP must branch to the same place as INSN.  */
	      && (reallabelprev == temp
		  || ((temp4 = next_active_insn (temp)) != 0
		      && simplejump_p (temp4)
		      && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
	      && (temp4 = get_condition (insn, &temp5)) != 0
	      /* We must be comparing objects whose modes imply the size.
		 We could handle BLKmode if (1) emit_store_flag could
		 and (2) we could find the size reliably.  */
	      && GET_MODE (XEXP (temp4, 0)) != BLKmode
	      /* Even if branches are cheap, the store_flag optimization
		 can win when the operation to be performed can be
		 expressed directly.  */
#ifdef HAVE_cc0
	      /* If the previous insn sets CC0 and something else, we can't
		 do this since we are going to delete that insn.  */

	      && ! ((temp6 = prev_nonnote_insn (insn)) != 0
		    && GET_CODE (temp6) == INSN
		    && (sets_cc0_p (PATTERN (temp6)) == -1
			|| (sets_cc0_p (PATTERN (temp6)) == 1
			    && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
#endif
	      )
	    {
#ifdef HAVE_conditional_move
	      /* First try a conditional move.  */
	      {
		enum rtx_code code = GET_CODE (temp4);
		rtx var = temp1;
		rtx cond0, cond1, aval, bval;
		rtx target, new_insn;

		/* Copy the compared variables into cond0 and cond1, so that
		   any side effects performed in or after the old comparison,
		   will not affect our compare which will come later.  */
		/* ??? Is it possible to just use the comparison in the jump
		   insn?  After all, we're going to delete it.  We'd have
		   to modify emit_conditional_move to take a comparison rtx
		   instead or write a new function.  */
		cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
		/* We want the target to be able to simplify comparisons with
		   zero (and maybe other constants as well), so don't create
		   pseudos for them.  There's no need to either.  */
		if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
		    || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
		  cond1 = XEXP (temp4, 1);
		else
		  cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));

		/* Careful about copying these values -- an IOR or what may
		   need to do other things, like clobber flags.  */
		/* ??? Assume for the moment that AVAL is ok.  */
		aval = temp3;

		start_sequence ();

		/* We're dealing with a single_set insn with no side effects
		   on SET_SRC.  We do need to be reasonably certain that if
		   we need to force BVAL into a register that we won't
		   clobber the flags -- general_operand should suffice.  */
		if (general_operand (temp2, GET_MODE (var)))
		  bval = temp2;
		else
		  {
		    bval = gen_reg_rtx (GET_MODE (var));
		    new_insn = copy_rtx (temp);
		    temp6 = single_set (new_insn);
		    SET_DEST (temp6) = bval;
		    emit_insn (PATTERN (new_insn));
		  }

		target = emit_conditional_move (var, code,
						cond0, cond1, VOIDmode,
						aval, bval, GET_MODE (var),
						(code == LTU || code == GEU
						 || code == LEU || code == GTU));

		if (target)
		  {
		    rtx seq1, seq2, last;
		    int copy_ok;

		    /* Save the conditional move sequence but don't emit it
		       yet.  On some machines, like the alpha, it is possible
		       that temp5 == insn, so next generate the sequence that
		       saves the compared values and then emit both
		       sequences ensuring seq1 occurs before seq2.  */
		    seq2 = get_insns ();
		    end_sequence ();

		    /* "Now that we can't fail..."  Famous last words.
		       Generate the copy insns that preserve the compared
		       values.  */
		    start_sequence ();
		    emit_move_insn (cond0, XEXP (temp4, 0));
		    if (cond1 != XEXP (temp4, 1))
		      emit_move_insn (cond1, XEXP (temp4, 1));
		    seq1 = get_insns ();
		    end_sequence ();

		    /* Validate the sequence -- this may be some weird
		       bit-extract-and-test instruction for which there
		       exists no complementary bit-extract insn.  */
		    copy_ok = 1;
		    for (last = seq1; last ; last = NEXT_INSN (last))
		      if (recog_memoized (last) < 0)
			{
			  copy_ok = 0;
			  break;
			}

		    if (copy_ok)
		      {
			emit_insns_before (seq1, temp5);

			/* Insert conditional move after insn, to be sure
			   that the jump and a possible compare won't be
			   separated.  */
			last = emit_insns_after (seq2, insn);

			/* ??? We can also delete the insn that sets X to A.
			   Flow will do it too though.  */
			delete_insn (temp);
			next = NEXT_INSN (insn);
			delete_jump (insn);

			if (after_regscan)
			  {
			    reg_scan_update (seq1, NEXT_INSN (last),
					     old_max_reg);
			    old_max_reg = max_reg_num ();
			  }

			changed = 1;
			continue;
		      }
		  }
		else
		  end_sequence ();
	      }
#endif

	      /* That didn't work, try a store-flag insn.

		 We further divide the cases into:

		 1) x = a; if (...) x = b; and either A or B is zero,
		 2) if (...) x = 0; and jumps are expensive,
		 3) x = a; if (...) x = b; and A and B are constants where all
		    the set bits in A are also set in B and jumps are expensive,
		 4) x = a; if (...) x = b; and A and B are non-zero, and jumps
		    are more expensive, and
		 5) if (...) x = b; if jumps are even more expensive.  */

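	      /* Worked sketch for the non-zero A and B case: with FLAG
		 equal to -1 when the jump condition holds and 0 otherwise,
		 the branch-free form computed below is

		     x = (UVAL & FLAG) | (CVAL & ~FLAG)

		 where UVAL is the value x must have when FLAG is -1 and
		 CVAL the value otherwise, costing an AND, an AND of the
		 complement, and an IOR instead of a branch.  */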
	      if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
		  /* We will be passing this as operand into expand_and.  No
		     good if it's not valid as an operand.  */
		  && general_operand (temp2, GET_MODE (temp2))
		  && ((GET_CODE (temp3) == CONST_INT)
		      /* Make the latter case look like
			 x = x; if (...) x = 0;  */
		      || (temp3 = temp1,
			  ((BRANCH_COST >= 2
			    && temp2 == const0_rtx)
			   || BRANCH_COST >= 3)))
		  /* If B is zero, OK; if A is zero, can only do (1) if we
		     can reverse the condition.  See if (3) applies possibly
		     by reversing the condition.  Prefer reversing to (4) when
		     branches are very expensive.  */
		  && (((BRANCH_COST >= 2
			|| STORE_FLAG_VALUE == -1
			|| (STORE_FLAG_VALUE == 1
			    /* Check that the mask is a power of two,
			       so that it can probably be generated
			       with a shift.  */
			    && GET_CODE (temp3) == CONST_INT
			    && exact_log2 (INTVAL (temp3)) >= 0))
		       && (reversep = 0, temp2 == const0_rtx))
		      || ((BRANCH_COST >= 2
			   || STORE_FLAG_VALUE == -1
			   || (STORE_FLAG_VALUE == 1
			       && GET_CODE (temp2) == CONST_INT
			       && exact_log2 (INTVAL (temp2)) >= 0))
			  && temp3 == const0_rtx
			  && (reversep = can_reverse_comparison_p (temp4, insn)))
		      || (BRANCH_COST >= 2
			  && GET_CODE (temp2) == CONST_INT
			  && GET_CODE (temp3) == CONST_INT
			  && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
			      || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
				  && (reversep = can_reverse_comparison_p (temp4,
									   insn)))))
		      || BRANCH_COST >= 3)
		  )
		{
		  enum rtx_code code = GET_CODE (temp4);
		  rtx uval, cval, var = temp1;
		  int normalizep;
		  rtx target;

		  /* If necessary, reverse the condition.  */
		  if (reversep)
		    code = reverse_condition (code), uval = temp2, cval = temp3;
		  else
		    uval = temp3, cval = temp2;

		  /* If CVAL is non-zero, normalize to -1.  Otherwise, if UVAL
		     is the constant 1, it is best to just compute the result
		     directly.  If UVAL is constant and STORE_FLAG_VALUE
		     includes all of its bits, it is best to compute the flag
		     value unnormalized and `and' it with UVAL.  Otherwise,
		     normalize to -1 and `and' with UVAL.  */
		  normalizep = (cval != const0_rtx ? -1
				: (uval == const1_rtx ? 1
				   : (GET_CODE (uval) == CONST_INT
				      && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
				   ? 0 : -1));

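		  /* E.g.: a non-zero CVAL forces normalizep == -1 (full
		     mask); UVAL == const1_rtx gives normalizep == 1, so the
		     flag value is the result itself; with STORE_FLAG_VALUE
		     == -1, any constant UVAL allows normalizep == 0 and the
		     raw flag is simply ANDed with UVAL below.  */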
1523 /* We will be putting the store-flag insn immediately in
1524 front of the comparison that was originally being done,
1525 so we know all the variables in TEMP4 will be valid.
1526 However, this might be in front of the assignment of
1527 A to VAR. If it is, it would clobber the store-flag
1528 we will be emitting.
1529
1530 Therefore, emit into a temporary which will be copied to
1531 VAR immediately after TEMP. */
1532
1533 start_sequence ();
1534 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
1535 XEXP (temp4, 0), XEXP (temp4, 1),
1536 VOIDmode,
1537 (code == LTU || code == LEU
1538 || code == GEU || code == GTU),
1539 normalizep);
1540 if (target)
1541 {
1542 rtx seq;
1543 rtx before = insn;
1544
1545 seq = get_insns ();
1546 end_sequence ();
1547
1548 /* Put the store-flag insns in front of the first insn
1549 used to compute the condition to ensure that we
1550 use the same values of them as the current
1551 comparison. However, the remainder of the insns we
1552 generate will be placed directly in front of the
1553 jump insn, in case any of the pseudos we use
1554 are modified earlier. */
1555
1556 emit_insns_before (seq, temp5);
1557
1558 start_sequence ();
1559
1560 /* Both CVAL and UVAL are non-zero. */
1561 if (cval != const0_rtx && uval != const0_rtx)
1562 {
1563 rtx tem1, tem2;
1564
1565 tem1 = expand_and (uval, target, NULL_RTX);
1566 if (GET_CODE (cval) == CONST_INT
1567 && GET_CODE (uval) == CONST_INT
1568 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
1569 tem2 = cval;
1570 else
1571 {
1572 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
1573 target, NULL_RTX, 0);
1574 tem2 = expand_and (cval, tem2,
1575 (GET_CODE (tem2) == REG
1576 ? tem2 : 0));
1577 }
1578
1579 /* If we usually make new pseudos, do so here. This
1580 turns out to help machines that have conditional
1581 move insns. */
1582 /* ??? Conditional moves have already been handled.
1583 This may be obsolete. */
1584
1585 if (flag_expensive_optimizations)
1586 target = 0;
1587
1588 target = expand_binop (GET_MODE (var), ior_optab,
1589 tem1, tem2, target,
1590 1, OPTAB_WIDEN);
1591 }
1592 else if (normalizep != 1)
1593 {
1594 /* We know that either CVAL or UVAL is zero. If
1595 UVAL is zero, negate TARGET and `and' with CVAL.
1596 Otherwise, `and' with UVAL. */
1597 if (uval == const0_rtx)
1598 {
1599 target = expand_unop (GET_MODE (var), one_cmpl_optab,
1600 target, NULL_RTX, 0);
1601 uval = cval;
1602 }
1603
1604 target = expand_and (uval, target,
1605 (GET_CODE (target) == REG
1606 && ! preserve_subexpressions_p ()
1607 ? target : NULL_RTX));
1608 }
1609
1610 emit_move_insn (var, target);
1611 seq = get_insns ();
1612 end_sequence ();
1613 #ifdef HAVE_cc0
1614 /* If INSN uses CC0, we must not separate it from the
1615 insn that sets cc0. */
1616 if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
1617 before = prev_nonnote_insn (before);
1618 #endif
1619 emit_insns_before (seq, before);
1620
1621 delete_insn (temp);
1622 next = NEXT_INSN (insn);
1623 delete_jump (insn);
1624
1625 if (after_regscan)
1626 {
1627 reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
1628 old_max_reg = max_reg_num ();
1629 }
1630
1631 changed = 1;
1632 continue;
1633 }
1634 else
1635 end_sequence ();
1636 }
1637 }
1638
1639
1640 /* Simplify if (...) x = 1; else {...} if (x) ...
1641 We recognize this case scanning backwards as well.
1642
1643 TEMP is the assignment to x;
1644 TEMP1 is the label at the head of the second if. */
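   /* For example, at the source level (hypothetical names):
         if (a) { x = 1; goto test; } else { ... }
      test:
         if (x) foo ();
      Since x == 1 on the jump's path, the jump that skips the else
      arm can be redirected straight into (or past) foo ().  */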
1645 /* ?? This should call get_condition to find the values being
1646 compared, instead of looking for a COMPARE insn when HAVE_cc0
1647 is not defined. This would allow it to work on the m88k. */
1648 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1649 is not defined and the condition is tested by a separate compare
1650 insn. This is because the code below assumes that the result
1651 of the compare dies in the following branch.
1652
1653 Not only that, but there might be other insns between the
1654 compare and branch whose results are live. Those insns need
1655 to be executed.
1656
1657 A way to fix this is to move the insns at JUMP_LABEL (insn)
1658 to before INSN. If we are running before flow, they will
1659 be deleted if they aren't needed. But this doesn't work
1660 well after flow.
1661
1662 This is really a special case of jump threading, anyway. The
1663 right thing to do is to replace this and jump threading with
1664 much simpler code in cse.
1665
1666 This code has been turned off in the non-cc0 case in the
1667 meantime. */
1668
1669 #ifdef HAVE_cc0
1670 else if (this_is_simplejump
1671 /* Safe to skip USE and CLOBBER insns here
1672 since they will not be deleted. */
1673 && (temp = prev_active_insn (insn))
1674 && no_labels_between_p (temp, insn)
1675 && GET_CODE (temp) == INSN
1676 && GET_CODE (PATTERN (temp)) == SET
1677 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1678 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1679 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1680 /* If we find that the next value tested is `x'
1681 (TEMP1 is the insn where this happens), win. */
1682 && GET_CODE (temp1) == INSN
1683 && GET_CODE (PATTERN (temp1)) == SET
1684 #ifdef HAVE_cc0
1685 /* Does temp1 `tst' the value of x? */
1686 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1687 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1688 && (temp1 = next_nonnote_insn (temp1))
1689 #else
1690 /* Does temp1 compare the value of x against zero? */
1691 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1692 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1693 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1694 == SET_DEST (PATTERN (temp)))
1695 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1696 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1697 #endif
1698 && condjump_p (temp1))
1699 {
1700 /* Get the if_then_else from the condjump. */
1701 rtx choice = SET_SRC (PATTERN (temp1));
1702 if (GET_CODE (choice) == IF_THEN_ELSE)
1703 {
1704 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1705 rtx val = SET_SRC (PATTERN (temp));
1706 rtx cond
1707 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1708 val, const0_rtx);
1709 rtx ultimate;
1710
1711 if (cond == const_true_rtx)
1712 ultimate = XEXP (choice, 1);
1713 else if (cond == const0_rtx)
1714 ultimate = XEXP (choice, 2);
1715 else
1716 ultimate = 0;
1717
1718 if (ultimate == pc_rtx)
1719 ultimate = get_label_after (temp1);
1720 else if (ultimate && GET_CODE (ultimate) != RETURN)
1721 ultimate = XEXP (ultimate, 0);
1722
1723 if (ultimate && JUMP_LABEL (insn) != ultimate)
1724 changed |= redirect_jump (insn, ultimate);
1725 }
1726 }
1727 #endif
1728
1729 #if 0
1730 /* @@ This needs a bit of work before it will be right.
1731
1732 Any type of comparison can be accepted for the first and
1733 second compare. When rewriting the first jump, we must
1734 compute what conditions can reach label3, and use the
1735 appropriate code. We cannot simply reverse/swap the code
1736 of the first jump. In some cases, the second jump must be
1737 rewritten also.
1738
1739 For example,
1740 < == converts to > ==
1741 < != converts to == >
1742 etc.
1743
1744 If the code is written to only accept an '==' test for the second
1745 compare, then all that needs to be done is to swap the condition
1746 of the first branch.
1747
1748 It is questionable whether we want this optimization anyway,
1749 since if the user wrote code like this knowing that the
1750 jump to label1 is taken most of the time, then rewriting
1751 it gives slower code. */
1752 /* @@ This should call get_condition to find the values being
1753 compared, instead of looking for a COMPARE insn when HAVE_cc0
1754 is not defined. This would allow it to work on the m88k. */
1755 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1756 is not defined and the condition is tested by a separate compare
1757 insn. This is because the code below assumes that the result
1758 of the compare dies in the following branch. */
1759
1760 /* Simplify test a ~= b
1761 condjump label1;
1762 test a == b
1763 condjump label2;
1764 jump label3;
1765 label1:
1766
1767 rewriting as
1768 test a ~~= b
1769 condjump label3
1770 test a == b
1771 condjump label2
1772 label1:
1773
1774 where ~= is an inequality, e.g. >, and ~~= is the swapped
1775 inequality, e.g. <.
1776
1777 We recognize this case scanning backwards.
1778
1779 TEMP is the conditional jump to `label2';
1780 TEMP1 is the test for `a == b';
1781 TEMP2 is the conditional jump to `label1';
1782 TEMP3 is the test for `a ~= b'. */
1783 else if (this_is_simplejump
1784 && (temp = prev_active_insn (insn))
1785 && no_labels_between_p (temp, insn)
1786 && condjump_p (temp)
1787 && (temp1 = prev_active_insn (temp))
1788 && no_labels_between_p (temp1, temp)
1789 && GET_CODE (temp1) == INSN
1790 && GET_CODE (PATTERN (temp1)) == SET
1791 #ifdef HAVE_cc0
1792 && sets_cc0_p (PATTERN (temp1)) == 1
1793 #else
1794 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1795 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1796 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1797 #endif
1798 && (temp2 = prev_active_insn (temp1))
1799 && no_labels_between_p (temp2, temp1)
1800 && condjump_p (temp2)
1801 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1802 && (temp3 = prev_active_insn (temp2))
1803 && no_labels_between_p (temp3, temp2)
1804 && GET_CODE (PATTERN (temp3)) == SET
1805 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1806 SET_DEST (PATTERN (temp1)))
1807 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1808 SET_SRC (PATTERN (temp3)))
1809 && ! inequality_comparisons_p (PATTERN (temp))
1810 && inequality_comparisons_p (PATTERN (temp2)))
1811 {
1812 rtx fallthrough_label = JUMP_LABEL (temp2);
1813
1814 ++LABEL_NUSES (fallthrough_label);
1815 if (swap_jump (temp2, JUMP_LABEL (insn)))
1816 {
1817 delete_insn (insn);
1818 changed = 1;
1819 }
1820
1821 if (--LABEL_NUSES (fallthrough_label) == 0)
1822 delete_insn (fallthrough_label);
1823 }
1824 #endif
1825 /* Simplify if (...) {... x = 1;} if (x) ...
1826
1827 We recognize this case backwards.
1828
1829 TEMP is the test of `x';
1830 TEMP1 is the assignment to `x' at the end of the
1831 previous statement. */
1832 /* @@ This should call get_condition to find the values being
1833 compared, instead of looking for a COMPARE insn when HAVE_cc0
1834 is not defined. This would allow it to work on the m88k. */
1835 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1836 is not defined and the condition is tested by a separate compare
1837 insn. This is because the code below assumes that the result
1838 of the compare dies in the following branch. */
1839
1840 /* ??? This has to be turned off. The problem is that the
1841 unconditional jump might indirectly end up branching to the
1842 label between TEMP1 and TEMP. We can't detect this, in general,
1843 since it may become a jump to there after further optimizations.
1844 If that jump is done, it will be deleted, so we will retry
1845 this optimization in the next pass, thus an infinite loop.
1846
1847 The present code prevents this by putting the jump after the
1848 label, but this is not logically correct. */
1849 #if 0
1850 else if (this_is_condjump
1851 /* Safe to skip USE and CLOBBER insns here
1852 since they will not be deleted. */
1853 && (temp = prev_active_insn (insn))
1854 && no_labels_between_p (temp, insn)
1855 && GET_CODE (temp) == INSN
1856 && GET_CODE (PATTERN (temp)) == SET
1857 #ifdef HAVE_cc0
1858 && sets_cc0_p (PATTERN (temp)) == 1
1859 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1860 #else
1861 /* TEMP must be a compare insn; we cannot accept a
1862 register-to-register move here, since it may not be
1863 simply a tst insn. */
1864 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1865 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1866 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1867 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1868 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1869 #endif
1870 /* We may skip USE or CLOBBER insns here
1871 when checking for the opportunity, since we
1872 take care of them later. */
1873 && (temp1 = prev_active_insn (temp))
1874 && GET_CODE (temp1) == INSN
1875 && GET_CODE (PATTERN (temp1)) == SET
1876 #ifdef HAVE_cc0
1877 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1878 #else
1879 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1880 == SET_DEST (PATTERN (temp1)))
1881 #endif
1882 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1883 /* If this isn't true, cse will do the job. */
1884 && ! no_labels_between_p (temp1, temp))
1885 {
1886 /* Get the if_then_else from the condjump. */
1887 rtx choice = SET_SRC (PATTERN (insn));
1888 if (GET_CODE (choice) == IF_THEN_ELSE
1889 && (GET_CODE (XEXP (choice, 0)) == EQ
1890 || GET_CODE (XEXP (choice, 0)) == NE))
1891 {
1892 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1893 rtx last_insn;
1894 rtx ultimate;
1895 rtx p;
1896
1897 /* Get the place that condjump will jump to
1898 if it is reached from here. */
1899 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1900 == want_nonzero)
1901 ultimate = XEXP (choice, 1);
1902 else
1903 ultimate = XEXP (choice, 2);
1904 /* Get it as a CODE_LABEL. */
1905 if (ultimate == pc_rtx)
1906 ultimate = get_label_after (insn);
1907 else
1908 /* Get the label out of the LABEL_REF. */
1909 ultimate = XEXP (ultimate, 0);
1910
1911 /* Insert the jump immediately before TEMP, specifically
1912 after the label that is between TEMP1 and TEMP. */
1913 last_insn = PREV_INSN (temp);
1914
1915 /* If we would be branching to the next insn, the jump
1916 would immediately be deleted and then re-inserted in
1917 a subsequent pass over the code. So don't do anything
1918 in that case. */
1919 if (next_active_insn (last_insn)
1920 != next_active_insn (ultimate))
1921 {
1922 emit_barrier_after (last_insn);
1923 p = emit_jump_insn_after (gen_jump (ultimate),
1924 last_insn);
1925 JUMP_LABEL (p) = ultimate;
1926 ++LABEL_NUSES (ultimate);
1927 if (INSN_UID (ultimate) < max_jump_chain
1928 && INSN_CODE (p) < max_jump_chain)
1929 {
1930 jump_chain[INSN_UID (p)]
1931 = jump_chain[INSN_UID (ultimate)];
1932 jump_chain[INSN_UID (ultimate)] = p;
1933 }
1934 changed = 1;
1935 continue;
1936 }
1937 }
1938 }
1939 #endif
1940 #ifdef HAVE_trap
1941 /* Detect a conditional jump jumping over an unconditional trap. */
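      /* Schematically (the rtl here is a sketch, not exact):
              (jump_insn (if_then_else COND (label_ref L) (pc)))
              (trap_if (const_int 1) code)
           L: ...
         becomes
              (trap_if reversed-COND code)
           L: ...  */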
1942 else if (HAVE_trap
1943 && this_is_condjump && ! this_is_simplejump
1944 && reallabelprev != 0
1945 && GET_CODE (reallabelprev) == INSN
1946 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1947 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1948 && prev_active_insn (reallabelprev) == insn
1949 && no_labels_between_p (insn, reallabelprev)
1950 && (temp2 = get_condition (insn, &temp4))
1951 && can_reverse_comparison_p (temp2, insn))
1952 {
1953 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1954 XEXP (temp2, 0), XEXP (temp2, 1),
1955 TRAP_CODE (PATTERN (reallabelprev)));
1956
1957 if (new)
1958 {
1959 emit_insn_before (new, temp4);
1960 delete_insn (reallabelprev);
1961 delete_jump (insn);
1962 changed = 1;
1963 continue;
1964 }
1965 }
1966 /* Detect a jump jumping to an unconditional trap. */
1967 else if (HAVE_trap && this_is_condjump
1968 && (temp = next_active_insn (JUMP_LABEL (insn)))
1969 && GET_CODE (temp) == INSN
1970 && GET_CODE (PATTERN (temp)) == TRAP_IF
1971 && (this_is_simplejump
1972 || (temp2 = get_condition (insn, &temp4))))
1973 {
1974 rtx tc = TRAP_CONDITION (PATTERN (temp));
1975
1976 if (tc == const_true_rtx
1977 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1978 {
1979 rtx new;
1980 /* Replace an unconditional jump to a trap with a trap. */
1981 if (this_is_simplejump)
1982 {
1983 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1984 delete_jump (insn);
1985 changed = 1;
1986 continue;
1987 }
1988 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1989 XEXP (temp2, 1),
1990 TRAP_CODE (PATTERN (temp)));
1991 if (new)
1992 {
1993 emit_insn_before (new, temp4);
1994 delete_jump (insn);
1995 changed = 1;
1996 continue;
1997 }
1998 }
1999 /* If the trap condition and jump condition are mutually
2000 exclusive, redirect the jump to the following insn. */
2001 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
2002 && ! this_is_simplejump
2003 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
2004 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
2005 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
2006 && redirect_jump (insn, get_label_after (temp)))
2007 {
2008 changed = 1;
2009 continue;
2010 }
2011 }
2012 #endif
2013 else
2014 {
2015 /* Detect a jump to a jump. */
2016
2017 /* Look for if (foo) bar; else break; */
2018 /* The insns look like this:
2019 insn = condjump label1;
2020 ...range1 (some insns)...
2021 jump label2;
2022 label1:
2023 ...range2 (some insns)...
2024 jump somewhere unconditionally
2025 label2: */
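        /* After inverting INSN and exchanging the two ranges, the
           result is roughly (ignoring the barrier bookkeeping):
              insn = inverted condjump label1;
              ...range2 (some insns)...
              jump somewhere unconditionally
              label1:
              ...range1 (some insns)...
              jump label2;
              label2:
           The jump to label2 then merely falls through, so a later
           pass can delete it.  */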
2026 {
2027 rtx label1 = next_label (insn);
2028 rtx range1end = label1 ? prev_active_insn (label1) : 0;
2029 /* Don't do this optimization on the first round, so that
2030 jump-around-a-jump gets simplified before we ask here
2031 whether a jump is unconditional.
2032
2033 Also don't do it when we are called after reload since
2034 it will confuse reorg. */
2035 if (! first
2036 && (reload_completed ? ! flag_delayed_branch : 1)
2037 /* Make sure INSN is something we can invert. */
2038 && condjump_p (insn)
2039 && label1 != 0
2040 && JUMP_LABEL (insn) == label1
2041 && LABEL_NUSES (label1) == 1
2042 && GET_CODE (range1end) == JUMP_INSN
2043 && simplejump_p (range1end))
2044 {
2045 rtx label2 = next_label (label1);
2046 rtx range2end = label2 ? prev_active_insn (label2) : 0;
2047 if (range1end != range2end
2048 && JUMP_LABEL (range1end) == label2
2049 && GET_CODE (range2end) == JUMP_INSN
2050 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
2051 /* Invert the jump condition, so we
2052 still execute the same insns in each case. */
2053 && invert_jump (insn, label1))
2054 {
2055 rtx range1beg = next_active_insn (insn);
2056 rtx range2beg = next_active_insn (label1);
2057 rtx range1after, range2after;
2058 rtx range1before, range2before;
2059 rtx rangenext;
2060
2061 /* Include in each range any notes before it, to be
2062 sure that we get the line number note if any, even
2063 if there are other notes here. */
2064 while (PREV_INSN (range1beg)
2065 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
2066 range1beg = PREV_INSN (range1beg);
2067
2068 while (PREV_INSN (range2beg)
2069 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
2070 range2beg = PREV_INSN (range2beg);
2071
2072 /* Don't move NOTEs for blocks or loops; shift them
2073 outside the ranges, where they'll stay put. */
2074 range1beg = squeeze_notes (range1beg, range1end);
2075 range2beg = squeeze_notes (range2beg, range2end);
2076
2077 /* Get current surrounds of the 2 ranges. */
2078 range1before = PREV_INSN (range1beg);
2079 range2before = PREV_INSN (range2beg);
2080 range1after = NEXT_INSN (range1end);
2081 range2after = NEXT_INSN (range2end);
2082
2083 /* Splice range2 where range1 was. */
2084 NEXT_INSN (range1before) = range2beg;
2085 PREV_INSN (range2beg) = range1before;
2086 NEXT_INSN (range2end) = range1after;
2087 PREV_INSN (range1after) = range2end;
2088 /* Splice range1 where range2 was. */
2089 NEXT_INSN (range2before) = range1beg;
2090 PREV_INSN (range1beg) = range2before;
2091 NEXT_INSN (range1end) = range2after;
2092 PREV_INSN (range2after) = range1end;
2093
2094 /* Check for loop notes between the end of
2095 range2 and the next code label. If there is one,
2096 then what we have really seen is
2097 if (foo) break; end_of_loop;
2098 and moved the break sequence outside the loop.
2099 We must move LOOP_END, LOOP_VTOP and LOOP_CONT
2100 notes (in order) to where the loop really ends now,
2101 or we will confuse loop optimization. Stop if we
2102 find a LOOP_BEG note first, since we don't want to
2103 move the notes in that case. */
2104 for (;range2after != label2; range2after = rangenext)
2105 {
2106 rangenext = NEXT_INSN (range2after);
2107 if (GET_CODE (range2after) == NOTE)
2108 {
2109 int kind = NOTE_LINE_NUMBER (range2after);
2110 if (kind == NOTE_INSN_LOOP_END
2111 || kind == NOTE_INSN_LOOP_VTOP
2112 || kind == NOTE_INSN_LOOP_CONT)
2113 {
2114 NEXT_INSN (PREV_INSN (range2after))
2115 = rangenext;
2116 PREV_INSN (rangenext)
2117 = PREV_INSN (range2after);
2118 PREV_INSN (range2after)
2119 = PREV_INSN (range1beg);
2120 NEXT_INSN (range2after) = range1beg;
2121 NEXT_INSN (PREV_INSN (range1beg))
2122 = range2after;
2123 PREV_INSN (range1beg) = range2after;
2124 }
2125 else if (NOTE_LINE_NUMBER (range2after)
2126 == NOTE_INSN_LOOP_BEG)
2127 break;
2128 }
2129 }
2130 changed = 1;
2131 continue;
2132 }
2133 }
2134 }
2135
2136 /* Now that the jump has been tensioned,
2137 try cross jumping: check for identical code
2138 before the jump and before its target label. */
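      /* For example, if two paths end with the same tail
            a = b + c;  d = a * 2;  jump L;
         and, elsewhere,
            a = b + c;  d = a * 2;  L: ...
         the first copy of the tail can be replaced by a jump to a
         label placed in front of the second copy.  */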
2139
2140 /* First, cross jumping of conditional jumps: */
2141
2142 if (cross_jump && condjump_p (insn))
2143 {
2144 rtx newjpos, newlpos;
2145 rtx x = prev_real_insn (JUMP_LABEL (insn));
2146
2147 /* A conditional jump may be crossjumped
2148 only if the place it jumps to follows
2149 an opposing jump that comes back here. */
2150
2151 if (x != 0 && ! jump_back_p (x, insn))
2152 /* We have no opposing jump;
2153 cannot cross jump this insn. */
2154 x = 0;
2155
2156 newjpos = 0;
2157 /* TARGET is nonzero if it is ok to cross jump
2158 to code before TARGET. If so, see if it matches. */
2159 if (x != 0)
2160 find_cross_jump (insn, x, 2,
2161 &newjpos, &newlpos);
2162
2163 if (newjpos != 0)
2164 {
2165 do_cross_jump (insn, newjpos, newlpos);
2166 /* Make the old conditional jump
2167 into an unconditional one. */
2168 SET_SRC (PATTERN (insn))
2169 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
2170 INSN_CODE (insn) = -1;
2171 emit_barrier_after (insn);
2172 /* Add to jump_chain unless this is a new label
2173 whose UID is too large. */
2174 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
2175 {
2176 jump_chain[INSN_UID (insn)]
2177 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2178 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2179 }
2180 changed = 1;
2181 next = insn;
2182 }
2183 }
2184
2185 /* Cross jumping of unconditional jumps:
2186 a few differences. */
2187
2188 if (cross_jump && simplejump_p (insn))
2189 {
2190 rtx newjpos, newlpos;
2191 rtx target;
2192
2193 newjpos = 0;
2194
2195 /* TARGET is nonzero if it is ok to cross jump
2196 to code before TARGET. If so, see if it matches. */
2197 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2198 &newjpos, &newlpos);
2199
2200 /* If we cannot cross jump to code before the label,
2201 see if we can cross jump to another jump to
2202 the same label. */
2203 /* Try each other jump to this label. */
2204 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2205 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2206 target != 0 && newjpos == 0;
2207 target = jump_chain[INSN_UID (target)])
2208 if (target != insn
2209 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2210 /* Ignore TARGET if it's deleted. */
2211 && ! INSN_DELETED_P (target))
2212 find_cross_jump (insn, target, 2,
2213 &newjpos, &newlpos);
2214
2215 if (newjpos != 0)
2216 {
2217 do_cross_jump (insn, newjpos, newlpos);
2218 changed = 1;
2219 next = insn;
2220 }
2221 }
2222
2223 /* This code was dead in the previous jump.c! */
2224 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2225 {
2226 /* Return insns all "jump to the same place"
2227 so we can cross-jump between any two of them. */
2228
2229 rtx newjpos, newlpos, target;
2230
2231 newjpos = 0;
2232
2233 /* If we cannot cross jump to code before the label,
2234 see if we can cross jump to another jump to
2235 the same label. */
2236 /* Try each other jump to this label. */
2237 for (target = jump_chain[0];
2238 target != 0 && newjpos == 0;
2239 target = jump_chain[INSN_UID (target)])
2240 if (target != insn
2241 && ! INSN_DELETED_P (target)
2242 && GET_CODE (PATTERN (target)) == RETURN)
2243 find_cross_jump (insn, target, 2,
2244 &newjpos, &newlpos);
2245
2246 if (newjpos != 0)
2247 {
2248 do_cross_jump (insn, newjpos, newlpos);
2249 changed = 1;
2250 next = insn;
2251 }
2252 }
2253 }
2254 }
2255
2256 first = 0;
2257 }
2258
2259 /* Delete extraneous line number notes.
2260 Note that two consecutive notes for different lines are not really
2261 extraneous. There should be some indication where that line belonged,
2262 even if it became empty. */
2263
2264 {
2265 rtx last_note = 0;
2266
2267 for (insn = f; insn; insn = NEXT_INSN (insn))
2268 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2269 {
2270 /* Delete this note if it is identical to previous note. */
2271 if (last_note
2272 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2273 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2274 {
2275 delete_insn (insn);
2276 continue;
2277 }
2278
2279 last_note = insn;
2280 }
2281 }
2282
2283 #ifdef HAVE_return
2284 if (HAVE_return)
2285 {
2286 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2287 in front of it. If the machine allows it at this point (we might be
2288 after reload for a leaf routine), it will improve optimization for it
2289 to be there. We do this both here and at the start of this pass since
2290 the RETURN might have been deleted by some of our optimizations. */
2291 insn = get_last_insn ();
2292 while (insn && GET_CODE (insn) == NOTE)
2293 insn = PREV_INSN (insn);
2294
2295 if (insn && GET_CODE (insn) != BARRIER)
2296 {
2297 emit_jump_insn (gen_return ());
2298 emit_barrier ();
2299 }
2300 }
2301 #endif
2302
2303 /* CAN_REACH_END is persistent for each function. Once set it should
2304 not be cleared. This is especially true for the case where we
2305 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
2306 the front-end before compiling each function. */
2307 if (calculate_can_reach_end (last_insn, 0, 1))
2308 can_reach_end = 1;
2309
2310 end:
2311 /* Clean up. */
2312 free (jump_chain);
2313 jump_chain = 0;
2314 }
2315 \f
2316 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2317 notes whose labels don't occur in the insn any more. Returns the
2318 largest INSN_UID found. */
2319 static int
2320 init_label_info (f)
2321 rtx f;
2322 {
2323 int largest_uid = 0;
2324 rtx insn;
2325
2326 for (insn = f; insn; insn = NEXT_INSN (insn))
2327 {
2328 if (GET_CODE (insn) == CODE_LABEL)
2329 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2330 else if (GET_CODE (insn) == JUMP_INSN)
2331 JUMP_LABEL (insn) = 0;
2332 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2333 {
2334 rtx note, next;
2335
2336 for (note = REG_NOTES (insn); note; note = next)
2337 {
2338 next = XEXP (note, 1);
2339 if (REG_NOTE_KIND (note) == REG_LABEL
2340 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2341 remove_note (insn, note);
2342 }
2343 }
2344 if (INSN_UID (insn) > largest_uid)
2345 largest_uid = INSN_UID (insn);
2346 }
2347
2348 return largest_uid;
2349 }
2350
2351 /* Delete insns following barriers, up to next label.
2352
2353 Also delete no-op jumps created by gcse. */
2354 static void
2355 delete_barrier_successors (f)
2356 rtx f;
2357 {
2358 rtx insn;
2359
2360 for (insn = f; insn;)
2361 {
2362 if (GET_CODE (insn) == BARRIER)
2363 {
2364 insn = NEXT_INSN (insn);
2365
2366 never_reached_warning (insn);
2367
2368 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2369 {
2370 if (GET_CODE (insn) == NOTE
2371 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2372 insn = NEXT_INSN (insn);
2373 else
2374 insn = delete_insn (insn);
2375 }
2376 /* INSN is now the CODE_LABEL. */
2377 }
2378 /* Also remove (set (pc) (pc)) insns which can be created by
2379 gcse. We eliminate such insns now to avoid having them
2380 cause problems later. */
2381 else if (GET_CODE (insn) == JUMP_INSN
2382 && GET_CODE (PATTERN (insn)) == SET
2383 && SET_SRC (PATTERN (insn)) == pc_rtx
2384 && SET_DEST (PATTERN (insn)) == pc_rtx)
2385 insn = delete_insn (insn);
2386
2387 else
2388 insn = NEXT_INSN (insn);
2389 }
2390 }
2391
2392 /* Mark the label each jump jumps to.
2393 Combine consecutive labels, and count uses of labels.
2394
2395 For each label, make a chain (using `jump_chain')
2396 of all the *unconditional* jumps that jump to it;
2397 also make a chain of all returns.
2398
2399 CROSS_JUMP indicates whether we are doing cross jumping
2400 and, if we are, whether we will be paying attention to
2401 death notes. */
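/* For example, if unconditional jumps J1 and then J2 (in insn order)
   both target label L, this pass leaves
        jump_chain[uid (L)] == J2,
        jump_chain[uid (J2)] == J1,
        jump_chain[uid (J1)] == 0;
   return insns are chained from jump_chain[0] the same way.  */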
2402
2403 static void
2404 mark_all_labels (f, cross_jump)
2405 rtx f;
2406 int cross_jump;
2407 {
2408 rtx insn;
2409
2410 for (insn = f; insn; insn = NEXT_INSN (insn))
2411 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2412 {
2413 mark_jump_label (PATTERN (insn), insn, cross_jump);
2414 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2415 {
2416 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2417 {
2418 jump_chain[INSN_UID (insn)]
2419 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2420 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2421 }
2422 if (GET_CODE (PATTERN (insn)) == RETURN)
2423 {
2424 jump_chain[INSN_UID (insn)] = jump_chain[0];
2425 jump_chain[0] = insn;
2426 }
2427 }
2428 }
2429 }
2430
2431 /* Delete all labels that are no longer referenced.
2432 Also find and return the last insn. */
2433
2434 static rtx
2435 delete_unreferenced_labels (f)
2436 rtx f;
2437 {
2438 rtx final = NULL_RTX;
2439 rtx insn;
2440
2441 for (insn = f; insn; )
2442 {
2443 if (GET_CODE (insn) == CODE_LABEL
2444 && LABEL_NUSES (insn) == 0
2445 && LABEL_ALTERNATE_NAME (insn) == NULL)
2446 insn = delete_insn (insn);
2447 else
2448 {
2449 final = insn;
2450 insn = NEXT_INSN (insn);
2451 }
2452 }
2453
2454 return final;
2455 }
2456
2457 /* Delete various simple forms of moves which have no necessary
2458 side effect. */
2459
2460 static void
2461 delete_noop_moves (f)
2462 rtx f;
2463 {
2464 rtx insn, next;
2465
2466 for (insn = f; insn; )
2467 {
2468 next = NEXT_INSN (insn);
2469
2470 if (GET_CODE (insn) == INSN)
2471 {
2472 register rtx body = PATTERN (insn);
2473
2474 /* Combine stack_adjusts with following push_insns. */
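          /* Schematically, a sequence like
                sp = sp + N;  *sp++ = a;  *sp++ = b;
             (on a target whose push insns use POST_INC addressing)
             is rewritten so that the pushes become plain stores at
             constant offsets from sp, and the explicit adjust is
             deleted, or shrunk when the pushes do not account for
             all N bytes.  */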
2475 #ifdef PUSH_ROUNDING
2476 if (GET_CODE (body) == SET
2477 && SET_DEST (body) == stack_pointer_rtx
2478 && GET_CODE (SET_SRC (body)) == PLUS
2479 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
2480 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
2481 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
2482 {
2483 rtx p;
2484 rtx stack_adjust_insn = insn;
2485 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
2486 int total_pushed = 0;
2487 int pushes = 0;
2488
2489 /* Find all successive push insns. */
2490 p = insn;
2491 /* Don't convert more than three pushes;
2492 that starts adding too many displaced addresses
2493 and the whole thing starts becoming a losing
2494 proposition. */
2495 while (pushes < 3)
2496 {
2497 rtx pbody, dest;
2498 p = next_nonnote_insn (p);
2499 if (p == 0 || GET_CODE (p) != INSN)
2500 break;
2501 pbody = PATTERN (p);
2502 if (GET_CODE (pbody) != SET)
2503 break;
2504 dest = SET_DEST (pbody);
2505 /* Allow a no-op move between the adjust and the push. */
2506 if (GET_CODE (dest) == REG
2507 && GET_CODE (SET_SRC (pbody)) == REG
2508 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2509 continue;
2510 if (! (GET_CODE (dest) == MEM
2511 && GET_CODE (XEXP (dest, 0)) == POST_INC
2512 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2513 break;
2514 pushes++;
2515 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
2516 > stack_adjust_amount)
2517 break;
2518 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2519 }
2520
2521 /* Discard the amount pushed from the stack adjust;
2522 maybe eliminate it entirely. */
2523 if (total_pushed >= stack_adjust_amount)
2524 {
2525 delete_computation (stack_adjust_insn);
2526 total_pushed = stack_adjust_amount;
2527 }
2528 else
2529 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
2530 = GEN_INT (stack_adjust_amount - total_pushed);
2531
2532 /* Change the appropriate push insns to ordinary stores. */
2533 p = insn;
2534 while (total_pushed > 0)
2535 {
2536 rtx pbody, dest;
2537 p = next_nonnote_insn (p);
2538 if (GET_CODE (p) != INSN)
2539 break;
2540 pbody = PATTERN (p);
2541 if (GET_CODE (pbody) != SET)
2542 break;
2543 dest = SET_DEST (pbody);
2544 /* Allow a no-op move between the adjust and the push. */
2545 if (GET_CODE (dest) == REG
2546 && GET_CODE (SET_SRC (pbody)) == REG
2547 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2548 continue;
2549 if (! (GET_CODE (dest) == MEM
2550 && GET_CODE (XEXP (dest, 0)) == POST_INC
2551 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2552 break;
2553 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2554 /* If this push doesn't fully fit in the space
2555 of the stack adjust that we deleted,
2556 make another stack adjust here for what we
2557 didn't use up. There should be peepholes
2558 to recognize the resulting sequence of insns. */
2559 if (total_pushed < 0)
2560 {
2561 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
2562 GEN_INT (- total_pushed)),
2563 p);
2564 break;
2565 }
2566 XEXP (dest, 0)
2567 = plus_constant (stack_pointer_rtx, total_pushed);
2568 }
2569 }
2570 #endif
2571
2572 /* Detect and delete no-op move instructions
2573 resulting from not allocating a parameter in a register. */
2574
2575 if (GET_CODE (body) == SET
2576 && (SET_DEST (body) == SET_SRC (body)
2577 || (GET_CODE (SET_DEST (body)) == MEM
2578 && GET_CODE (SET_SRC (body)) == MEM
2579 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2580 && ! (GET_CODE (SET_DEST (body)) == MEM
2581 && MEM_VOLATILE_P (SET_DEST (body)))
2582 && ! (GET_CODE (SET_SRC (body)) == MEM
2583 && MEM_VOLATILE_P (SET_SRC (body))))
2584 delete_computation (insn);
2585
2586 /* Detect and ignore no-op move instructions
2587 resulting from smart or fortuitous register allocation. */
2588
2589 else if (GET_CODE (body) == SET)
2590 {
2591 int sreg = true_regnum (SET_SRC (body));
2592 int dreg = true_regnum (SET_DEST (body));
2593
2594 if (sreg == dreg && sreg >= 0)
2595 delete_insn (insn);
2596 else if (sreg >= 0 && dreg >= 0)
2597 {
2598 rtx trial;
2599 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2600 sreg, NULL_PTR, dreg,
2601 GET_MODE (SET_SRC (body)));
2602
2603 if (tem != 0
2604 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2605 {
2606 /* DREG may have been the target of a REG_DEAD note in
2607 the insn which makes INSN redundant. If so, reorg
2608 would still think it is dead. So search for such a
2609 note and delete it if we find it. */
2610 if (! find_regno_note (insn, REG_UNUSED, dreg))
2611 for (trial = prev_nonnote_insn (insn);
2612 trial && GET_CODE (trial) != CODE_LABEL;
2613 trial = prev_nonnote_insn (trial))
2614 if (find_regno_note (trial, REG_DEAD, dreg))
2615 {
2616 remove_death (dreg, trial);
2617 break;
2618 }
2619
2620 /* Deleting INSN could lose a death note for SREG. */
2621 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2622 {
2623 /* Change this into a USE so that we won't emit
2624 code for it, but still can keep the note. */
2625 PATTERN (insn)
2626 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2627 INSN_CODE (insn) = -1;
2628 /* Remove all reg notes but the REG_DEAD one. */
2629 REG_NOTES (insn) = trial;
2630 XEXP (trial, 1) = NULL_RTX;
2631 }
2632 else
2633 delete_insn (insn);
2634 }
2635 }
2636 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2637 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2638 NULL_PTR, 0,
2639 GET_MODE (SET_DEST (body))))
2640 {
2641 /* This handles the case where we have two consecutive
2642 assignments of the same constant to pseudos that didn't
2643 get a hard reg. Each SET from the constant will be
2644 converted into a SET of the spill register and an
2645 output reload will be made following it. This produces
2646 two loads of the same constant into the same spill
2647 register. */
2648
2649 rtx in_insn = insn;
2650
2651 /* Look back for a death note for the first reg.
2652 If there is one, it is no longer accurate. */
2653 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2654 {
2655 if ((GET_CODE (in_insn) == INSN
2656 || GET_CODE (in_insn) == JUMP_INSN)
2657 && find_regno_note (in_insn, REG_DEAD, dreg))
2658 {
2659 remove_death (dreg, in_insn);
2660 break;
2661 }
2662 in_insn = PREV_INSN (in_insn);
2663 }
2664
2665 /* Delete the second load of the value. */
2666 delete_insn (insn);
2667 }
2668 }
2669 else if (GET_CODE (body) == PARALLEL)
2670 {
2671 /* If each part is a set between two identical registers or
2672 a USE or CLOBBER, delete the insn. */
2673 int i, sreg, dreg;
2674 rtx tem;
2675
2676 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2677 {
2678 tem = XVECEXP (body, 0, i);
2679 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2680 continue;
2681
2682 if (GET_CODE (tem) != SET
2683 || (sreg = true_regnum (SET_SRC (tem))) < 0
2684 || (dreg = true_regnum (SET_DEST (tem))) < 0
2685 || dreg != sreg)
2686 break;
2687 }
2688
2689 if (i < 0)
2690 delete_insn (insn);
2691 }
2692 /* Also delete insns to store bit fields if they are no-ops. */
2693 /* Not worth the hair to detect this in the big-endian case. */
2694 else if (! BYTES_BIG_ENDIAN
2695 && GET_CODE (body) == SET
2696 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2697 && XEXP (SET_DEST (body), 2) == const0_rtx
2698 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2699 && ! (GET_CODE (SET_SRC (body)) == MEM
2700 && MEM_VOLATILE_P (SET_SRC (body))))
2701 delete_insn (insn);
2702 }
2703 insn = next;
2704 }
2705 }
2706
2707 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2708 If so indicate that this function can drop off the end by returning
2709 1, else return 0.
2710
2711 CHECK_DELETED indicates whether we must check if the note being
2712 searched for has the deleted flag set.
2713
2714 DELETE_FINAL_NOTE indicates whether we should delete the note
2715 if we find it. */
2716
2717 static int
2718 calculate_can_reach_end (last, check_deleted, delete_final_note)
2719 rtx last;
2720 int check_deleted;
2721 int delete_final_note;
2722 {
2723 rtx insn = last;
2724 int n_labels = 1;
2725
2726 while (insn != NULL_RTX)
2727 {
2728 int ok = 0;
2729
2730 /* One label can follow the end-note: the return label. */
2731 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2732 ok = 1;
2733 /* Ordinary insns can follow it if returning a structure. */
2734 else if (GET_CODE (insn) == INSN)
2735 ok = 1;
2736 /* If the machine uses explicit RETURN insns and no epilogue,
2737 then one of them follows the note. */
2738 else if (GET_CODE (insn) == JUMP_INSN
2739 && GET_CODE (PATTERN (insn)) == RETURN)
2740 ok = 1;
2741 /* A barrier can follow the return insn. */
2742 else if (GET_CODE (insn) == BARRIER)
2743 ok = 1;
2744 /* Other kinds of notes can follow also. */
2745 else if (GET_CODE (insn) == NOTE
2746 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2747 ok = 1;
2748
2749 if (ok != 1)
2750 break;
2751
2752 insn = PREV_INSN (insn);
2753 }
2754
2755 /* See if we backed up to the appropriate type of note. */
2756 if (insn != NULL_RTX
2757 && GET_CODE (insn) == NOTE
2758 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
2759 && (check_deleted == 0
2760 || ! INSN_DELETED_P (insn)))
2761 {
2762 if (delete_final_note)
2763 delete_insn (insn);
2764 return 1;
2765 }
2766
2767 return 0;
2768 }
2769
2770 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2771 jump. Assume that this unconditional jump is to the exit test code. If
2772 the code is sufficiently simple, make a copy of it before INSN,
2773 followed by a jump to the exit of the loop. Then delete the unconditional
2774 jump after INSN.
2775
2776 Return 1 if we made the change, else 0.
2777
2778 This is only safe immediately after a regscan pass because it uses the
2779 values of regno_first_uid and regno_last_uid. */
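/* Schematically, this turns
        NOTE_INSN_LOOP_BEG
        jump L_test                     (deleted at the end)
     L_body:
        ...loop body...
     L_test:
        ...exit test, conditional jump back to L_body...
        NOTE_INSN_LOOP_END
   into
        ...copy of the exit test, exiting past the loop...
        NOTE_INSN_LOOP_BEG
     L_body:
        ...loop body...
        NOTE_INSN_LOOP_VTOP
     L_test:
        ...exit test...
        NOTE_INSN_LOOP_END
   i.e. the loop now tests its condition at the bottom.  */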
2780
2781 static int
2782 duplicate_loop_exit_test (loop_start)
2783 rtx loop_start;
2784 {
2785 rtx insn, set, reg, p, link;
2786 rtx copy = 0, first_copy = 0;
2787 int num_insns = 0;
2788 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2789 rtx lastexit;
2790 int max_reg = max_reg_num ();
2791 rtx *reg_map = 0;
2792
2793 /* Scan the exit code. We do not perform this optimization if any insn:
2794
2795 is a CALL_INSN
2796 is a CODE_LABEL
2797 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2798 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2799 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2800 is not valid.
2801
2802 We also do not do this if we find an insn with ASM_OPERANDS. While
2803 this restriction should not be necessary, copying an insn with
2804 ASM_OPERANDS can confuse asm_noperands in some cases.
2805
2806 Also, don't do this if the exit code is more than 20 insns. */
2807
2808 for (insn = exitcode;
2809 insn
2810 && ! (GET_CODE (insn) == NOTE
2811 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2812 insn = NEXT_INSN (insn))
2813 {
2814 switch (GET_CODE (insn))
2815 {
2816 case CODE_LABEL:
2817 case CALL_INSN:
2818 return 0;
2819 case NOTE:
2820 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2821 a jump immediately after the loop start that branches outside
2822 the loop but within an outer loop, near the exit test.
2823 If we copied this exit test and created a phony
2824 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2825 before the exit test look as though they could be safely moved
2826 out of the loop even though they may never actually be executed.
2827 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2828
2829 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2830 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2831 return 0;
2832
2833 if (optimize < 2
2834 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2835 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2836 /* If we were to duplicate this code, we would not move
2837 the BLOCK notes, and so debugging the moved code would
2838 be difficult. Thus, we only move the code with -O2 or
2839 higher. */
2840 return 0;
2841
2842 break;
2843 case JUMP_INSN:
2844 case INSN:
2845 /* The code below would grossly mishandle REG_WAS_0 notes,
2846 so get rid of them here. */
2847 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2848 remove_note (insn, p);
2849 if (++num_insns > 20
2850 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2851 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2852 return 0;
2853 break;
2854 default:
2855 break;
2856 }
2857 }
2858
2859 /* If INSN is zero, we never found the loop end; give up. */
2860 if (insn == 0)
2861 return 0;
2862
2863 lastexit = insn;
2864
2865 /* See if any insn sets a register only used in the loop exit code and
2866 not a user variable. If so, replace it with a new register. */
2867 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2868 if (GET_CODE (insn) == INSN
2869 && (set = single_set (insn)) != 0
2870 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2871 || (GET_CODE (reg) == SUBREG
2872 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2873 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2874 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
2875 {
2876 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2877 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2878 break;
2879
2880 if (p != lastexit)
2881 {
2882 /* We can do the replacement. Allocate reg_map if this is the
2883 first replacement we found. */
2884 if (reg_map == 0)
2885 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
2886
2887 REG_LOOP_TEST_P (reg) = 1;
2888
2889 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2890 }
2891 }
2892
2893 /* Now copy each insn. */
2894 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2895 {
2896 switch (GET_CODE (insn))
2897 {
2898 case BARRIER:
2899 copy = emit_barrier_before (loop_start);
2900 break;
2901 case NOTE:
2902 /* Only copy line-number notes. */
2903 if (NOTE_LINE_NUMBER (insn) >= 0)
2904 {
2905 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2906 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2907 }
2908 break;
2909
2910 case INSN:
2911 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
2912 if (reg_map)
2913 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2914
2915 mark_jump_label (PATTERN (copy), copy, 0);
2916
2917 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2918 make them. */
2919 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2920 if (REG_NOTE_KIND (link) != REG_LABEL)
2921 REG_NOTES (copy)
2922 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
2923 XEXP (link, 0),
2924 REG_NOTES (copy)));
2925 if (reg_map && REG_NOTES (copy))
2926 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2927 break;
2928
2929 case JUMP_INSN:
2930 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)), loop_start);
2931 if (reg_map)
2932 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2933 mark_jump_label (PATTERN (copy), copy, 0);
2934 if (REG_NOTES (insn))
2935 {
2936 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
2937 if (reg_map)
2938 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2939 }
2940
2941 /* If this is a simple jump, add it to the jump chain. */
2942
2943 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2944 && simplejump_p (copy))
2945 {
2946 jump_chain[INSN_UID (copy)]
2947 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2948 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2949 }
2950 break;
2951
2952 default:
2953 abort ();
2954 }
2955
2956 /* Record the first insn we copied. We need it so that we can
2957 scan the copied insns for new pseudo registers. */
2958 if (! first_copy)
2959 first_copy = copy;
2960 }
2961
2962 /* Now clean up by emitting a jump to the end label and deleting the jump
2963 at the start of the loop. */
2964 if (! copy || GET_CODE (copy) != BARRIER)
2965 {
2966 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2967 loop_start);
2968
2969 /* Record the first insn we copied. We need it so that we can
2970 scan the copied insns for new pseudo registers. This may not
2971 be strictly necessary since we should have copied at least one
2972 insn above. But I am going to be safe. */
2973 if (! first_copy)
2974 first_copy = copy;
2975
2976 mark_jump_label (PATTERN (copy), copy, 0);
2977 if (INSN_UID (copy) < max_jump_chain
2978 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2979 {
2980 jump_chain[INSN_UID (copy)]
2981 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2982 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2983 }
2984 emit_barrier_before (loop_start);
2985 }
2986
2987 /* Now scan from the first insn we copied to the last insn we copied
2988 (copy) for new pseudo registers. Do this after the code to jump to
2989 the end label since that might create a new pseudo too. */
2990 reg_scan_update (first_copy, copy, max_reg);
2991
2992 /* Mark the exit code as the virtual top of the converted loop. */
2993 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2994
2995 delete_insn (next_nonnote_insn (loop_start));
2996
2997 /* Clean up. */
2998 if (reg_map)
2999 free (reg_map);
3000
3001 return 1;
3002 }
3003 \f
3004 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
3005 loop-end notes between START and END out before START. Assume that
3006 END is not such a note. START may be such a note. Returns the value
3007 of the new starting insn, which may be different if the original start
3008 was such a note. */
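/* For example, if START is insn1 in
        insn1  NOTE_INSN_LOOP_END  insn2
   the result is
        NOTE_INSN_LOOP_END  insn1  insn2
   and START is still insn1.  */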
3009
3010 rtx
3011 squeeze_notes (start, end)
3012 rtx start, end;
3013 {
3014 rtx insn;
3015 rtx next;
3016
3017 for (insn = start; insn != end; insn = next)
3018 {
3019 next = NEXT_INSN (insn);
3020 if (GET_CODE (insn) == NOTE
3021 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
3022 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
3023 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
3024 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
3025 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
3026 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
3027 {
3028 if (insn == start)
3029 start = next;
3030 else
3031 {
3032 rtx prev = PREV_INSN (insn);
3033 PREV_INSN (insn) = PREV_INSN (start);
3034 NEXT_INSN (insn) = start;
3035 NEXT_INSN (PREV_INSN (insn)) = insn;
3036 PREV_INSN (NEXT_INSN (insn)) = insn;
3037 NEXT_INSN (prev) = next;
3038 PREV_INSN (next) = prev;
3039 }
3040 }
3041 }
3042
3043 return start;
3044 }
3045 \f
3046 /* Compare the instructions before insn E1 with those before E2
3047 to find an opportunity for cross jumping.
3048 (This means detecting identical sequences of insns followed by
3049 jumps to the same place, or followed by a label and a jump
3050 to that label, and replacing one with a jump to the other.)
3051
3052 Assume E1 is a jump that jumps to label E2
3053 (that is not always true but it might as well be).
3054 Find the longest possible equivalent sequences
3055 and store the first insns of those sequences into *F1 and *F2.
3056 Store zero there if no equivalent preceding instructions are found.
3057
3058 We give up if we find a label in stream 1.
3059 Actually we could transfer that label into stream 2. */
3060
3061 static void
3062 find_cross_jump (e1, e2, minimum, f1, f2)
3063 rtx e1, e2;
3064 int minimum;
3065 rtx *f1, *f2;
3066 {
3067 register rtx i1 = e1, i2 = e2;
3068 register rtx p1, p2;
3069 int lose = 0;
3070
3071 rtx last1 = 0, last2 = 0;
3072 rtx afterlast1 = 0, afterlast2 = 0;
3073
3074 *f1 = 0;
3075 *f2 = 0;
3076
3077 while (1)
3078 {
3079 i1 = prev_nonnote_insn (i1);
3080
3081 i2 = PREV_INSN (i2);
3082 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
3083 i2 = PREV_INSN (i2);
3084
3085 if (i1 == 0)
3086 break;
3087
3088 /* Don't allow the range of insns preceding E1 or E2
3089 to include the other (E2 or E1). */
3090 if (i2 == e1 || i1 == e2)
3091 break;
3092
3093 /* If we will get to this code by jumping, those jumps will be
3094 tensioned to go directly to the new label (before I2),
3095 so this cross-jumping won't cost extra. So reduce the minimum. */
3096 if (GET_CODE (i1) == CODE_LABEL)
3097 {
3098 --minimum;
3099 break;
3100 }
3101
3102 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
3103 break;
3104
3105 /* Avoid moving insns across EH regions if either of the insns
3106 can throw. */
3107 if (flag_exceptions
3108 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
3109 && !in_same_eh_region (i1, i2))
3110 break;
3111
3112 p1 = PATTERN (i1);
3113 p2 = PATTERN (i2);
3114
3115 /* If this is a CALL_INSN, compare register usage information.
3116 If we don't check this on stack register machines, the two
3117 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3118 numbers of stack registers in the same basic block.
3119 If we don't check this on machines with delay slots, a delay slot may
3120 be filled that clobbers a parameter expected by the subroutine.
3121
3122 ??? We take the simple route for now and assume that if they're
3123 equal, they were constructed identically. */
3124
3125 if (GET_CODE (i1) == CALL_INSN
3126 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3127 CALL_INSN_FUNCTION_USAGE (i2)))
3128 lose = 1;
3129
3130 #ifdef STACK_REGS
3131 /* If cross_jump_death_matters is not 0, the insn's mode
3132 indicates whether or not the insn contains any stack-like
3133 regs. */
3134
3135 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
3136 {
3137 /* If register stack conversion has already been done, then
3138 death notes must also be compared before it is certain that
3139 the two instruction streams match. */
3140
3141 rtx note;
3142 HARD_REG_SET i1_regset, i2_regset;
3143
3144 CLEAR_HARD_REG_SET (i1_regset);
3145 CLEAR_HARD_REG_SET (i2_regset);
3146
3147 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3148 if (REG_NOTE_KIND (note) == REG_DEAD
3149 && STACK_REG_P (XEXP (note, 0)))
3150 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3151
3152 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3153 if (REG_NOTE_KIND (note) == REG_DEAD
3154 && STACK_REG_P (XEXP (note, 0)))
3155 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3156
3157 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3158
3159 lose = 1;
3160
3161 done:
3162 ;
3163 }
3164 #endif
3165
3166 /* Don't allow old-style asm or volatile extended asms to be accepted
3167 for cross jumping purposes. It is conceptually correct to allow
3168 them, since cross-jumping preserves the dynamic instruction order
3169 even though it is changing the static instruction order. However,
3170 if an asm is being used to emit an assembler pseudo-op, such as
3171 the MIPS `.set reorder' pseudo-op, then the static instruction order
3172 matters and it must be preserved. */
3173 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
3174 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
3175 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
3176 lose = 1;
3177
3178 if (lose || GET_CODE (p1) != GET_CODE (p2)
3179 || ! rtx_renumbered_equal_p (p1, p2))
3180 {
3181 /* The following code helps take care of G++ cleanups. */
3182 rtx equiv1;
3183 rtx equiv2;
3184
3185 if (!lose && GET_CODE (p1) == GET_CODE (p2)
3186 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
3187 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
3188 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
3189 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
3190 /* If the equivalences are not to a constant, they may
3191 reference pseudos that no longer exist, so we can't
3192 use them. */
3193 && CONSTANT_P (XEXP (equiv1, 0))
3194 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3195 {
3196 rtx s1 = single_set (i1);
3197 rtx s2 = single_set (i2);
3198 if (s1 != 0 && s2 != 0
3199 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3200 {
3201 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3202 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3203 if (! rtx_renumbered_equal_p (p1, p2))
3204 cancel_changes (0);
3205 else if (apply_change_group ())
3206 goto win;
3207 }
3208 }
3209
3210 /* Insns fail to match; cross jumping is limited to the following
3211 insns. */
3212
3213 #ifdef HAVE_cc0
3214 /* Don't allow the insn after a compare to be shared by
3215 cross-jumping unless the compare is also shared.
3216 Here, if either of these non-matching insns is a compare,
3217 exclude the following insn from possible cross-jumping. */
3218 if (sets_cc0_p (p1) || sets_cc0_p (p2))
3219 last1 = afterlast1, last2 = afterlast2, ++minimum;
3220 #endif
3221
3222 /* If cross-jumping here will feed a jump-around-jump
3223 optimization, this jump won't cost extra, so reduce
3224 the minimum. */
3225 if (GET_CODE (i1) == JUMP_INSN
3226 && JUMP_LABEL (i1)
3227 && prev_real_insn (JUMP_LABEL (i1)) == e1)
3228 --minimum;
3229 break;
3230 }
3231
3232 win:
3233 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3234 {
3235 /* Ok, this insn is potentially includable in a cross-jump here. */
3236 afterlast1 = last1, afterlast2 = last2;
3237 last1 = i1, last2 = i2, --minimum;
3238 }
3239 }
3240
3241 if (minimum <= 0 && last1 != 0 && last1 != e1)
3242 *f1 = last1, *f2 = last2;
3243 }
3244
3245 static void
3246 do_cross_jump (insn, newjpos, newlpos)
3247 rtx insn, newjpos, newlpos;
3248 {
3249 /* Find an existing label at this point
3250 or make a new one if there is none. */
3251 register rtx label = get_label_before (newlpos);
3252
3253 /* Make the same jump insn jump to the new point. */
3254 if (GET_CODE (PATTERN (insn)) == RETURN)
3255 {
3256 /* Remove from jump chain of returns. */
3257 delete_from_jump_chain (insn);
3258 /* Change the insn. */
3259 PATTERN (insn) = gen_jump (label);
3260 INSN_CODE (insn) = -1;
3261 JUMP_LABEL (insn) = label;
3262 LABEL_NUSES (label)++;
3263 /* Add to the jump chain of the new label. */
3264 if (INSN_UID (label) < max_jump_chain
3265 && INSN_UID (insn) < max_jump_chain)
3266 {
3267 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
3268 jump_chain[INSN_UID (label)] = insn;
3269 }
3270 }
3271 else
3272 redirect_jump (insn, label);
3273
3274 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3275 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3276 the NEWJPOS stream. */
3277
3278 while (newjpos != insn)
3279 {
3280 rtx lnote;
3281
3282 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
3283 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
3284 || REG_NOTE_KIND (lnote) == REG_EQUIV)
3285 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
3286 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
3287 remove_note (newlpos, lnote);
3288
3289 delete_insn (newjpos);
3290 newjpos = next_real_insn (newjpos);
3291 newlpos = next_real_insn (newlpos);
3292 }
3293 }
3294 \f
3295 /* Return the label before INSN, or put a new label there. */
3296
3297 rtx
3298 get_label_before (insn)
3299 rtx insn;
3300 {
3301 rtx label;
3302
3303 /* Find an existing label at this point
3304 or make a new one if there is none. */
3305 label = prev_nonnote_insn (insn);
3306
3307 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3308 {
3309 rtx prev = PREV_INSN (insn);
3310
3311 label = gen_label_rtx ();
3312 emit_label_after (label, prev);
3313 LABEL_NUSES (label) = 0;
3314 }
3315 return label;
3316 }
3317
3318 /* Return the label after INSN, or put a new label there. */
3319
3320 rtx
3321 get_label_after (insn)
3322 rtx insn;
3323 {
3324 rtx label;
3325
3326 /* Find an existing label at this point
3327 or make a new one if there is none. */
3328 label = next_nonnote_insn (insn);
3329
3330 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3331 {
3332 label = gen_label_rtx ();
3333 emit_label_after (label, insn);
3334 LABEL_NUSES (label) = 0;
3335 }
3336 return label;
3337 }
3338 \f
3339 /* Return 1 if INSN is a jump that jumps to right after TARGET
3340 only on the condition that TARGET itself would drop through.
3341 Assumes that TARGET is a conditional jump. */
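/* For example (hypothetical labels L and L2):
        TARGET: if (a > b) goto L;
     L2:        ...                     (immediately after TARGET)
        ...
        INSN:   if (a <= b) goto L2;
     L:         ...
   INSN reaches L2 exactly when TARGET would have dropped through,
   so the two are "opposing" jumps.  */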
3342
3343 static int
3344 jump_back_p (insn, target)
3345 rtx insn, target;
3346 {
3347 rtx cinsn, ctarget;
3348 enum rtx_code codei, codet;
3349
3350 if (simplejump_p (insn) || ! condjump_p (insn)
3351 || simplejump_p (target)
3352 || target != prev_real_insn (JUMP_LABEL (insn)))
3353 return 0;
3354
3355 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3356 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3357
3358 codei = GET_CODE (cinsn);
3359 codet = GET_CODE (ctarget);
3360
3361 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3362 {
3363 if (! can_reverse_comparison_p (cinsn, insn))
3364 return 0;
3365 codei = reverse_condition (codei);
3366 }
3367
3368 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3369 {
3370 if (! can_reverse_comparison_p (ctarget, target))
3371 return 0;
3372 codet = reverse_condition (codet);
3373 }
3374
3375 return (codei == codet
3376 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3377 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3378 }
3379 \f
3380 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3381 return non-zero if it is safe to reverse this comparison. It is if our
3382 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3383 this is known to be an integer comparison. */
3384
3385 int
3386 can_reverse_comparison_p (comparison, insn)
3387 rtx comparison;
3388 rtx insn;
3389 {
3390 rtx arg0;
3391
3392 /* If this is not actually a comparison, we can't reverse it. */
3393 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3394 return 0;
3395
3396 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3397 /* If this is an NE comparison, it is safe to reverse it to an EQ
3398 comparison and vice versa, even for floating point. If no operands
3399 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3400 always false and NE is always true, so the reversal is also valid. */
3401 || flag_fast_math
3402 || GET_CODE (comparison) == NE
3403 || GET_CODE (comparison) == EQ)
3404 return 1;
3405
3406 arg0 = XEXP (comparison, 0);
3407
3408 /* Make sure ARG0 is one of the actual objects being compared. If we
3409 can't do this, we can't be sure the comparison can be reversed.
3410
3411 Handle cc0 and a MODE_CC register. */
3412 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3413 #ifdef HAVE_cc0
3414 || arg0 == cc0_rtx
3415 #endif
3416 )
3417 {
3418 rtx prev = prev_nonnote_insn (insn);
3419 rtx set;
3420
3421 	  /* First see if the condition code mode alone is enough to say we can
3422 	     reverse the condition.  If not, then search backwards for a set of
3423 	     ARG0.  We do not need to check for an insn clobbering it since valid
3424 	     code will contain a set with no intervening clobber.  But
3425 	     stop when we reach a label.  */
3426 #ifdef REVERSIBLE_CC_MODE
3427 if (GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC
3428 && REVERSIBLE_CC_MODE (GET_MODE (arg0)))
3429 return 1;
3430 #endif
3431
3432 for (prev = prev_nonnote_insn (insn);
3433 prev != 0 && GET_CODE (prev) != CODE_LABEL;
3434 prev = prev_nonnote_insn (prev))
3435 if ((set = single_set (prev)) != 0
3436 && rtx_equal_p (SET_DEST (set), arg0))
3437 {
3438 arg0 = SET_SRC (set);
3439
3440 if (GET_CODE (arg0) == COMPARE)
3441 arg0 = XEXP (arg0, 0);
3442 break;
3443 }
3444 }
3445
3446 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3447 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3448 return (GET_CODE (arg0) == CONST_INT
3449 || (GET_MODE (arg0) != VOIDmode
3450 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3451 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3452 }
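
/* A concrete example of why the IEEE check matters:  with a NaN operand,
   (lt x y) and (ge x y) are both false, so replacing a branch on LT by
   the reversed branch on GE would change which path is taken.  EQ and NE
   are exempt because a NaN makes EQ uniformly false and NE uniformly
   true, so negating one always yields the other.  */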
3453
3454 /* Given an rtx-code for a comparison, return the code
3455 for the negated comparison.
3456 WATCH OUT! reverse_condition is not safe to use on a jump
3457 that might be acting on the results of an IEEE floating point comparison,
3458    because of the special treatment of non-signaling NaNs in comparisons.
3459 Use can_reverse_comparison_p to be sure. */
3460
3461 enum rtx_code
3462 reverse_condition (code)
3463 enum rtx_code code;
3464 {
3465 switch (code)
3466 {
3467 case EQ:
3468 return NE;
3469
3470 case NE:
3471 return EQ;
3472
3473 case GT:
3474 return LE;
3475
3476 case GE:
3477 return LT;
3478
3479 case LT:
3480 return GE;
3481
3482 case LE:
3483 return GT;
3484
3485 case GTU:
3486 return LEU;
3487
3488 case GEU:
3489 return LTU;
3490
3491 case LTU:
3492 return GEU;
3493
3494 case LEU:
3495 return GTU;
3496
3497 default:
3498 abort ();
3499 return UNKNOWN;
3500 }
3501 }
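
/* For example, reverse_condition (GT) is LE and reverse_condition (LTU)
   is GEU; signedness is preserved.  Contrast this with swap_condition
   below:  reversing negates the predicate, while swapping merely
   exchanges its operands.  */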
3502
3503 /* Similar, but return the code when two operands of a comparison are swapped.
3504 This IS safe for IEEE floating-point. */
3505
3506 enum rtx_code
3507 swap_condition (code)
3508 enum rtx_code code;
3509 {
3510 switch (code)
3511 {
3512 case EQ:
3513 case NE:
3514 return code;
3515
3516 case GT:
3517 return LT;
3518
3519 case GE:
3520 return LE;
3521
3522 case LT:
3523 return GT;
3524
3525 case LE:
3526 return GE;
3527
3528 case GTU:
3529 return LTU;
3530
3531 case GEU:
3532 return LEU;
3533
3534 case LTU:
3535 return GTU;
3536
3537 case LEU:
3538 return GEU;
3539
3540 default:
3541 abort ();
3542 return UNKNOWN;
3543 }
3544 }
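
/* For example, (lt (reg 58) (reg 59)) tests the same thing as
   (gt (reg 59) (reg 58)), so swap_condition (LT) is GT.  Unlike
   reverse_condition, this is safe even for IEEE floating point:
   exchanging the operands never changes the outcome on NaNs.  */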
3545
3546 /* Given a comparison CODE, return the corresponding unsigned comparison.
3547 If CODE is an equality comparison or already an unsigned comparison,
3548 CODE is returned. */
3549
3550 enum rtx_code
3551 unsigned_condition (code)
3552 enum rtx_code code;
3553 {
3554 switch (code)
3555 {
3556 case EQ:
3557 case NE:
3558 case GTU:
3559 case GEU:
3560 case LTU:
3561 case LEU:
3562 return code;
3563
3564 case GT:
3565 return GTU;
3566
3567 case GE:
3568 return GEU;
3569
3570 case LT:
3571 return LTU;
3572
3573 case LE:
3574 return LEU;
3575
3576 default:
3577 abort ();
3578 }
3579 }
3580
3581 /* Similarly, return the signed version of a comparison. */
3582
3583 enum rtx_code
3584 signed_condition (code)
3585 enum rtx_code code;
3586 {
3587 switch (code)
3588 {
3589 case EQ:
3590 case NE:
3591 case GT:
3592 case GE:
3593 case LT:
3594 case LE:
3595 return code;
3596
3597 case GTU:
3598 return GT;
3599
3600 case GEU:
3601 return GE;
3602
3603 case LTU:
3604 return LT;
3605
3606 case LEU:
3607 return LE;
3608
3609 default:
3610 abort ();
3611 }
3612 }
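
/* For example, unsigned_condition (LT) is LTU and signed_condition (LTU)
   is LT; on the ordering codes the two functions are inverses of each
   other, and both are the identity on EQ, NE, and codes that already
   have the requested signedness.  */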
3613 \f
3614 /* Return non-zero if CODE1 is at least as strict as CODE2, i.e., if the
3615    truth of CODE1 implies the truth of CODE2.  */
3616
3617 int
3618 comparison_dominates_p (code1, code2)
3619 enum rtx_code code1, code2;
3620 {
3621 if (code1 == code2)
3622 return 1;
3623
3624 switch (code1)
3625 {
3626 case EQ:
3627 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
3628 return 1;
3629 break;
3630
3631 case LT:
3632 if (code2 == LE || code2 == NE)
3633 return 1;
3634 break;
3635
3636 case GT:
3637 if (code2 == GE || code2 == NE)
3638 return 1;
3639 break;
3640
3641 case LTU:
3642 if (code2 == LEU || code2 == NE)
3643 return 1;
3644 break;
3645
3646 case GTU:
3647 if (code2 == GEU || code2 == NE)
3648 return 1;
3649 break;
3650
3651 default:
3652 break;
3653 }
3654
3655 return 0;
3656 }
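
/* For example, comparison_dominates_p (EQ, LE) is 1, since a == b
   implies a <= b, while comparison_dominates_p (LE, LT) is 0:  knowing
   a <= b does not settle a < b.  Callers such as thread_jumps use this
   to decide whether the outcome of one branch fixes the outcome of a
   later branch on the same operands.  */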
3657 \f
3658 /* Return 1 if INSN is an unconditional jump and nothing else. */
3659
3660 int
3661 simplejump_p (insn)
3662 rtx insn;
3663 {
3664 return (GET_CODE (insn) == JUMP_INSN
3665 && GET_CODE (PATTERN (insn)) == SET
3666 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3667 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3668 }
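
/* The only accepted pattern is a bare (set (pc) (label_ref ...)), e.g.

	(jump_insn (set (pc) (label_ref 23)))

   where 23 is a hypothetical label.  A tablejump, a RETURN, or a branch
   wrapped in a PARALLEL is a JUMP_INSN but fails this test.  */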
3669
3670 /* Return nonzero if INSN is a (possibly) conditional jump
3671 and nothing more. */
3672
3673 int
3674 condjump_p (insn)
3675 rtx insn;
3676 {
3677 register rtx x = PATTERN (insn);
3678
3679 if (GET_CODE (x) != SET
3680 || GET_CODE (SET_DEST (x)) != PC)
3681 return 0;
3682
3683 x = SET_SRC (x);
3684 if (GET_CODE (x) == LABEL_REF)
3685 return 1;
3686   return (GET_CODE (x) == IF_THEN_ELSE
3687 	  && ((GET_CODE (XEXP (x, 2)) == PC
3688 	       && (GET_CODE (XEXP (x, 1)) == LABEL_REF
3689 		   || GET_CODE (XEXP (x, 1)) == RETURN))
3690 	      || (GET_CODE (XEXP (x, 1)) == PC
3691 		  && (GET_CODE (XEXP (x, 2)) == LABEL_REF
3692 		      || GET_CODE (XEXP (x, 2)) == RETURN))));
3695 }
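
/* Patterns accepted here, sketched with a hypothetical label:

	(set (pc) (label_ref 23))
	(set (pc) (if_then_else (eq (reg 58) (const_int 0))
				(label_ref 23)
				(pc)))

   plus the variant with the IF_THEN_ELSE arms exchanged, and RETURN in
   place of the label_ref.  A branch inside a PARALLEL is rejected; see
   condjump_in_parallel_p below.  */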
3696
3697 /* Return nonzero if INSN is a (possibly) conditional jump inside a
3698 PARALLEL. */
3699
3700 int
3701 condjump_in_parallel_p (insn)
3702 rtx insn;
3703 {
3704 register rtx x = PATTERN (insn);
3705
3706 if (GET_CODE (x) != PARALLEL)
3707 return 0;
3708 else
3709 x = XVECEXP (x, 0, 0);
3710
3711 if (GET_CODE (x) != SET)
3712 return 0;
3713 if (GET_CODE (SET_DEST (x)) != PC)
3714 return 0;
3715 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3716 return 1;
3717 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3718 return 0;
3719 if (XEXP (SET_SRC (x), 2) == pc_rtx
3720 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3721 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3722 return 1;
3723 if (XEXP (SET_SRC (x), 1) == pc_rtx
3724 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3725 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3726 return 1;
3727 return 0;
3728 }
3729
3730 /* Return the label of a conditional jump. */
3731
3732 rtx
3733 condjump_label (insn)
3734 rtx insn;
3735 {
3736 register rtx x = PATTERN (insn);
3737
3738 if (GET_CODE (x) == PARALLEL)
3739 x = XVECEXP (x, 0, 0);
3740 if (GET_CODE (x) != SET)
3741 return NULL_RTX;
3742 if (GET_CODE (SET_DEST (x)) != PC)
3743 return NULL_RTX;
3744 x = SET_SRC (x);
3745 if (GET_CODE (x) == LABEL_REF)
3746 return x;
3747 if (GET_CODE (x) != IF_THEN_ELSE)
3748 return NULL_RTX;
3749 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
3750 return XEXP (x, 1);
3751 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
3752 return XEXP (x, 2);
3753 return NULL_RTX;
3754 }
3755
3756 /* Return true if INSN is a (possibly conditional) return insn. */
3757
3758 static int
3759 returnjump_p_1 (loc, data)
3760 rtx *loc;
3761 void *data ATTRIBUTE_UNUSED;
3762 {
3763 rtx x = *loc;
3764 return GET_CODE (x) == RETURN;
3765 }
3766
3767 int
3768 returnjump_p (insn)
3769 rtx insn;
3770 {
3771 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
3772 }
3773
3774 /* Return true if INSN is a jump that only transfers control and
3775 nothing more. */
3776
3777 int
3778 onlyjump_p (insn)
3779 rtx insn;
3780 {
3781 rtx set;
3782
3783 if (GET_CODE (insn) != JUMP_INSN)
3784 return 0;
3785
3786 set = single_set (insn);
3787 if (set == NULL)
3788 return 0;
3789 if (GET_CODE (SET_DEST (set)) != PC)
3790 return 0;
3791 if (side_effects_p (SET_SRC (set)))
3792 return 0;
3793
3794 return 1;
3795 }
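
/* For instance, a branch whose condition reads a volatile memory
   location, or whose source contains any other side effect, transfers
   control but is not an "only" jump and is rejected above.  */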
3796
3797 #ifdef HAVE_cc0
3798
3799 /* Return 1 if X is an RTX that does nothing but set the condition codes
3800 and CLOBBER or USE registers.
3801 Return -1 if X does explicitly set the condition codes,
3802 but also does other things. */
3803
3804 int
3805 sets_cc0_p (x)
3806      rtx x;
3807 {
3808 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3809 return 1;
3810 if (GET_CODE (x) == PARALLEL)
3811 {
3812 int i;
3813 int sets_cc0 = 0;
3814 int other_things = 0;
3815 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3816 {
3817 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3818 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3819 sets_cc0 = 1;
3820 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3821 other_things = 1;
3822 }
3823 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3824 }
3825 return 0;
3826 }
3827 #endif
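
/* Illustrative return values for sets_cc0_p, with hypothetical registers:

	(set (cc0) (compare (reg 58) (const_int 0)))		-> 1
	(parallel [(set (cc0) ...) (set (reg 59) ...)])		-> -1
	(set (reg 59) (reg 58))					-> 0  */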
3828 \f
3829 /* Follow any unconditional jump at LABEL;
3830 return the ultimate label reached by any such chain of jumps.
3831 If LABEL is not followed by a jump, return LABEL.
3832    If the chain loops or we can't find the end, return LABEL,
3833    since that tells the caller to avoid changing the insn.
3834
3835 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3836 a USE or CLOBBER. */
3837
3838 rtx
3839 follow_jumps (label)
3840 rtx label;
3841 {
3842 register rtx insn;
3843 register rtx next;
3844 register rtx value = label;
3845 register int depth;
3846
3847 for (depth = 0;
3848 (depth < 10
3849 && (insn = next_active_insn (value)) != 0
3850 && GET_CODE (insn) == JUMP_INSN
3851 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3852 || GET_CODE (PATTERN (insn)) == RETURN)
3853 && (next = NEXT_INSN (insn))
3854 && GET_CODE (next) == BARRIER);
3855 depth++)
3856 {
3857 /* Don't chain through the insn that jumps into a loop
3858 from outside the loop,
3859 since that would create multiple loop entry jumps
3860 and prevent loop optimization. */
3861 rtx tem;
3862 if (!reload_completed)
3863 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3864 if (GET_CODE (tem) == NOTE
3865 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
3866 /* ??? Optional. Disables some optimizations, but makes
3867 gcov output more accurate with -O. */
3868 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
3869 return value;
3870
3871 /* If we have found a cycle, make the insn jump to itself. */
3872 if (JUMP_LABEL (insn) == label)
3873 return label;
3874
3875 tem = next_active_insn (JUMP_LABEL (insn));
3876 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3877 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3878 break;
3879
3880 value = JUMP_LABEL (insn);
3881 }
3882 if (depth == 10)
3883 return label;
3884 return value;
3885 }
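
/* A sketch of the chain this collapses (labels are hypothetical):

	L1:	(jump_insn (set (pc) (label_ref L2)))
	...
	L2:	(jump_insn (set (pc) (label_ref L3)))

   follow_jumps (L1) returns L3, so a branch to L1 can be redirected
   straight to L3.  The depth limit of 10 and the self-reference check
   keep a cyclic chain from being followed forever.  */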
3886
3887 /* Assuming that field IDX of X is a vector of label_refs,
3888 replace each of them by the ultimate label reached by it.
3889 Return nonzero if a change is made.
3890    Before reload has completed, we do not chain across a NOTE_INSN_LOOP_BEG.  */
3891
3892 static int
3893 tension_vector_labels (x, idx)
3894 register rtx x;
3895 register int idx;
3896 {
3897 int changed = 0;
3898 register int i;
3899 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3900 {
3901 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3902 register rtx nlabel = follow_jumps (olabel);
3903 if (nlabel && nlabel != olabel)
3904 {
3905 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3906 ++LABEL_NUSES (nlabel);
3907 if (--LABEL_NUSES (olabel) == 0)
3908 delete_insn (olabel);
3909 changed = 1;
3910 }
3911 }
3912 return changed;
3913 }
3914 \f
3915 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3916 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3917 in INSN, then store one of them in JUMP_LABEL (INSN).
3918 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3919 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3920 Also, when there are consecutive labels, canonicalize on the last of them.
3921
3922 Note that two labels separated by a loop-beginning note
3923 must be kept distinct if we have not yet done loop-optimization,
3924 because the gap between them is where loop-optimize
3925    will want to move invariant code to.  A nonzero CROSS_JUMP tells us
3926    that loop-optimization has already been done.
3927
3928 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3929 two labels distinct if they are separated by only USE or CLOBBER insns. */
3930
3931 static void
3932 mark_jump_label (x, insn, cross_jump)
3933 register rtx x;
3934 rtx insn;
3935 int cross_jump;
3936 {
3937 register RTX_CODE code = GET_CODE (x);
3938 register int i;
3939 register const char *fmt;
3940
3941 switch (code)
3942 {
3943 case PC:
3944 case CC0:
3945 case REG:
3946 case SUBREG:
3947 case CONST_INT:
3948 case SYMBOL_REF:
3949 case CONST_DOUBLE:
3950 case CLOBBER:
3951 case CALL:
3952 return;
3953
3954 case MEM:
3955 /* If this is a constant-pool reference, see if it is a label. */
3956 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3957 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3958 mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
3959 break;
3960
3961 case LABEL_REF:
3962 {
3963 rtx label = XEXP (x, 0);
3964 rtx olabel = label;
3965 rtx note;
3966 rtx next;
3967
3968 if (GET_CODE (label) != CODE_LABEL)
3969 abort ();
3970
3971 /* Ignore references to labels of containing functions. */
3972 if (LABEL_REF_NONLOCAL_P (x))
3973 break;
3974
3975 /* If there are other labels following this one,
3976 replace it with the last of the consecutive labels. */
3977 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3978 {
3979 if (GET_CODE (next) == CODE_LABEL)
3980 label = next;
3981 else if (cross_jump && GET_CODE (next) == INSN
3982 && (GET_CODE (PATTERN (next)) == USE
3983 || GET_CODE (PATTERN (next)) == CLOBBER))
3984 continue;
3985 else if (GET_CODE (next) != NOTE)
3986 break;
3987 else if (! cross_jump
3988 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3989 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
3990 /* ??? Optional. Disables some optimizations, but
3991 makes gcov output more accurate with -O. */
3992 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
3993 break;
3994 }
3995
3996 XEXP (x, 0) = label;
3997 if (! insn || ! INSN_DELETED_P (insn))
3998 ++LABEL_NUSES (label);
3999
4000 if (insn)
4001 {
4002 if (GET_CODE (insn) == JUMP_INSN)
4003 JUMP_LABEL (insn) = label;
4004
4005 /* If we've changed OLABEL and we had a REG_LABEL note
4006 for it, update it as well. */
4007 else if (label != olabel
4008 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
4009 XEXP (note, 0) = label;
4010
4011 /* Otherwise, add a REG_LABEL note for LABEL unless there already
4012 is one. */
4013 else if (! find_reg_note (insn, REG_LABEL, label))
4014 {
4015 	      /* This code used to ignore labels which referred to dispatch
4016 		 tables to avoid flow.c generating worse code.
4017 
4018 		 However, in the presence of global optimizations like
4019 gcse which call find_basic_blocks without calling
4020 life_analysis, not recording such labels will lead
4021 to compiler aborts because of inconsistencies in the
4022 flow graph. So we go ahead and record the label.
4023
4024 It may also be the case that the optimization argument
4025 is no longer valid because of the more accurate cfg
4026 we build in find_basic_blocks -- it no longer pessimizes
4027 code when it finds a REG_LABEL note. */
4028 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
4029 REG_NOTES (insn));
4030 }
4031 }
4032 return;
4033 }
4034
4035 /* Do walk the labels in a vector, but not the first operand of an
4036 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
4037 case ADDR_VEC:
4038 case ADDR_DIFF_VEC:
4039 if (! INSN_DELETED_P (insn))
4040 {
4041 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
4042
4043 for (i = 0; i < XVECLEN (x, eltnum); i++)
4044 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
4045 }
4046 return;
4047
4048 default:
4049 break;
4050 }
4051
4052 fmt = GET_RTX_FORMAT (code);
4053 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4054 {
4055 if (fmt[i] == 'e')
4056 mark_jump_label (XEXP (x, i), insn, cross_jump);
4057 else if (fmt[i] == 'E')
4058 {
4059 register int j;
4060 for (j = 0; j < XVECLEN (x, i); j++)
4061 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
4062 }
4063 }
4064 }
4065
4066 /* If all INSN does is set the pc, delete it,
4067 and delete the insn that set the condition codes for it
4068 if that's what the previous thing was. */
4069
4070 void
4071 delete_jump (insn)
4072 rtx insn;
4073 {
4074 register rtx set = single_set (insn);
4075
4076 if (set && GET_CODE (SET_DEST (set)) == PC)
4077 delete_computation (insn);
4078 }
4079
4080 /* Verify INSN is a BARRIER and delete it. */
4081
4082 void
4083 delete_barrier (insn)
4084 rtx insn;
4085 {
4086 if (GET_CODE (insn) != BARRIER)
4087 abort ();
4088
4089 delete_insn (insn);
4090 }
4091
4092 /* Recursively delete prior insns that compute the value (used only by INSN
4093 which the caller is deleting) stored in the register mentioned by NOTE
4094 which is a REG_DEAD note associated with INSN. */
4095
4096 static void
4097 delete_prior_computation (note, insn)
4098 rtx note;
4099 rtx insn;
4100 {
4101 rtx our_prev;
4102 rtx reg = XEXP (note, 0);
4103
4104 for (our_prev = prev_nonnote_insn (insn);
4105 our_prev && (GET_CODE (our_prev) == INSN
4106 || GET_CODE (our_prev) == CALL_INSN);
4107 our_prev = prev_nonnote_insn (our_prev))
4108 {
4109 rtx pat = PATTERN (our_prev);
4110
4111 /* If we reach a CALL which is not calling a const function
4112 or the callee pops the arguments, then give up. */
4113 if (GET_CODE (our_prev) == CALL_INSN
4114 && (! CONST_CALL_P (our_prev)
4115 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
4116 break;
4117
4118 /* If we reach a SEQUENCE, it is too complex to try to
4119 do anything with it, so give up. */
4120 if (GET_CODE (pat) == SEQUENCE)
4121 break;
4122
4123 if (GET_CODE (pat) == USE
4124 && GET_CODE (XEXP (pat, 0)) == INSN)
4125 /* reorg creates USEs that look like this. We leave them
4126 alone because reorg needs them for its own purposes. */
4127 break;
4128
4129 if (reg_set_p (reg, pat))
4130 {
4131 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
4132 break;
4133
4134 if (GET_CODE (pat) == PARALLEL)
4135 {
4136 /* If we find a SET of something else, we can't
4137 delete the insn. */
4138
4139 int i;
4140
4141 for (i = 0; i < XVECLEN (pat, 0); i++)
4142 {
4143 rtx part = XVECEXP (pat, 0, i);
4144
4145 if (GET_CODE (part) == SET
4146 && SET_DEST (part) != reg)
4147 break;
4148 }
4149
4150 if (i == XVECLEN (pat, 0))
4151 delete_computation (our_prev);
4152 }
4153 else if (GET_CODE (pat) == SET
4154 && GET_CODE (SET_DEST (pat)) == REG)
4155 {
4156 int dest_regno = REGNO (SET_DEST (pat));
4157 int dest_endregno
4158 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
4159 ? HARD_REGNO_NREGS (dest_regno,
4160 GET_MODE (SET_DEST (pat))) : 1);
4161 int regno = REGNO (reg);
4162 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
4163 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
4164
4165 if (dest_regno >= regno
4166 && dest_endregno <= endregno)
4167 delete_computation (our_prev);
4168
4169 /* We may have a multi-word hard register and some, but not
4170 all, of the words of the register are needed in subsequent
4171 insns. Write REG_UNUSED notes for those parts that were not
4172 needed. */
4173 else if (dest_regno <= regno
4174 && dest_endregno >= endregno)
4175 {
4176 int i;
4177
4178 REG_NOTES (our_prev)
4179 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (our_prev));
4180
4181 for (i = dest_regno; i < dest_endregno; i++)
4182 if (! find_regno_note (our_prev, REG_UNUSED, i))
4183 break;
4184
4185 if (i == dest_endregno)
4186 delete_computation (our_prev);
4187 }
4188 }
4189
4190 break;
4191 }
4192
4193 /* If PAT references the register that dies here, it is an
4194 additional use. Hence any prior SET isn't dead. However, this
4195 insn becomes the new place for the REG_DEAD note. */
4196 if (reg_overlap_mentioned_p (reg, pat))
4197 {
4198 XEXP (note, 1) = REG_NOTES (our_prev);
4199 REG_NOTES (our_prev) = note;
4200 break;
4201 }
4202 }
4203 }
4204
4205 /* Delete INSN and recursively delete insns that compute values used only
4206 by INSN. This uses the REG_DEAD notes computed during flow analysis.
4207 If we are running before flow.c, we need do nothing since flow.c will
4208 delete dead code. We also can't know if the registers being used are
4209 dead or not at this point.
4210
4211 Otherwise, look at all our REG_DEAD notes. If a previous insn does
4212 nothing other than set a register that dies in this insn, we can delete
4213 that insn as well.
4214
4215 On machines with CC0, if CC0 is used in this insn, we may be able to
4216 delete the insn that set it. */
4217
4218 static void
4219 delete_computation (insn)
4220 rtx insn;
4221 {
4222 rtx note, next;
4223 rtx set;
4224
4225 #ifdef HAVE_cc0
4226 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
4227 {
4228 rtx prev = prev_nonnote_insn (insn);
4229 /* We assume that at this stage
4230 CC's are always set explicitly
4231 and always immediately before the jump that
4232 will use them. So if the previous insn
4233 exists to set the CC's, delete it
4234 (unless it performs auto-increments, etc.). */
4235 if (prev && GET_CODE (prev) == INSN
4236 && sets_cc0_p (PATTERN (prev)))
4237 {
4238 if (sets_cc0_p (PATTERN (prev)) > 0
4239 && ! side_effects_p (PATTERN (prev)))
4240 delete_computation (prev);
4241 else
4242 /* Otherwise, show that cc0 won't be used. */
4243 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
4244 cc0_rtx, REG_NOTES (prev));
4245 }
4246 }
4247 #endif
4248
4249 #ifdef INSN_SCHEDULING
4250 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
4251 reload has completed. The schedulers need to be fixed. Until
4252 they are, we must not rely on the death notes here. */
4253 if (reload_completed && flag_schedule_insns_after_reload)
4254 {
4255 delete_insn (insn);
4256 return;
4257 }
4258 #endif
4259
4260 /* The REG_DEAD note may have been omitted for a register
4261 which is both set and used by the insn. */
4262 set = single_set (insn);
4263 if (set && GET_CODE (SET_DEST (set)) == REG)
4264 {
4265 int dest_regno = REGNO (SET_DEST (set));
4266 int dest_endregno
4267 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
4268 ? HARD_REGNO_NREGS (dest_regno,
4269 GET_MODE (SET_DEST (set))) : 1);
4270 int i;
4271
4272 for (i = dest_regno; i < dest_endregno; i++)
4273 {
4274 if (! refers_to_regno_p (i, i + 1, SET_SRC (set), NULL_PTR)
4275 || find_regno_note (insn, REG_DEAD, i))
4276 continue;
4277
4278 note = gen_rtx_EXPR_LIST (REG_DEAD, (i < FIRST_PSEUDO_REGISTER
4279 ? gen_rtx_REG (reg_raw_mode[i], i)
4280 : SET_DEST (set)), NULL_RTX);
4281 delete_prior_computation (note, insn);
4282 }
4283 }
4284
4285 for (note = REG_NOTES (insn); note; note = next)
4286 {
4287 next = XEXP (note, 1);
4288
4289 if (REG_NOTE_KIND (note) != REG_DEAD
4290 /* Verify that the REG_NOTE is legitimate. */
4291 || GET_CODE (XEXP (note, 0)) != REG)
4292 continue;
4293
4294 delete_prior_computation (note, insn);
4295 }
4296
4297 delete_insn (insn);
4298 }
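
/* A sketch of the cascade, with hypothetical registers:  when
   delete_jump removes the branch below, delete_computation also removes
   the insn computing reg 58, because the REG_DEAD note shows the branch
   was its only remaining use:

	(insn (set (reg 58) ...))			 deleted second
	(jump_insn (set (pc) (if_then_else (ne (reg 58) (const_int 0))
					   (label_ref 23) (pc)))
		   carrying (expr_list:REG_DEAD (reg 58)))  deleted first  */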
4299 \f
4300 /* Delete insn INSN from the chain of insns and update label ref counts.
4301 May delete some following insns as a consequence; may even delete
4302 a label elsewhere and insns that follow it.
4303
4304 Returns the first insn after INSN that was not deleted. */
4305
4306 rtx
4307 delete_insn (insn)
4308 register rtx insn;
4309 {
4310 register rtx next = NEXT_INSN (insn);
4311 register rtx prev = PREV_INSN (insn);
4312 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
4313 register int dont_really_delete = 0;
4314
4315 while (next && INSN_DELETED_P (next))
4316 next = NEXT_INSN (next);
4317
4318 /* This insn is already deleted => return first following nondeleted. */
4319 if (INSN_DELETED_P (insn))
4320 return next;
4321
4322 if (was_code_label)
4323 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
4324
4325 /* Don't delete user-declared labels. Convert them to special NOTEs
4326 instead. */
4327 if (was_code_label && LABEL_NAME (insn) != 0
4328 && optimize && ! dont_really_delete)
4329 {
4330 PUT_CODE (insn, NOTE);
4331 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
4332 NOTE_SOURCE_FILE (insn) = 0;
4333 dont_really_delete = 1;
4334 }
4335 else
4336 /* Mark this insn as deleted. */
4337 INSN_DELETED_P (insn) = 1;
4338
4339 /* If this is an unconditional jump, delete it from the jump chain. */
4340 if (simplejump_p (insn))
4341 delete_from_jump_chain (insn);
4342
4343 /* If instruction is followed by a barrier,
4344 delete the barrier too. */
4345
4346 if (next != 0 && GET_CODE (next) == BARRIER)
4347 {
4348 INSN_DELETED_P (next) = 1;
4349 next = NEXT_INSN (next);
4350 }
4351
4352   /* Patch out INSN (and the barrier, if any).  */
4353
4354 if (optimize && ! dont_really_delete)
4355 {
4356 if (prev)
4357 {
4358 NEXT_INSN (prev) = next;
4359 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
4360 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
4361 XVECLEN (PATTERN (prev), 0) - 1)) = next;
4362 }
4363
4364 if (next)
4365 {
4366 PREV_INSN (next) = prev;
4367 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
4368 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4369 }
4370
4371 if (prev && NEXT_INSN (prev) == 0)
4372 set_last_insn (prev);
4373 }
4374
4375 /* If deleting a jump, decrement the count of the label,
4376 and delete the label if it is now unused. */
4377
4378 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
4379 {
4380 rtx lab = JUMP_LABEL (insn), lab_next;
4381
4382 if (--LABEL_NUSES (lab) == 0)
4383 {
4384 /* This can delete NEXT or PREV,
4385 either directly if NEXT is JUMP_LABEL (INSN),
4386 or indirectly through more levels of jumps. */
4387 delete_insn (lab);
4388
4389 /* I feel a little doubtful about this loop,
4390 but I see no clean and sure alternative way
4391 to find the first insn after INSN that is not now deleted.
4392 I hope this works. */
4393 while (next && INSN_DELETED_P (next))
4394 next = NEXT_INSN (next);
4395 return next;
4396 }
4397 else if ((lab_next = next_nonnote_insn (lab)) != NULL
4398 && GET_CODE (lab_next) == JUMP_INSN
4399 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
4400 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
4401 {
4402 /* If we're deleting the tablejump, delete the dispatch table.
4403 	     We may not be able to kill the label immediately preceding
4404 just yet, as it might be referenced in code leading up to
4405 the tablejump. */
4406 delete_insn (lab_next);
4407 }
4408 }
4409
4410 /* Likewise if we're deleting a dispatch table. */
4411
4412 if (GET_CODE (insn) == JUMP_INSN
4413 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4414 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4415 {
4416 rtx pat = PATTERN (insn);
4417 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4418 int len = XVECLEN (pat, diff_vec_p);
4419
4420 for (i = 0; i < len; i++)
4421 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
4422 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
4423 while (next && INSN_DELETED_P (next))
4424 next = NEXT_INSN (next);
4425 return next;
4426 }
4427
4428 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
4429 prev = PREV_INSN (prev);
4430
4431 /* If INSN was a label and a dispatch table follows it,
4432 delete the dispatch table. The tablejump must have gone already.
4433 It isn't useful to fall through into a table. */
4434
4435 if (was_code_label
4436 && NEXT_INSN (insn) != 0
4437 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4438 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
4439 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
4440 next = delete_insn (NEXT_INSN (insn));
4441
4442 /* If INSN was a label, delete insns following it if now unreachable. */
4443
4444 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
4445 {
4446 register RTX_CODE code;
4447 while (next != 0
4448 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
4449 || code == NOTE || code == BARRIER
4450 || (code == CODE_LABEL && INSN_DELETED_P (next))))
4451 {
4452 if (code == NOTE
4453 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
4454 next = NEXT_INSN (next);
4455 /* Keep going past other deleted labels to delete what follows. */
4456 else if (code == CODE_LABEL && INSN_DELETED_P (next))
4457 next = NEXT_INSN (next);
4458 else
4459 /* Note: if this deletes a jump, it can cause more
4460 deletion of unreachable code, after a different label.
4461 As long as the value from this recursive call is correct,
4462 this invocation functions correctly. */
4463 next = delete_insn (next);
4464 }
4465 }
4466
4467 return next;
4468 }
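
/* Because deleting one insn can take labels, barriers, dispatch tables,
   and whole unreachable runs with it, callers scanning forward must
   resume from the value returned here rather than from NEXT_INSN of the
   argument; the safe idiom inside a scan is

	insn = delete_insn (insn);  */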
4469
4470 /* Advance from INSN until reaching something not deleted,
4471    then return that insn.  May return INSN itself.  */
4472
4473 rtx
4474 next_nondeleted_insn (insn)
4475 rtx insn;
4476 {
4477 while (INSN_DELETED_P (insn))
4478 insn = NEXT_INSN (insn);
4479 return insn;
4480 }
4481 \f
4482 /* Delete a range of insns from FROM to TO, inclusive.
4483 This is for the sake of peephole optimization, so assume
4484 that whatever these insns do will still be done by a new
4485 peephole insn that will replace them. */
4486
4487 void
4488 delete_for_peephole (from, to)
4489 register rtx from, to;
4490 {
4491 register rtx insn = from;
4492
4493 while (1)
4494 {
4495 register rtx next = NEXT_INSN (insn);
4496 register rtx prev = PREV_INSN (insn);
4497
4498 if (GET_CODE (insn) != NOTE)
4499 {
4500 INSN_DELETED_P (insn) = 1;
4501
4502 /* Patch this insn out of the chain. */
4503 /* We don't do this all at once, because we
4504 must preserve all NOTEs. */
4505 if (prev)
4506 NEXT_INSN (prev) = next;
4507
4508 if (next)
4509 PREV_INSN (next) = prev;
4510 }
4511
4512 if (insn == to)
4513 break;
4514 insn = next;
4515 }
4516
4517 /* Note that if TO is an unconditional jump
4518 we *do not* delete the BARRIER that follows,
4519 since the peephole that replaces this sequence
4520 is also an unconditional jump in that case. */
4521 }
4522 \f
4523 /* We have determined that INSN is never reached, and are about to
4524 delete it. Print a warning if the user asked for one.
4525
4526 To try to make this warning more useful, this should only be called
4527 once per basic block not reached, and it only warns when the basic
4528 block contains more than one line from the current function, and
4529 contains at least one operation. CSE and inlining can duplicate insns,
4530 so it's possible to get spurious warnings from this. */
4531
4532 void
4533 never_reached_warning (avoided_insn)
4534 rtx avoided_insn;
4535 {
4536 rtx insn;
4537 rtx a_line_note = NULL;
4538 int two_avoided_lines = 0;
4539 int contains_insn = 0;
4540
4541 if (! warn_notreached)
4542 return;
4543
4544 /* Scan forwards, looking at LINE_NUMBER notes, until
4545 we hit a LABEL or we run out of insns. */
4546
4547 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
4548 {
4549 if (GET_CODE (insn) == CODE_LABEL)
4550 break;
4551 else if (GET_CODE (insn) == NOTE /* A line number note? */
4552 && NOTE_LINE_NUMBER (insn) >= 0)
4553 {
4554 if (a_line_note == NULL)
4555 a_line_note = insn;
4556 else
4557 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
4558 != NOTE_LINE_NUMBER (insn));
4559 }
4560 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4561 contains_insn = 1;
4562 }
4563 if (two_avoided_lines && contains_insn)
4564 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
4565 NOTE_LINE_NUMBER (a_line_note),
4566 "will never be executed");
4567 }
4568 \f
4569 /* Invert the condition of the jump JUMP, and make it jump
4570 to label NLABEL instead of where it jumps now. */
4571
4572 int
4573 invert_jump (jump, nlabel)
4574 rtx jump, nlabel;
4575 {
4576 /* We have to either invert the condition and change the label or
4577 do neither. Either operation could fail. We first try to invert
4578 the jump. If that succeeds, we try changing the label. If that fails,
4579 we invert the jump back to what it was. */
4580
4581 if (! invert_exp (PATTERN (jump), jump))
4582 return 0;
4583
4584 if (redirect_jump (jump, nlabel))
4585 {
4586 if (flag_branch_probabilities)
4587 {
4588 rtx note = find_reg_note (jump, REG_BR_PROB, 0);
4589
4590 	  /* An inverted jump means that the probability of being taken becomes
4591 	     the probability of not being taken.  Subtract the branch probability
4592 	     from the probability base to convert it back to a taken probability.
4593 	     (We don't flip the probability on a branch that's never taken.)  */
4594 if (note && XINT (XEXP (note, 0), 0) >= 0)
4595 XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
4596 }
4597
4598 return 1;
4599 }
4600
4601 if (! invert_exp (PATTERN (jump), jump))
4602 /* This should just be putting it back the way it was. */
4603 abort ();
4604
4605 return 0;
4606 }
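
/* A sketch of the all-or-nothing behavior, with hypothetical labels:
   given

	(set (pc) (if_then_else (eq (reg 58) (const_int 0))
				(label_ref L1) (pc)))

   invert_jump (jump, L2) attempts to produce

	(set (pc) (if_then_else (ne (reg 58) (const_int 0))
				(label_ref L2) (pc)))

   and if retargeting to L2 fails, the condition is flipped back so the
   insn is left exactly as it was found.  */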
4607
4608 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4609
4610 Return 1 if we can do so, 0 if we cannot find a way to do so that
4611 matches a pattern. */
4612
4613 int
4614 invert_exp (x, insn)
4615 rtx x;
4616 rtx insn;
4617 {
4618 register RTX_CODE code;
4619 register int i;
4620 register const char *fmt;
4621
4622 code = GET_CODE (x);
4623
4624 if (code == IF_THEN_ELSE)
4625 {
4626 register rtx comp = XEXP (x, 0);
4627 register rtx tem;
4628
4629 /* We can do this in two ways: The preferable way, which can only
4630 be done if this is not an integer comparison, is to reverse
4631 the comparison code. Otherwise, swap the THEN-part and ELSE-part
4632 of the IF_THEN_ELSE. If we can't do either, fail. */
4633
4634 if (can_reverse_comparison_p (comp, insn)
4635 && validate_change (insn, &XEXP (x, 0),
4636 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
4637 GET_MODE (comp), XEXP (comp, 0),
4638 XEXP (comp, 1)), 0))
4639 return 1;
4640
4641 tem = XEXP (x, 1);
4642 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
4643 validate_change (insn, &XEXP (x, 2), tem, 1);
4644 return apply_change_group ();
4645 }
4646
4647 fmt = GET_RTX_FORMAT (code);
4648 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4649 {
4650 if (fmt[i] == 'e')
4651 if (! invert_exp (XEXP (x, i), insn))
4652 return 0;
4653 if (fmt[i] == 'E')
4654 {
4655 register int j;
4656 for (j = 0; j < XVECLEN (x, i); j++)
4657 if (!invert_exp (XVECEXP (x, i, j), insn))
4658 return 0;
4659 }
4660 }
4661
4662 return 1;
4663 }
4664 \f
4665 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4666 If the old jump target label is unused as a result,
4667 it and the code following it may be deleted.
4668
4669 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4670 RETURN insn.
4671
4672 The return value will be 1 if the change was made, 0 if it wasn't (this
4673 can only occur for NLABEL == 0). */
4674
4675 int
4676 redirect_jump (jump, nlabel)
4677 rtx jump, nlabel;
4678 {
4679 register rtx olabel = JUMP_LABEL (jump);
4680
4681 if (nlabel == olabel)
4682 return 1;
4683
4684 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
4685 return 0;
4686
4687 /* If this is an unconditional branch, delete it from the jump_chain of
4688 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4689 have UID's in range and JUMP_CHAIN is valid). */
4690 if (jump_chain && (simplejump_p (jump)
4691 || GET_CODE (PATTERN (jump)) == RETURN))
4692 {
4693 int label_index = nlabel ? INSN_UID (nlabel) : 0;
4694
4695 delete_from_jump_chain (jump);
4696 if (label_index < max_jump_chain
4697 && INSN_UID (jump) < max_jump_chain)
4698 {
4699 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
4700 jump_chain[label_index] = jump;
4701 }
4702 }
4703
4704 JUMP_LABEL (jump) = nlabel;
4705 if (nlabel)
4706 ++LABEL_NUSES (nlabel);
4707
4708 if (olabel && --LABEL_NUSES (olabel) == 0)
4709 delete_insn (olabel);
4710
4711 return 1;
4712 }
4713
4714 /* Delete the instruction JUMP from any jump chain it might be on. */
4715
4716 static void
4717 delete_from_jump_chain (jump)
4718 rtx jump;
4719 {
4720 int index;
4721 rtx olabel = JUMP_LABEL (jump);
4722
4723 /* Handle unconditional jumps. */
4724 if (jump_chain && olabel != 0
4725 && INSN_UID (olabel) < max_jump_chain
4726 && simplejump_p (jump))
4727 index = INSN_UID (olabel);
4728 /* Handle return insns. */
4729 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4730 index = 0;
4731 else return;
4732
4733 if (jump_chain[index] == jump)
4734 jump_chain[index] = jump_chain[INSN_UID (jump)];
4735 else
4736 {
4737 rtx insn;
4738
4739 for (insn = jump_chain[index];
4740 insn != 0;
4741 insn = jump_chain[INSN_UID (insn)])
4742 if (jump_chain[INSN_UID (insn)] == jump)
4743 {
4744 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4745 break;
4746 }
4747 }
4748 }
4749
4750 /* If NLABEL is nonzero, throughout the rtx at LOC,
4751 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
4752 zero, alter (RETURN) to (LABEL_REF NLABEL).
4753
4754 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
4755 validity with validate_change. Convert (set (pc) (label_ref olabel))
4756 to (return).
4757
4758 Return 0 if we found a change we would like to make but it is invalid.
4759 Otherwise, return 1. */
4760
4761 int
4762 redirect_exp (loc, olabel, nlabel, insn)
4763 rtx *loc;
4764 rtx olabel, nlabel;
4765 rtx insn;
4766 {
4767 register rtx x = *loc;
4768 register RTX_CODE code = GET_CODE (x);
4769 register int i;
4770 register const char *fmt;
4771
4772 if (code == LABEL_REF)
4773 {
4774 if (XEXP (x, 0) == olabel)
4775 {
4776 if (nlabel)
4777 XEXP (x, 0) = nlabel;
4778 else
4779 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4780 return 1;
4781 }
4782 }
4783 else if (code == RETURN && olabel == 0)
4784 {
4785 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
4786 if (loc == &PATTERN (insn))
4787 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
4788 return validate_change (insn, loc, x, 0);
4789 }
4790
4791 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
4792 && GET_CODE (SET_SRC (x)) == LABEL_REF
4793 && XEXP (SET_SRC (x), 0) == olabel)
4794 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4795
4796 fmt = GET_RTX_FORMAT (code);
4797 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4798 {
4799 if (fmt[i] == 'e')
4800 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
4801 return 0;
4802 if (fmt[i] == 'E')
4803 {
4804 register int j;
4805 for (j = 0; j < XVECLEN (x, i); j++)
4806 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
4807 return 0;
4808 }
4809 }
4810
4811 return 1;
4812 }
4813 \f
4814 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4815
4816 If the old jump target label (before the dispatch table) becomes unused,
4817    it and the dispatch table may be deleted.  In that case, find the insn
4818    before the jump that references that label, and delete it and its logical
4819    successors too.  */
4820
4821 static void
4822 redirect_tablejump (jump, nlabel)
4823 rtx jump, nlabel;
4824 {
4825 register rtx olabel = JUMP_LABEL (jump);
4826
4827 /* Add this jump to the jump_chain of NLABEL. */
4828 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4829 && INSN_UID (jump) < max_jump_chain)
4830 {
4831 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4832 jump_chain[INSN_UID (nlabel)] = jump;
4833 }
4834
4835 PATTERN (jump) = gen_jump (nlabel);
4836 JUMP_LABEL (jump) = nlabel;
4837 ++LABEL_NUSES (nlabel);
4838 INSN_CODE (jump) = -1;
4839
4840 if (--LABEL_NUSES (olabel) == 0)
4841 {
4842 delete_labelref_insn (jump, olabel, 0);
4843 delete_insn (olabel);
4844 }
4845 }
4846
4847 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4848 If we found one, delete it and then delete this insn if DELETE_THIS is
4849 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4850
4851 static int
4852 delete_labelref_insn (insn, label, delete_this)
4853 rtx insn, label;
4854 int delete_this;
4855 {
4856 int deleted = 0;
4857 rtx link;
4858
4859 if (GET_CODE (insn) != NOTE
4860 && reg_mentioned_p (label, PATTERN (insn)))
4861 {
4862 if (delete_this)
4863 {
4864 delete_insn (insn);
4865 deleted = 1;
4866 }
4867 else
4868 return 1;
4869 }
4870
4871 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4872 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4873 {
4874 if (delete_this)
4875 {
4876 delete_insn (insn);
4877 deleted = 1;
4878 }
4879 else
4880 return 1;
4881 }
4882
4883 return deleted;
4884 }
4885 \f
4886 /* Like rtx_equal_p except that it considers two REGs as equal
4887 if they renumber to the same value and considers two commutative
4888 operations to be the same if the order of the operands has been
4889 reversed.
4890
4891 ??? Addition is not commutative on the PA due to the weird implicit
4892 space register selection rules for memory addresses. Therefore, we
4893 don't consider a + b == b + a.
4894
4895 We could/should make this test a little tighter. Possibly only
4896 disabling it on the PA via some backend macro or only disabling this
4897 case when the PLUS is inside a MEM. */
4898
4899 int
4900 rtx_renumbered_equal_p (x, y)
4901 rtx x, y;
4902 {
4903 register int i;
4904 register RTX_CODE code = GET_CODE (x);
4905 register const char *fmt;
4906
4907 if (x == y)
4908 return 1;
4909
4910 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4911 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4912 && GET_CODE (SUBREG_REG (y)) == REG)))
4913 {
4914 int reg_x = -1, reg_y = -1;
4915 int word_x = 0, word_y = 0;
4916
4917 if (GET_MODE (x) != GET_MODE (y))
4918 return 0;
4919
4920 /* If we haven't done any renumbering, don't
4921 make any assumptions. */
4922 if (reg_renumber == 0)
4923 return rtx_equal_p (x, y);
4924
4925 if (code == SUBREG)
4926 {
4927 reg_x = REGNO (SUBREG_REG (x));
4928 word_x = SUBREG_WORD (x);
4929
4930 if (reg_renumber[reg_x] >= 0)
4931 {
4932 reg_x = reg_renumber[reg_x] + word_x;
4933 word_x = 0;
4934 }
4935 }
4936
4937 else
4938 {
4939 reg_x = REGNO (x);
4940 if (reg_renumber[reg_x] >= 0)
4941 reg_x = reg_renumber[reg_x];
4942 }
4943
4944 if (GET_CODE (y) == SUBREG)
4945 {
4946 reg_y = REGNO (SUBREG_REG (y));
4947 word_y = SUBREG_WORD (y);
4948
4949 if (reg_renumber[reg_y] >= 0)
4950 {
4951 reg_y = reg_renumber[reg_y];
4952 word_y = 0;
4953 }
4954 }
4955
4956 else
4957 {
4958 reg_y = REGNO (y);
4959 if (reg_renumber[reg_y] >= 0)
4960 reg_y = reg_renumber[reg_y];
4961 }
4962
4963 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4964 }
4965
4966 /* Now we have disposed of all the cases
4967 in which different rtx codes can match. */
4968 if (code != GET_CODE (y))
4969 return 0;
4970
4971 switch (code)
4972 {
4973 case PC:
4974 case CC0:
4975 case ADDR_VEC:
4976 case ADDR_DIFF_VEC:
4977 return 0;
4978
4979 case CONST_INT:
4980 return INTVAL (x) == INTVAL (y);
4981
4982 case LABEL_REF:
4983 /* We can't assume nonlocal labels have their following insns yet. */
4984 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4985 return XEXP (x, 0) == XEXP (y, 0);
4986
4987 /* Two label-refs are equivalent if they point at labels
4988 in the same position in the instruction stream. */
4989 return (next_real_insn (XEXP (x, 0))
4990 == next_real_insn (XEXP (y, 0)));
4991
4992 case SYMBOL_REF:
4993 return XSTR (x, 0) == XSTR (y, 0);
4994
4995 case CODE_LABEL:
4996 /* If we didn't match EQ equality above, they aren't the same. */
4997 return 0;
4998
4999 default:
5000 break;
5001 }
5002
5003 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
5004
5005 if (GET_MODE (x) != GET_MODE (y))
5006 return 0;
5007
5008 /* For commutative operations, the RTXs match if the operands match in
5009    either order.  Also handle the simple binary and unary cases without a loop.
5010
5011 ??? Don't consider PLUS a commutative operator; see comments above. */
5012 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5013 && code != PLUS)
5014 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
5015 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
5016 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
5017 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
5018 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
5019 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
5020 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
5021 else if (GET_RTX_CLASS (code) == '1')
5022 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
5023
5024 /* Compare the elements.  If any pair of corresponding elements
5025    fails to match, return 0 for the whole thing.  */
5026
5027 fmt = GET_RTX_FORMAT (code);
5028 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5029 {
5030 register int j;
5031 switch (fmt[i])
5032 {
5033 case 'w':
5034 if (XWINT (x, i) != XWINT (y, i))
5035 return 0;
5036 break;
5037
5038 case 'i':
5039 if (XINT (x, i) != XINT (y, i))
5040 return 0;
5041 break;
5042
5043 case 's':
5044 if (strcmp (XSTR (x, i), XSTR (y, i)))
5045 return 0;
5046 break;
5047
5048 case 'e':
5049 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
5050 return 0;
5051 break;
5052
5053 case 'u':
5054 if (XEXP (x, i) != XEXP (y, i))
5055 return 0;
5056 /* fall through. */
5057 case '0':
5058 break;
5059
5060 case 'E':
5061 if (XVECLEN (x, i) != XVECLEN (y, i))
5062 return 0;
5063 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5064 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
5065 return 0;
5066 break;
5067
5068 default:
5069 abort ();
5070 }
5071 }
5072 return 1;
5073 }
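
/* For example, once register allocation has assigned pseudo 100 to hard
   register 3, (reg 100) and (reg 3) compare equal here even though
   rtx_equal_p would reject them; and (plus (reg 58) (reg 59)) never
   matches (plus (reg 59) (reg 58)), per the PA comment above.  All
   register numbers in this example are hypothetical.  */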
5074 \f
5075 /* If X is a hard register or equivalent to one or a subregister of one,
5076 return the hard register number. If X is a pseudo register that was not
5077 assigned a hard register, return the pseudo register number. Otherwise,
5078 return -1. Any rtx is valid for X. */
5079
5080 int
5081 true_regnum (x)
5082 rtx x;
5083 {
5084 if (GET_CODE (x) == REG)
5085 {
5086 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
5087 return reg_renumber[REGNO (x)];
5088 return REGNO (x);
5089 }
5090 if (GET_CODE (x) == SUBREG)
5091 {
5092 int base = true_regnum (SUBREG_REG (x));
5093 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
5094 return SUBREG_WORD (x) + base;
5095 }
5096 return -1;
5097 }
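
/* Examples, with hypothetical numbers:  if pseudo 100 was assigned hard
   register 3, true_regnum of (reg 100) is 3 and true_regnum of
   (subreg (reg 100) 1) is 4; an unassigned pseudo yields its own pseudo
   number, and anything that is not a register yields -1.  */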
5098 \f
5099 /* Optimize code of the form:
5100
5101 for (x = a[i]; x; ...)
5102 ...
5103 for (x = a[i]; x; ...)
5104 ...
5105 foo:
5106
5107 Loop optimize will change the above code into
5108
5109 if (x = a[i])
5110 for (;;)
5111 { ...; if (! (x = ...)) break; }
5112 if (x = a[i])
5113 for (;;)
5114 { ...; if (! (x = ...)) break; }
5115 foo:
5116
5117 In general, if the first test fails, the program can branch
5118 directly to `foo' and skip the second try which is doomed to fail.
5119 We run this after loop optimization and before flow analysis. */
5120
5121 /* When comparing the insn patterns, we track the fact that different
5122 pseudo-register numbers may have been used in each computation.
5123 The following array stores an equivalence -- same_regs[I] == J means
5124 that pseudo register I was used in the first set of tests in a context
5125    where J was used in the second set.  We also count the number of such
5126    pending equivalences.  If that count is still nonzero when we reach the
5127    branch target, the expressions really aren't the same.  */
5128
5129 static int *same_regs;
5130
5131 static int num_same_regs;
5132
5133 /* Track any registers modified between the target of the first jump and
5134 the second jump. They never compare equal. */
5135
5136 static char *modified_regs;
5137
5138 /* Record if memory was modified. */
5139
5140 static int modified_mem;
5141
5142 /* Called via note_stores on each insn between the target of the first
5143 branch and the second branch. It marks any changed registers. */
5144
5145 static void
5146 mark_modified_reg (dest, x, data)
5147 rtx dest;
5148 rtx x ATTRIBUTE_UNUSED;
5149 void *data ATTRIBUTE_UNUSED;
5150 {
5151 int regno, i;
5152
5153 if (GET_CODE (dest) == SUBREG)
5154 dest = SUBREG_REG (dest);
5155
5156 if (GET_CODE (dest) == MEM)
5157 modified_mem = 1;
5158
5159 if (GET_CODE (dest) != REG)
5160 return;
5161
5162 regno = REGNO (dest);
5163 if (regno >= FIRST_PSEUDO_REGISTER)
5164 modified_regs[regno] = 1;
5165 else
5166 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
5167 modified_regs[regno + i] = 1;
5168 }
5169
5170 /* F is the first insn in the chain of insns. */
5171
5172 void
5173 thread_jumps (f, max_reg, flag_before_loop)
5174 rtx f;
5175 int max_reg;
5176 int flag_before_loop;
5177 {
5178   /* The basic algorithm is to find a conditional branch,
5179 the label it may branch to, and the branch after
5180 that label. If the two branches test the same condition,
5181 walk back from both branch paths until the insn patterns
5182 differ, or code labels are hit. If we make it back to
5183 the target of the first branch, then we know that the first branch
5184 will either always succeed or always fail depending on the relative
5185 senses of the two branches. So adjust the first branch accordingly
5186 in this case. */
5187
5188 rtx label, b1, b2, t1, t2;
5189 enum rtx_code code1, code2;
5190 rtx b1op0, b1op1, b2op0, b2op1;
5191 int changed = 1;
5192 int i;
5193 int *all_reset;
5194
5195 /* Allocate register tables and quick-reset table. */
5196 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
5197 same_regs = (int *) xmalloc (max_reg * sizeof (int));
5198 all_reset = (int *) xmalloc (max_reg * sizeof (int));
5199 for (i = 0; i < max_reg; i++)
5200 all_reset[i] = -1;
5201
5202 while (changed)
5203 {
5204 changed = 0;
5205
5206 for (b1 = f; b1; b1 = NEXT_INSN (b1))
5207 {
5208 /* Get to a candidate branch insn. */
5209 if (GET_CODE (b1) != JUMP_INSN
5210 || ! condjump_p (b1) || simplejump_p (b1)
5211 || JUMP_LABEL (b1) == 0)
5212 continue;
5213
5214 bzero (modified_regs, max_reg * sizeof (char));
5215 modified_mem = 0;
5216
5217 bcopy ((char *) all_reset, (char *) same_regs,
5218 max_reg * sizeof (int));
5219 num_same_regs = 0;
5220
5221 label = JUMP_LABEL (b1);
5222
5223 /* Look for a branch after the target. Record any registers and
5224 memory modified between the target and the branch. Stop when we
5225 get to a label since we can't know what was changed there. */
5226 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
5227 {
5228 if (GET_CODE (b2) == CODE_LABEL)
5229 break;
5230
5231 else if (GET_CODE (b2) == JUMP_INSN)
5232 {
5233 /* If this is an unconditional jump and is the only use of
5234 its target label, we can follow it. */
5235 if (simplejump_p (b2)
5236 && JUMP_LABEL (b2) != 0
5237 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
5238 {
5239 b2 = JUMP_LABEL (b2);
5240 continue;
5241 }
5242 else
5243 break;
5244 }
5245
5246 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
5247 continue;
5248
5249 if (GET_CODE (b2) == CALL_INSN)
5250 {
5251 modified_mem = 1;
5252 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5253 if (call_used_regs[i] && ! fixed_regs[i]
5254 && i != STACK_POINTER_REGNUM
5255 && i != FRAME_POINTER_REGNUM
5256 && i != HARD_FRAME_POINTER_REGNUM
5257 && i != ARG_POINTER_REGNUM)
5258 modified_regs[i] = 1;
5259 }
5260
5261 note_stores (PATTERN (b2), mark_modified_reg, NULL);
5262 }
5263
5264 /* Check the next candidate branch insn from the label
5265 of the first. */
5266 if (b2 == 0
5267 || GET_CODE (b2) != JUMP_INSN
5268 || b2 == b1
5269 || ! condjump_p (b2)
5270 || simplejump_p (b2))
5271 continue;
5272
5273 /* Get the comparison codes and operands, reversing the
5274 codes if appropriate. If we don't have comparison codes,
5275 we can't do anything. */
5276 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
5277 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
5278 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
5279 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
5280 code1 = reverse_condition (code1);
5281
5282 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
5283 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
5284 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
5285 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
5286 code2 = reverse_condition (code2);
5287
5288 /* If they test the same things and knowing that B1 branches
5289 tells us whether or not B2 branches, check if we
5290 can thread the branch. */
5291 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
5292 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
5293 && (comparison_dominates_p (code1, code2)
5294 || (comparison_dominates_p (code1, reverse_condition (code2))
5295 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
5296 0),
5297 b1))))
5298 {
5299 t1 = prev_nonnote_insn (b1);
5300 t2 = prev_nonnote_insn (b2);
5301
5302 while (t1 != 0 && t2 != 0)
5303 {
5304 if (t2 == label)
5305 {
5306 /* We have reached the target of the first branch.
5307 If there are no pending register equivalents,
5308 we know that this branch will either always
5309 succeed (if the senses of the two branches are
5310 the same) or always fail (if not). */
5311 rtx new_label;
5312
5313 if (num_same_regs != 0)
5314 break;
5315
5316 if (comparison_dominates_p (code1, code2))
5317 new_label = JUMP_LABEL (b2);
5318 else
5319 new_label = get_label_after (b2);
5320
5321 if (JUMP_LABEL (b1) != new_label)
5322 {
5323 rtx prev = PREV_INSN (new_label);
5324
5325 if (flag_before_loop
5326 && GET_CODE (prev) == NOTE
5327 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
5328 {
5329 /* Don't thread to the loop label. If a loop
5330 label is reused, loop optimization will
5331 be disabled for that loop. */
5332 new_label = gen_label_rtx ();
5333 emit_label_after (new_label, PREV_INSN (prev));
5334 }
5335 changed |= redirect_jump (b1, new_label);
5336 }
5337 break;
5338 }
5339
5340 /* If either of these is not a normal insn (it might be
5341 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
5342 have already been skipped above.) Similarly, fail
5343 if the insns are different. */
5344 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
5345 || recog_memoized (t1) != recog_memoized (t2)
5346 || ! rtx_equal_for_thread_p (PATTERN (t1),
5347 PATTERN (t2), t2))
5348 break;
5349
5350 t1 = prev_nonnote_insn (t1);
5351 t2 = prev_nonnote_insn (t2);
5352 }
5353 }
5354 }
5355 }
5356
5357 /* Clean up. */
5358 free (modified_regs);
5359 free (same_regs);
5360 free (all_reset);
5361 }
5362 \f
5363 /* This is like RTX_EQUAL_P except that it knows about our handling of
5364 possibly equivalent registers and knows to consider volatile and
5365 modified objects as not equal.
5366
5367 YINSN is the insn containing Y. */
5368
5369 int
5370 rtx_equal_for_thread_p (x, y, yinsn)
5371 rtx x, y;
5372 rtx yinsn;
5373 {
5374 register int i;
5375 register int j;
5376 register enum rtx_code code;
5377 register const char *fmt;
5378
5379 code = GET_CODE (x);
5380 /* Rtx's of different codes cannot be equal. */
5381 if (code != GET_CODE (y))
5382 return 0;
5383
5384 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
5385 (REG:SI x) and (REG:HI x) are NOT equivalent. */
5386
5387 if (GET_MODE (x) != GET_MODE (y))
5388 return 0;
5389
5390 /* For floating-point, consider everything unequal. This is a bit
5391 pessimistic, but this pass would only rarely do anything for FP
5392 anyway. */
5393 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
5394 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
5395 return 0;
5396
5397   /* For commutative operations, the RTXs match if the operands match in
5398      either order.  Also handle the simple binary and unary cases without a loop.  */
5399 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5400 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5401 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
5402 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
5403 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
5404 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
5405 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5406 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
5407 else if (GET_RTX_CLASS (code) == '1')
5408 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

  /* Handle special cases first.  */
  switch (code)
    {
    case REG:
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
        return 1;

      /* If neither is a user variable nor a hard register, check for
         possible equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
          || REGNO (x) < FIRST_PSEUDO_REGISTER
          || REGNO (y) < FIRST_PSEUDO_REGISTER)
        return 0;

      if (same_regs[REGNO (x)] == -1)
        {
          same_regs[REGNO (x)] = REGNO (y);
          num_same_regs++;

          /* If this is the first time we are seeing a register on the `Y'
             side, see if it is the last use.  If not, we can't thread the
             jump, so mark it as not equivalent.  */
          if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
            return 0;

          return 1;
        }
      else
        return (same_regs[REGNO (x)] == REGNO (y));

      break;

    case MEM:
      /* If memory modified or either volatile, not equivalent.
         Else, check address.  */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
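
      /* Added commentary: e.g. two loads of
         (mem:SI (plus:SI (reg:SI 70) (const_int 4))) compare equal only
         when no intervening store was recorded (modified_mem clear) and
         neither reference is volatile; the addresses are then compared
         recursively.  */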

    case ASM_INPUT:
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' if setting equivalenced registers.
         Then process source.  */
      if (GET_CODE (SET_DEST (x)) == REG
          && GET_CODE (SET_DEST (y)) == REG)
        {
          if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
            {
              same_regs[REGNO (SET_DEST (x))] = -1;
              num_same_regs--;
            }
          else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
            return 0;
        }
      else if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
        return 0;

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
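
      /* Added commentary: since the caller scans backward, a use of two
         different pseudos is seen and paired in same_regs before the
         insns that set them.  When the matching SETs are reached, the
         pending pair is discharged above; a SET of unpaired, different
         destination registers makes the threads unequal.  */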

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);
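
      /* Added commentary: this pointer comparison is conservative.  It
         can miss two SYMBOL_REFs whose identical names live in distinct
         string storage, but it can never declare two different symbols
         equal, which is the safe direction for this pass.  */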

    default:
      break;
    }

  if (x == y)
    return 1;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
                                        XVECEXP (y, i, j), yinsn) == 0)
              return 0;
          break;

        case 'e':
          if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
            return 0;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's, except
             within LABEL_REFs and SYMBOL_REFs.  */
        default:
          abort ();
        }
    }
  return 1;
}
\f

#if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)
/* Return the insn in front of which NEW can safely be inserted,
   searching backward from the jump insn INSN.  Return 0 if it is not
   safe to do this jump optimization.  Note that NEW must contain a
   single set.  */
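
/* Added commentary (register numbers hypothetical): on a machine whose
   add pattern clobbers a flags register, NEW might look like

       (parallel [(set (reg:SI 70) (plus:SI (reg:SI 70) (const_int 1)))
                  (clobber (reg:CC 17))])

   If INSN is a conditional jump that reads (reg:CC 17), NEW cannot go
   directly before INSN; the code below then tries the slot before the
   previous active insn instead.  */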

static rtx
find_insert_position (insn, new)
     rtx insn;
     rtx new;
{
  int i;
  rtx prev;

  /* If NEW is a bare SET (not a PARALLEL), it contains no CLOBBERs and
     it is safe to insert NEW directly before INSN.  */
  if (GET_CODE (PATTERN (new)) != PARALLEL)
    return insn;

  for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
    if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
        && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
                                    insn))
      break;

  if (i < 0)
    return insn;

  /* There is a good chance that the previous insn PREV sets the thing
     being clobbered (often the CC in a hard reg).  If PREV does not
     use what NEW sets, we can insert NEW before PREV.  */

  prev = prev_active_insn (insn);
  for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
    if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
        && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
                                    insn)
        && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
                            prev))
      return 0;

  return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
}
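
/* Hypothetical usage sketch (added commentary; the variable names are
   illustrative, not from this file):

       rtx pos = find_insert_position (jump, new_insn);
       if (pos != 0)
         emit_insn_before (PATTERN (new_insn), pos);

   emit_insn_before is a real emit-rtl.c entry point; when no safe
   position exists the caller must abandon the transformation.  */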
#endif /* !HAVE_cc0 && !HAVE_conditional_arithmetic */