jump.c (jump_optimize_1): Don't call delete_barrier_successors if only marking labels.
/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This is the jump-optimization pass of the compiler.
   It is run two or three times: once before cse, sometimes once after cse,
   and once after reload (before final).

   jump_optimize deletes unreachable code and labels that are not used.
   It also deletes jumps that jump to the following insn,
   and simplifies jumps around unconditional jumps and jumps
   to unconditional jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   Optionally, cross-jumping can be done.  Currently it is done
   only the last time (when after reload and before final).
   In fact, the code for cross-jumping now assumes that register
   allocation has been done, since it uses `rtx_renumbered_equal_p'.

   Jump optimization is done after cse when cse's constant-propagation
   causes jumps to become unconditional or to be deleted.

   Unreachable loops are not detected here, because the labels
   have references and the insns appear reachable from the labels.
   find_basic_blocks in flow.c finds and deletes such loops.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */
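
/* As an illustrative sketch (not part of the pass itself): a jump around
   an unconditional jump, e.g. at the source level

	if (a) goto L1;
	goto L2;
     L1:

   is rewritten into the single conditional jump

	if (! a) goto L2;

   assuming the target allows the comparison to be reversed.  */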

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"

/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */
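
/* For example (a hypothetical uid assignment): if CODE_LABEL L has uid 5
   and unconditional jumps J1 (uid 7) and J2 (uid 9) both target L, then
   jump_chain[5] == J1, jump_chain[7] == J2, and jump_chain[9] == 0.  */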

static rtx *jump_chain;

/* Maximum index in jump_chain.  */

static int max_jump_chain;

/* Set nonzero by jump_optimize if control can fall through
   to the end of the function.  */
int can_reach_end;

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;

static int init_label_info PARAMS ((rtx));
static void delete_barrier_successors PARAMS ((rtx));
static void mark_all_labels PARAMS ((rtx, int));
static rtx delete_unreferenced_labels PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static int calculate_can_reach_end PARAMS ((rtx, int));
static int duplicate_loop_exit_test PARAMS ((rtx));
static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump PARAMS ((rtx, rtx, rtx));
static int jump_back_p PARAMS ((rtx, rtx));
static int tension_vector_labels PARAMS ((rtx, int));
static void mark_jump_label PARAMS ((rtx, rtx, int, int));
static void delete_computation PARAMS ((rtx));
static void delete_from_jump_chain PARAMS ((rtx));
static int delete_labelref_insn PARAMS ((rtx, rtx, int));
static void mark_modified_reg PARAMS ((rtx, rtx, void *));
static void redirect_tablejump PARAMS ((rtx, rtx));
static void jump_optimize_1 PARAMS ((rtx, int, int, int, int));
#if ! defined(HAVE_cc0) && ! defined(HAVE_conditional_arithmetic)
static rtx find_insert_position PARAMS ((rtx, rtx));
#endif
static int returnjump_p_1 PARAMS ((rtx *, void *));
static void delete_prior_computation PARAMS ((rtx, rtx));

/* Main external entry point into the jump optimizer.  See comments before
   jump_optimize_1 for descriptions of the arguments.  */
void
jump_optimize (f, cross_jump, noop_moves, after_regscan)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
{
  jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0);
}

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 1);
}

/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is 1, detect matching code
   before a jump and its destination and unify them.
   If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

   If NOOP_MOVES is nonzero, delete no-op move insns.

   If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
   after regscan, and it is safe to use regno_first_uid and regno_last_uid.

   If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
   and JUMP_LABEL field for jumping insns.

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.  */

static void
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
     int mark_labels_only;
{
  register rtx insn, next;
  int changed;
  int old_max_reg;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;

  cross_jump_death_matters = (cross_jump == 2);
  max_uid = init_label_info (f) + 1;

  /* If we are performing cross jump optimizations, then initialize
     tables mapping UIDs to EH regions to avoid incorrect movement
     of insns from one EH region to another.  */
  if (flag_exceptions && cross_jump)
    init_insn_eh_region (f, max_uid);

  if (! mark_labels_only)
    delete_barrier_successors (f);

  /* Leave some extra room for labels and duplicate exit test insns
     we make.  */
  max_jump_chain = max_uid * 14 / 10;
  jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));

  mark_all_labels (f, cross_jump);

  /* Keep track of labels used from static data;
     they cannot ever be deleted.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  check_exception_handler_labels ();

  /* Keep track of labels used for marking handlers for exception
     regions; they cannot usually be deleted.  */

  for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
     notes and recompute LABEL_NUSES.  */
  if (mark_labels_only)
    goto end;

  exception_optimize ();

  last_insn = delete_unreferenced_labels (f);

  if (noop_moves)
    delete_noop_moves (f);

  /* If we haven't yet gotten to reload and we have just run regscan,
     delete any insn that sets a register that isn't used elsewhere.
     This helps some of the optimizations below by having fewer insns
     being jumped around.  */

  if (optimize && ! reload_completed && after_regscan)
    for (insn = f; insn; insn = next)
      {
        rtx set = single_set (insn);

        next = NEXT_INSN (insn);

        if (set && GET_CODE (SET_DEST (set)) == REG
            && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
            && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
            /* We use regno_last_note_uid so as not to delete the setting
               of a reg that's used in notes.  A subsequent optimization
               might arrange to use that reg for real.  */
            && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
            && ! side_effects_p (SET_SRC (set))
            && ! find_reg_note (insn, REG_RETVAL, 0)
            /* An ADDRESSOF expression can turn into a use of the internal arg
               pointer, so do not delete the initialization of the internal
               arg pointer yet.  If it is truly dead, flow will delete the
               initializing insn.  */
            && SET_DEST (set) != current_function_internal_arg_pointer)
          delete_insn (insn);
      }

  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  changed = 1;
  old_max_reg = max_reg_num ();
  while (changed)
    {
      changed = 0;

      for (insn = f; insn; insn = next)
        {
          rtx reallabelprev;
          rtx temp, temp1, temp2 = NULL_RTX, temp3, temp4, temp5, temp6;
          rtx nlabel;
          int this_is_simplejump, this_is_condjump, reversep = 0;
          int this_is_condjump_in_parallel;

          next = NEXT_INSN (insn);

          /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
             jump.  Try to optimize by duplicating the loop exit test if so.
             This is only safe immediately after regscan, because it uses
             the values of regno_first_uid and regno_last_uid.  */
          if (after_regscan && GET_CODE (insn) == NOTE
              && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
              && (temp1 = next_nonnote_insn (insn)) != 0
              && simplejump_p (temp1))
            {
              temp = PREV_INSN (insn);
              if (duplicate_loop_exit_test (insn))
                {
                  changed = 1;
                  next = NEXT_INSN (temp);
                  continue;
                }
            }

          if (GET_CODE (insn) != JUMP_INSN)
            continue;

          this_is_simplejump = simplejump_p (insn);
          this_is_condjump = condjump_p (insn);
          this_is_condjump_in_parallel = condjump_in_parallel_p (insn);

          /* Tension the labels in dispatch tables.  */

          if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
            changed |= tension_vector_labels (PATTERN (insn), 0);
          if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            changed |= tension_vector_labels (PATTERN (insn), 1);

          /* See if this jump goes to another jump and redirect if so.  */
          nlabel = follow_jumps (JUMP_LABEL (insn));
          if (nlabel != JUMP_LABEL (insn))
            changed |= redirect_jump (insn, nlabel);

          if (! optimize)
            continue;

          /* If a dispatch table always goes to the same place,
             get rid of it and replace the insn that uses it.  */

          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            {
              int i;
              rtx pat = PATTERN (insn);
              int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
              int len = XVECLEN (pat, diff_vec_p);
              rtx dispatch = prev_real_insn (insn);
              rtx set;

              for (i = 0; i < len; i++)
                if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
                    != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
                  break;

              if (i == len
                  && dispatch != 0
                  && GET_CODE (dispatch) == JUMP_INSN
                  && JUMP_LABEL (dispatch) != 0
                  /* Don't mess with a casesi insn.
                     XXX according to the comment before computed_jump_p(),
                     all casesi insns should be a parallel of the jump
                     and a USE of a LABEL_REF.  */
                  && ! ((set = single_set (dispatch)) != NULL
                        && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
                  && next_real_insn (JUMP_LABEL (dispatch)) == insn)
                {
                  redirect_tablejump (dispatch,
                                      XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
                  changed = 1;
                }
            }

          /* If a jump references the end of the function, try to turn
             it into a RETURN insn, possibly a conditional one.  */
          if (JUMP_LABEL (insn) != 0
              && (next_active_insn (JUMP_LABEL (insn)) == 0
                  || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
                     == RETURN))
            changed |= redirect_jump (insn, NULL_RTX);

          reallabelprev = prev_active_insn (JUMP_LABEL (insn));

          /* Detect jump to following insn.  */
          if (reallabelprev == insn && this_is_condjump)
            {
              next = next_real_insn (JUMP_LABEL (insn));
              delete_jump (insn);
              changed = 1;
              continue;
            }

          /* Detect a conditional jump going to the same place
             as an immediately following unconditional jump.  */
          else if (this_is_condjump
                   && (temp = next_active_insn (insn)) != 0
                   && simplejump_p (temp)
                   && (next_active_insn (JUMP_LABEL (insn))
                       == next_active_insn (JUMP_LABEL (temp))))
            {
              /* Don't mess up test coverage analysis.  */
              temp2 = temp;
              if (flag_test_coverage && ! reload_completed)
                for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
                  if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
                    break;

              if (temp2 == temp)
                {
                  delete_jump (insn);
                  changed = 1;
                  continue;
                }
            }

          /* Detect a conditional jump jumping over an unconditional jump.  */

          else if ((this_is_condjump || this_is_condjump_in_parallel)
                   && ! this_is_simplejump
                   && reallabelprev != 0
                   && GET_CODE (reallabelprev) == JUMP_INSN
                   && prev_active_insn (reallabelprev) == insn
                   && no_labels_between_p (insn, reallabelprev)
                   && simplejump_p (reallabelprev))
            {
              /* When we invert the unconditional jump, we will be
                 decrementing the usage count of its old label.
                 Make sure that we don't delete it now because that
                 might cause the following code to be deleted.  */
              rtx prev_uses = prev_nonnote_insn (reallabelprev);
              rtx prev_label = JUMP_LABEL (insn);

              if (prev_label)
                ++LABEL_NUSES (prev_label);

              if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
                {
                  /* It is very likely that if there are USE insns before
                     this jump, they hold REG_DEAD notes.  These REG_DEAD
                     notes are no longer valid due to this optimization,
                     and will cause the life-analysis in following passes
                     (notably delayed-branch scheduling) to think that
                     these registers are dead when they are not.

                     To prevent this trouble, we just remove the USE insns
                     from the insn chain.  */

                  while (prev_uses && GET_CODE (prev_uses) == INSN
                         && GET_CODE (PATTERN (prev_uses)) == USE)
                    {
                      rtx useless = prev_uses;
                      prev_uses = prev_nonnote_insn (prev_uses);
                      delete_insn (useless);
                    }

                  delete_insn (reallabelprev);
                  changed = 1;
                }

              /* We can now safely delete the label if it is unreferenced
                 since the delete_insn above has deleted the BARRIER.  */
              if (prev_label && --LABEL_NUSES (prev_label) == 0)
                delete_insn (prev_label);

              next = NEXT_INSN (insn);
            }

          /* If we have an unconditional jump preceded by a USE, try to put
             the USE before the target and jump there.  This simplifies many
             of the optimizations below since we don't have to worry about
             dealing with these USE insns.  We only do this if the label
             being branched to already has the identical USE or if code
             never falls through to that label.  */

          else if (this_is_simplejump
                   && (temp = prev_nonnote_insn (insn)) != 0
                   && GET_CODE (temp) == INSN
                   && GET_CODE (PATTERN (temp)) == USE
                   && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
                   && (GET_CODE (temp1) == BARRIER
                       || (GET_CODE (temp1) == INSN
                           && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
                   /* Don't do this optimization if we have a loop containing
                      only the USE instruction, and the loop start label has
                      a usage count of 1.  This is because we will redo this
                      optimization every time through the outer loop, and jump
                      opt will never exit.  */
                   && ! ((temp2 = prev_nonnote_insn (temp)) != 0
                         && temp2 == JUMP_LABEL (insn)
                         && LABEL_NUSES (temp2) == 1))
            {
              if (GET_CODE (temp1) == BARRIER)
                {
                  emit_insn_after (PATTERN (temp), temp1);
                  temp1 = NEXT_INSN (temp1);
                }

              delete_insn (temp);
              redirect_jump (insn, get_label_before (temp1));
              reallabelprev = prev_real_insn (temp1);
              changed = 1;
              next = NEXT_INSN (insn);
            }

          /* Simplify   if (...) x = a; else x = b; by converting it
             to         x = b; if (...) x = a;
             if B is sufficiently simple, the test doesn't involve X,
             and nothing in the test modifies B or X.

             If we have small register classes, we also can't do this if X
             is a hard register.

             If the "x = b;" insn has any REG_NOTES, we don't do this because
             of the possibility that we are running after CSE and there is a
             REG_EQUAL note that is only valid if the branch has already been
             taken.  If we move the insn with the REG_EQUAL note, we may
             fold the comparison to always be false in a later CSE pass.
             (We could also delete the REG_NOTES when moving the insn, but it
             seems simpler to not move it.)  An exception is that we can move
             the insn if the only note is a REG_EQUAL or REG_EQUIV whose
             value is the same as "b".

             INSN is the branch over the `else' part.

             We set:

             TEMP to the jump insn preceding "x = a;"
             TEMP1 to X
             TEMP2 to the insn that sets "x = b;"
             TEMP3 to the insn that sets "x = a;"
             TEMP4 to the set of "x = b";  */
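
          /* A source-level sketch of this rewrite (valid only under the
             conditions tested below):

		if (cond) x = a; else x = b;
             =>
		x = b; if (cond) x = a;

             which lets the unconditional jump over the else arm be
             deleted later in this pass.  */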

          if (this_is_simplejump
              && (temp3 = prev_active_insn (insn)) != 0
              && GET_CODE (temp3) == INSN
              && (temp4 = single_set (temp3)) != 0
              && GET_CODE (temp1 = SET_DEST (temp4)) == REG
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
              && (temp2 = next_active_insn (insn)) != 0
              && GET_CODE (temp2) == INSN
              && (temp4 = single_set (temp2)) != 0
              && rtx_equal_p (SET_DEST (temp4), temp1)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && (REG_NOTES (temp2) == 0
                  || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
                       || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
                      && XEXP (REG_NOTES (temp2), 1) == 0
                      && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
                                      SET_SRC (temp4))))
              && (temp = prev_active_insn (temp3)) != 0
              && condjump_p (temp) && ! simplejump_p (temp)
              /* TEMP must skip over the "x = a;" insn */
              && prev_real_insn (JUMP_LABEL (temp)) == insn
              && no_labels_between_p (insn, JUMP_LABEL (temp))
              /* There must be no other entries to the "x = b;" insn.  */
              && no_labels_between_p (JUMP_LABEL (temp), temp2)
              /* INSN must either branch to the insn after TEMP2 or the insn
                 after TEMP2 must branch to the same place as INSN.  */
              && (reallabelprev == temp2
                  || ((temp5 = next_active_insn (temp2)) != 0
                      && simplejump_p (temp5)
                      && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
            {
              /* The test expression, X, may be a complicated test with
                 multiple branches.  See if we can find all the uses of
                 the label that TEMP branches to without hitting a CALL_INSN
                 or a jump to somewhere else.  */
              rtx target = JUMP_LABEL (temp);
              int nuses = LABEL_NUSES (target);
              rtx p;
#ifdef HAVE_cc0
              rtx q;
#endif

              /* Set P to the first jump insn that goes around "x = a;".  */
              for (p = temp; nuses && p; p = prev_nonnote_insn (p))
                {
                  if (GET_CODE (p) == JUMP_INSN)
                    {
                      if (condjump_p (p) && ! simplejump_p (p)
                          && JUMP_LABEL (p) == target)
                        {
                          nuses--;
                          if (nuses == 0)
                            break;
                        }
                      else
                        break;
                    }
                  else if (GET_CODE (p) == CALL_INSN)
                    break;
                }

#ifdef HAVE_cc0
              /* We cannot insert anything between a set of cc and its use
                 so if P uses cc0, we must back up to the previous insn.  */
              q = prev_nonnote_insn (p);
              if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
                  && sets_cc0_p (PATTERN (q)))
                p = q;
#endif

              if (p)
                p = PREV_INSN (p);

              /* If we found all the uses and there was no data conflict, we
                 can move the assignment unless we can branch into the middle
                 from somewhere.  */
              if (nuses == 0 && p
                  && no_labels_between_p (p, insn)
                  && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
                  && ! reg_set_between_p (temp1, p, temp3)
                  && (GET_CODE (SET_SRC (temp4)) == CONST_INT
                      || ! modified_between_p (SET_SRC (temp4), p, temp2))
                  /* Verify that registers used by the jump are not clobbered
                     by the instruction being moved.  */
                  && ! regs_set_between_p (PATTERN (temp),
                                           PREV_INSN (temp2),
                                           NEXT_INSN (temp2)))
                {
                  emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
                  delete_insn (temp2);

                  /* Set NEXT to an insn that we know won't go away.  */
                  next = next_active_insn (insn);

                  /* Delete the jump around the set.  Note that we must do
                     this before we redirect the test jumps so that it won't
                     delete the code immediately following the assignment
                     we moved (which might be a jump).  */

                  delete_insn (insn);

                  /* We either have two consecutive labels or a jump to
                     a jump, so adjust all the JUMP_INSNs to branch to where
                     INSN branches to.  */
                  for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
                    if (GET_CODE (p) == JUMP_INSN)
                      redirect_jump (p, target);

                  changed = 1;
                  next = NEXT_INSN (insn);
                  continue;
                }
            }

          /* Simplify   if (...) { x = a; goto l; } x = b; by converting it
             to         x = a; if (...) goto l; x = b;
             if A is sufficiently simple, the test doesn't involve X,
             and nothing in the test modifies A or X.

             If we have small register classes, we also can't do this if X
             is a hard register.

             If the "x = a;" insn has any REG_NOTES, we don't do this because
             of the possibility that we are running after CSE and there is a
             REG_EQUAL note that is only valid if the branch has already been
             taken.  If we move the insn with the REG_EQUAL note, we may
             fold the comparison to always be false in a later CSE pass.
             (We could also delete the REG_NOTES when moving the insn, but it
             seems simpler to not move it.)  An exception is that we can move
             the insn if the only note is a REG_EQUAL or REG_EQUIV whose
             value is the same as "a".

             INSN is the goto.

             We set:

             TEMP to the jump insn preceding "x = a;"
             TEMP1 to X
             TEMP2 to the insn that sets "x = b;"
             TEMP3 to the insn that sets "x = a;"
             TEMP4 to the set of "x = a";  */
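
          /* Sketch (assuming X is not referenced in the test and A has no
             side effects):

		if (cond) { x = a; goto l; }  x = b;
             =>
		x = a;  if (cond) goto l;  x = b;

             The extra store of A on the fall-through path is dead, since
             it is immediately overwritten by B.  */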

          if (this_is_simplejump
              && (temp2 = next_active_insn (insn)) != 0
              && GET_CODE (temp2) == INSN
              && (temp4 = single_set (temp2)) != 0
              && GET_CODE (temp1 = SET_DEST (temp4)) == REG
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
              && (temp3 = prev_active_insn (insn)) != 0
              && GET_CODE (temp3) == INSN
              && (temp4 = single_set (temp3)) != 0
              && rtx_equal_p (SET_DEST (temp4), temp1)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && (REG_NOTES (temp3) == 0
                  || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
                       || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
                      && XEXP (REG_NOTES (temp3), 1) == 0
                      && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
                                      SET_SRC (temp4))))
              && (temp = prev_active_insn (temp3)) != 0
              && condjump_p (temp) && ! simplejump_p (temp)
              /* TEMP must skip over the "x = a;" insn */
              && prev_real_insn (JUMP_LABEL (temp)) == insn
              && no_labels_between_p (temp, insn))
            {
              rtx prev_label = JUMP_LABEL (temp);
              rtx insert_after = prev_nonnote_insn (temp);

#ifdef HAVE_cc0
              /* We cannot insert anything between a set of cc and its use.  */
              if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
                  && sets_cc0_p (PATTERN (insert_after)))
                insert_after = prev_nonnote_insn (insert_after);
#endif
              ++LABEL_NUSES (prev_label);

              if (insert_after
                  && no_labels_between_p (insert_after, temp)
                  && ! reg_referenced_between_p (temp1, insert_after, temp3)
                  && ! reg_referenced_between_p (temp1, temp3,
                                                 NEXT_INSN (temp2))
                  && ! reg_set_between_p (temp1, insert_after, temp)
                  && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
                  /* Verify that registers used by the jump are not clobbered
                     by the instruction being moved.  */
                  && ! regs_set_between_p (PATTERN (temp),
                                           PREV_INSN (temp3),
                                           NEXT_INSN (temp3))
                  && invert_jump (temp, JUMP_LABEL (insn)))
                {
                  emit_insn_after_with_line_notes (PATTERN (temp3),
                                                   insert_after, temp3);
                  delete_insn (temp3);
                  delete_insn (insn);
                  /* Set NEXT to an insn that we know won't go away.  */
                  next = temp2;
                  changed = 1;
                }
              if (prev_label && --LABEL_NUSES (prev_label) == 0)
                delete_insn (prev_label);
              if (changed)
                continue;
            }

#if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)

          /* If we have  if (...) x = exp;  and branches are expensive,
             EXP is a single insn, does not have any side effects, cannot
             trap, and is not too costly, convert this to
             t = exp; if (...) x = t;

             Don't do this when we have CC0 because it is unlikely to help
             and we'd need to worry about where to place the new insn and
             the potential for conflicts.  We also can't do this when we have
             notes on the insn for the same reason as above.

             If we have conditional arithmetic, this will make this
             harder to optimize later and isn't needed, so don't do it
             in that case either.

             We set:

             TEMP to the "x = exp;" insn.
             TEMP1 to the single set in the "x = exp;" insn.
             TEMP2 to "x".  */
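
          /* Illustrative effect (T is a fresh pseudo):

		if (cond) x = exp;
             =>
		t = exp;  if (cond) x = t;

             so the expensive branch guards only a cheap register copy.  */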

          if (! reload_completed
              && this_is_condjump && ! this_is_simplejump
              && BRANCH_COST >= 3
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && REG_NOTES (temp) == 0
              && (reallabelprev == temp
                  || ((temp2 = next_active_insn (temp)) != 0
                      && simplejump_p (temp2)
                      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
              && (temp1 = single_set (temp)) != 0
              && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
              && GET_CODE (SET_SRC (temp1)) != REG
              && GET_CODE (SET_SRC (temp1)) != SUBREG
              && GET_CODE (SET_SRC (temp1)) != CONST_INT
              && ! side_effects_p (SET_SRC (temp1))
              && ! may_trap_p (SET_SRC (temp1))
              && rtx_cost (SET_SRC (temp1), SET) < 10)
            {
              rtx new = gen_reg_rtx (GET_MODE (temp2));

              if ((temp3 = find_insert_position (insn, temp))
                  && validate_change (temp, &SET_DEST (temp1), new, 0))
                {
                  next = emit_insn_after (gen_move_insn (temp2, new), insn);
                  emit_insn_after_with_line_notes (PATTERN (temp),
                                                   PREV_INSN (temp3), temp);
                  delete_insn (temp);
                  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

                  if (after_regscan)
                    {
                      reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
                      old_max_reg = max_reg_num ();
                    }
                }
            }

          /* Similarly, if it takes two insns to compute EXP but they
             have the same destination.  Here TEMP3 will be the second
             insn and TEMP4 the SET from that insn.  */

          if (! reload_completed
              && this_is_condjump && ! this_is_simplejump
              && BRANCH_COST >= 4
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && REG_NOTES (temp) == 0
              && (temp3 = next_nonnote_insn (temp)) != 0
              && GET_CODE (temp3) == INSN
              && REG_NOTES (temp3) == 0
              && (reallabelprev == temp3
                  || ((temp2 = next_active_insn (temp3)) != 0
                      && simplejump_p (temp2)
                      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
              && (temp1 = single_set (temp)) != 0
              && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
              && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
              && ! side_effects_p (SET_SRC (temp1))
              && ! may_trap_p (SET_SRC (temp1))
              && rtx_cost (SET_SRC (temp1), SET) < 10
              && (temp4 = single_set (temp3)) != 0
              && rtx_equal_p (SET_DEST (temp4), temp2)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && rtx_cost (SET_SRC (temp4), SET) < 10)
            {
              rtx new = gen_reg_rtx (GET_MODE (temp2));

              if ((temp5 = find_insert_position (insn, temp))
                  && (temp6 = find_insert_position (insn, temp3))
                  && validate_change (temp, &SET_DEST (temp1), new, 0))
                {
                  /* Use the earliest of temp5 and temp6.  */
                  if (temp5 != insn)
                    temp6 = temp5;
                  next = emit_insn_after (gen_move_insn (temp2, new), insn);
                  emit_insn_after_with_line_notes (PATTERN (temp),
                                                   PREV_INSN (temp6), temp);
                  emit_insn_after_with_line_notes
                    (replace_rtx (PATTERN (temp3), temp2, new),
                     PREV_INSN (temp6), temp3);
                  delete_insn (temp);
                  delete_insn (temp3);
                  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

                  if (after_regscan)
                    {
                      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
                      old_max_reg = max_reg_num ();
                    }
                }
            }

          /* Finally, handle the case where two insns are used to
             compute EXP but a temporary register is used.  Here we must
             ensure that the temporary register is not used anywhere else.  */

          if (! reload_completed
              && after_regscan
              && this_is_condjump && ! this_is_simplejump
              && BRANCH_COST >= 4
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && REG_NOTES (temp) == 0
              && (temp3 = next_nonnote_insn (temp)) != 0
              && GET_CODE (temp3) == INSN
              && REG_NOTES (temp3) == 0
              && (reallabelprev == temp3
                  || ((temp2 = next_active_insn (temp3)) != 0
                      && simplejump_p (temp2)
                      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
              && (temp1 = single_set (temp)) != 0
              && (temp5 = SET_DEST (temp1),
                  (GET_CODE (temp5) == REG
                   || (GET_CODE (temp5) == SUBREG
                       && (temp5 = SUBREG_REG (temp5),
                           GET_CODE (temp5) == REG))))
              && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
              && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
              && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
              && ! side_effects_p (SET_SRC (temp1))
              && ! may_trap_p (SET_SRC (temp1))
              && rtx_cost (SET_SRC (temp1), SET) < 10
              && (temp4 = single_set (temp3)) != 0
              && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
              && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
              && rtx_equal_p (SET_DEST (temp4), temp2)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && rtx_cost (SET_SRC (temp4), SET) < 10)
            {
              rtx new = gen_reg_rtx (GET_MODE (temp2));

              if ((temp5 = find_insert_position (insn, temp))
                  && (temp6 = find_insert_position (insn, temp3))
                  && validate_change (temp3, &SET_DEST (temp4), new, 0))
                {
                  /* Use the earliest of temp5 and temp6.  */
                  if (temp5 != insn)
                    temp6 = temp5;
                  next = emit_insn_after (gen_move_insn (temp2, new), insn);
                  emit_insn_after_with_line_notes (PATTERN (temp),
                                                   PREV_INSN (temp6), temp);
                  emit_insn_after_with_line_notes (PATTERN (temp3),
                                                   PREV_INSN (temp6), temp3);
                  delete_insn (temp);
                  delete_insn (temp3);
                  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

                  if (after_regscan)
                    {
                      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
                      old_max_reg = max_reg_num ();
                    }
                }
            }
#endif /* ! HAVE_cc0 && ! HAVE_conditional_arithmetic */

#ifdef HAVE_conditional_arithmetic
          /* ??? This is disabled in genconfig, as this simple-minded
             transformation can incredibly lengthen register lifetimes.

             Consider this example from cexp.c's yyparse:

		234 (set (pc)
		      (if_then_else (ne (reg:DI 149) (const_int 0 [0x0]))
			(label_ref 248) (pc)))
		237 (set (reg/i:DI 0 $0) (const_int 1 [0x1]))
		239 (set (pc) (label_ref 2382))
		248 (code_label ("yybackup"))

             This will be transformed to:

		237 (set (reg/i:DI 0 $0)
		      (if_then_else:DI (eq (reg:DI 149) (const_int 0 [0x0]))
			(const_int 1 [0x1]) (reg/i:DI 0 $0)))
		239 (set (pc)
		      (if_then_else (eq (reg:DI 149) (const_int 0 [0x0]))
			(label_ref 2382) (pc)))

             which, from this narrow viewpoint, looks fine.  Except that
             between this and 3 other occurrences of the same pattern, $0
             is now live for basically the entire function, and we'll
             get an abort in caller_save.

             Any replacement for this code should recall that a set of
             a register that is not live need not, and indeed should not,
             be conditionalized.  Either that, or delay the transformation
             until after register allocation.  */

          /* See if this is a conditional jump around a small number of
             instructions that we can conditionalize.  Don't do this before
             the initial CSE pass or after reload.

             We reject any insns that have side effects or may trap.
             Strictly speaking, this is not needed since the machine may
             support conditionalizing these too, but we won't deal with that
             now.  Specifically, this means that we can't conditionalize a
             CALL_INSN, which some machines, such as the ARC, can do, but
             this is a very minor optimization.  */
          if (this_is_condjump && ! this_is_simplejump
              && cse_not_expected && ! reload_completed
              && BRANCH_COST > 2
              && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (insn)), 0),
                                           insn))
            {
              rtx ourcond = XEXP (SET_SRC (PATTERN (insn)), 0);
              int num_insns = 0;
              char *storage = (char *) oballoc (0);
              int last_insn = 0, failed = 0;
              rtx changed_jump = 0;

              ourcond = gen_rtx (reverse_condition (GET_CODE (ourcond)),
                                 VOIDmode, XEXP (ourcond, 0),
                                 XEXP (ourcond, 1));

              /* Scan forward BRANCH_COST real insns looking for the
                 JUMP_LABEL of this insn.  We see if we think we can
                 conditionalize the insns we pass.  For now, we only deal
                 with insns that have one SET.  We stop after an insn that
                 modifies anything in OURCOND, if we have too many insns,
                 or if we have an insn with a side effect or that may trap.
                 Note that we will be modifying any unconditional jumps we
                 encounter to be conditional; this will have the effect of
                 also doing this optimization on the "else" the next time
                 around.  */
              for (temp1 = NEXT_INSN (insn);
                   num_insns <= BRANCH_COST && ! failed && temp1 != 0
                   && GET_CODE (temp1) != CODE_LABEL;
                   temp1 = NEXT_INSN (temp1))
                {
                  /* Ignore everything but an active insn.  */
                  if (GET_RTX_CLASS (GET_CODE (temp1)) != 'i'
                      || GET_CODE (PATTERN (temp1)) == USE
                      || GET_CODE (PATTERN (temp1)) == CLOBBER)
                    continue;

                  /* If this was an unconditional jump, record it since we'll
                     need to remove the BARRIER if we succeed.  We can only
                     have one such jump since there must be a label after
                     the BARRIER and it's either ours, in which case it's the
                     only one, or some other, in which case we'd fail.
                     Likewise if it's a CALL_INSN followed by a BARRIER.  */

                  if (simplejump_p (temp1)
                      || (GET_CODE (temp1) == CALL_INSN
                          && NEXT_INSN (temp1) != 0
                          && GET_CODE (NEXT_INSN (temp1)) == BARRIER))
                    {
                      if (changed_jump == 0)
                        changed_jump = temp1;
                      else
                        changed_jump
                          = gen_rtx_INSN_LIST (VOIDmode, temp1, changed_jump);
                    }

                  /* See if we are allowed another insn and if this insn
                     is one we think we may be able to handle.  */
                  if (++num_insns > BRANCH_COST
                      || last_insn
                      || (((temp2 = single_set (temp1)) == 0
                           || side_effects_p (SET_SRC (temp2))
                           || may_trap_p (SET_SRC (temp2)))
                          && GET_CODE (temp1) != CALL_INSN))
                    failed = 1;
                  else if (temp2 != 0)
                    validate_change (temp1, &SET_SRC (temp2),
                                     gen_rtx_IF_THEN_ELSE
                                     (GET_MODE (SET_DEST (temp2)),
                                      copy_rtx (ourcond),
                                      SET_SRC (temp2), SET_DEST (temp2)),
                                     1);
                  else
                    {
                      /* This is a CALL_INSN that doesn't have a SET.  */
                      rtx *call_loc = &PATTERN (temp1);

                      if (GET_CODE (*call_loc) == PARALLEL)
                        call_loc = &XVECEXP (*call_loc, 0, 0);

                      validate_change (temp1, call_loc,
                                       gen_rtx_IF_THEN_ELSE
                                       (VOIDmode, copy_rtx (ourcond),
                                        *call_loc, const0_rtx),
                                       1);
                    }

                  if (modified_in_p (ourcond, temp1))
                    last_insn = 1;
                }

              /* If we've reached our jump label, haven't failed, and all
                 the changes above are valid, we can delete this jump
                 insn.  Also remove a BARRIER after any jump that used
                 to be unconditional and remove any REG_EQUAL or REG_EQUIV
                 that might have previously been present on insns we
                 made conditional.  */
              if (temp1 == JUMP_LABEL (insn) && ! failed
                  && apply_change_group ())
                {
                  for (temp1 = NEXT_INSN (insn); temp1 != JUMP_LABEL (insn);
                       temp1 = NEXT_INSN (temp1))
                    if (GET_RTX_CLASS (GET_CODE (temp1)) == 'i')
                      for (temp2 = REG_NOTES (temp1); temp2 != 0;
                           temp2 = XEXP (temp2, 1))
                        if (REG_NOTE_KIND (temp2) == REG_EQUAL
                            || REG_NOTE_KIND (temp2) == REG_EQUIV)
                          remove_note (temp1, temp2);

                  if (changed_jump != 0)
                    {
                      while (GET_CODE (changed_jump) == INSN_LIST)
                        {
                          delete_barrier (NEXT_INSN (XEXP (changed_jump, 0)));
                          changed_jump = XEXP (changed_jump, 1);
                        }

                      delete_barrier (NEXT_INSN (changed_jump));
                    }

                  delete_insn (insn);
                  changed = 1;
                  continue;
                }
              else
                {
                  cancel_changes (0);
                  obfree (storage);
                }
            }
#endif
          /* If branches are expensive, convert
                if (foo) bar++;    to    bar += (foo != 0);
             and similarly for "bar--;"

             INSN is the conditional branch around the arithmetic.  We set:

             TEMP is the arithmetic insn.
             TEMP1 is the SET doing the arithmetic.
             TEMP2 is the operand being incremented or decremented.
             TEMP3 to the condition being tested.
             TEMP4 to the earliest insn used to find the condition.  */
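
          /* For instance, "if (foo) bar++;" is rewritten roughly as

		t = (foo != 0);		-- via emit_store_flag
		bar = bar + t;		-- via expand_binop with add_optab

             assuming both the store-flag and the addition succeed.  */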

          if ((BRANCH_COST >= 2
#ifdef HAVE_incscc
               || HAVE_incscc
#endif
#ifdef HAVE_decscc
               || HAVE_decscc
#endif
              )
              && ! reload_completed
              && this_is_condjump && ! this_is_simplejump
              && (temp = next_nonnote_insn (insn)) != 0
              && (temp1 = single_set (temp)) != 0
              && (temp2 = SET_DEST (temp1),
                  GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
              && GET_CODE (SET_SRC (temp1)) == PLUS
              && (XEXP (SET_SRC (temp1), 1) == const1_rtx
                  || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
              && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
              && ! side_effects_p (temp2)
              && ! may_trap_p (temp2)
              /* INSN must either branch to the insn after TEMP or the insn
                 after TEMP must branch to the same place as INSN.  */
              && (reallabelprev == temp
                  || ((temp3 = next_active_insn (temp)) != 0
                      && simplejump_p (temp3)
                      && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
              && (temp3 = get_condition (insn, &temp4)) != 0
              /* We must be comparing objects whose modes imply the size.
                 We could handle BLKmode if (1) emit_store_flag could
                 and (2) we could find the size reliably.  */
              && GET_MODE (XEXP (temp3, 0)) != BLKmode
              && can_reverse_comparison_p (temp3, insn))
            {
              rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
              enum rtx_code code = reverse_condition (GET_CODE (temp3));

              start_sequence ();

              /* It must be the case that TEMP2 is not modified in the range
                 [TEMP4, INSN).  The one exception we make is if the insn
                 before INSN sets TEMP2 to something which is also unchanged
                 in that range.  In that case, we can move the initialization
                 into our sequence.  */

              if ((temp5 = prev_active_insn (insn)) != 0
                  && no_labels_between_p (temp5, insn)
                  && GET_CODE (temp5) == INSN
                  && (temp6 = single_set (temp5)) != 0
                  && rtx_equal_p (temp2, SET_DEST (temp6))
                  && (CONSTANT_P (SET_SRC (temp6))
                      || GET_CODE (SET_SRC (temp6)) == REG
                      || GET_CODE (SET_SRC (temp6)) == SUBREG))
                {
                  emit_insn (PATTERN (temp5));
                  init_insn = temp5;
                  init = SET_SRC (temp6);
                }

              if (CONSTANT_P (init)
                  || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
                target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
                                          XEXP (temp3, 0), XEXP (temp3, 1),
                                          VOIDmode,
                                          (code == LTU || code == LEU
                                           || code == GTU || code == GEU), 1);

              /* If we can do the store-flag, do the addition or
                 subtraction.  */

              if (target)
                target = expand_binop (GET_MODE (temp2),
                                       (XEXP (SET_SRC (temp1), 1) == const1_rtx
                                        ? add_optab : sub_optab),
                                       temp2, target, temp2, 0, OPTAB_WIDEN);

              if (target != 0)
                {
                  /* Put the result back in temp2 in case it isn't already.
                     Then replace the jump, possibly a CC0-setting insn in
                     front of the jump, and TEMP, with the sequence we have
                     made.  */

                  if (target != temp2)
                    emit_move_insn (temp2, target);

                  seq = get_insns ();
                  end_sequence ();

                  emit_insns_before (seq, temp4);
                  delete_insn (temp);

                  if (init_insn)
                    delete_insn (init_insn);

                  next = NEXT_INSN (insn);
#ifdef HAVE_cc0
                  delete_insn (prev_nonnote_insn (insn));
#endif
                  delete_insn (insn);

                  if (after_regscan)
                    {
                      reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
                      old_max_reg = max_reg_num ();
                    }

                  changed = 1;
                  continue;
                }
              else
                end_sequence ();
            }

          /* Try to use a conditional move (if the target has them), or a
             store-flag insn.  If the target has conditional arithmetic as
             well as conditional move, the above code will have done something.
             Note that we prefer the above code since it is more general: the
             code below can make changes that require work to undo.

             The general case here is:

             1) x = a; if (...) x = b; and
             2) if (...) x = b;

             If the jump would be faster, the machine should not have defined
             the movcc or scc insns!  These cases are often made by the
             previous optimization.

             The second case is treated as  x = x; if (...) x = b;.

             INSN here is the jump around the store.  We set:

             TEMP to the "x op= b;" insn.
             TEMP1 to X.
             TEMP2 to B.
             TEMP3 to A (X in the second case).
             TEMP4 to the condition being tested.
             TEMP5 to the earliest insn used to find the condition.
             TEMP6 to the SET of TEMP.  */
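
          /* Sketch of case 1 with a conditional move (target permitting):

		x = a; if (cond) x = b;
             =>
		x = a; x = cond ? b : x;

             emitted below via emit_conditional_move.  */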

          if (/* We can't do this after reload has completed.  */
              ! reload_completed
#ifdef HAVE_conditional_arithmetic
              /* Defer this until after CSE so the above code gets the
                 first crack at it.  */
              && cse_not_expected
#endif
              && this_is_condjump && ! this_is_simplejump
              /* Set TEMP to the "x = b;" insn.  */
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && (temp6 = single_set (temp)) != NULL_RTX
              && GET_CODE (temp1 = SET_DEST (temp6)) == REG
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
              && ! side_effects_p (temp2 = SET_SRC (temp6))
              && ! may_trap_p (temp2)
              /* Allow either form, but prefer the former if both apply.
                 There is no point in using the old value of TEMP1 if
                 it is a register, since cse will alias them.  It can
                 lose if the old value were a hard register since CSE
                 won't replace hard registers.  Avoid using TEMP3 if
                 small register classes and it is a hard register.  */
              && (((temp3 = reg_set_last (temp1, insn)) != 0
                   && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
                         && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
                  /* Make the latter case look like  x = x; if (...) x = b;  */
                  || (temp3 = temp1, 1))
              /* INSN must either branch to the insn after TEMP or the insn
                 after TEMP must branch to the same place as INSN.  */
              && (reallabelprev == temp
                  || ((temp4 = next_active_insn (temp)) != 0
                      && simplejump_p (temp4)
                      && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
              && (temp4 = get_condition (insn, &temp5)) != 0
              /* We must be comparing objects whose modes imply the size.
                 We could handle BLKmode if (1) emit_store_flag could
                 and (2) we could find the size reliably.  */
              && GET_MODE (XEXP (temp4, 0)) != BLKmode
              /* Even if branches are cheap, the store_flag optimization
                 can win when the operation to be performed can be
                 expressed directly.  */
#ifdef HAVE_cc0
              /* If the previous insn sets CC0 and something else, we can't
                 do this since we are going to delete that insn.  */

              && ! ((temp6 = prev_nonnote_insn (insn)) != 0
                    && GET_CODE (temp6) == INSN
                    && (sets_cc0_p (PATTERN (temp6)) == -1
                        || (sets_cc0_p (PATTERN (temp6)) == 1
                            && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
#endif
              )
            {
#ifdef HAVE_conditional_move
              /* First try a conditional move.  */
              {
                enum rtx_code code = GET_CODE (temp4);
                rtx var = temp1;
                rtx cond0, cond1, aval, bval;
                rtx target, new_insn;

                /* Copy the compared variables into cond0 and cond1, so that
                   any side effects performed in or after the old comparison
                   will not affect our compare, which will come later.  */
                /* ??? Is it possible to just use the comparison in the jump
                   insn?  After all, we're going to delete it.  We'd have
                   to modify emit_conditional_move to take a comparison rtx
                   instead or write a new function.  */

                /* We want the target to be able to simplify comparisons with
                   zero (and maybe other constants as well), so don't create
                   pseudos for them.  There's no need to either.  */
                if (GET_CODE (XEXP (temp4, 0)) == CONST_INT
                    || GET_CODE (XEXP (temp4, 0)) == CONST_DOUBLE)
                  cond0 = XEXP (temp4, 0);
                else
                  cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));

                if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
                    || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
                  cond1 = XEXP (temp4, 1);
                else
                  cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));

                /* Careful about copying these values -- an IOR or what may
                   need to do other things, like clobber flags.  */
                /* ??? Assume for the moment that AVAL is ok.  */
                aval = temp3;

                start_sequence ();

                /* We're dealing with a single_set insn with no side effects
                   on SET_SRC.  We do need to be reasonably certain that if
                   we need to force BVAL into a register that we won't
                   clobber the flags -- general_operand should suffice.  */
                if (general_operand (temp2, GET_MODE (var)))
                  bval = temp2;
                else
                  {
                    bval = gen_reg_rtx (GET_MODE (var));
                    new_insn = copy_rtx (temp);
                    temp6 = single_set (new_insn);
                    SET_DEST (temp6) = bval;
                    emit_insn (PATTERN (new_insn));
                  }

                target = emit_conditional_move (var, code,
                                                cond0, cond1, VOIDmode,
                                                aval, bval, GET_MODE (var),
                                                (code == LTU || code == GEU
                                                 || code == LEU || code == GTU));

                if (target)
                  {
                    rtx seq1, seq2, last;
                    int copy_ok;

                    /* Save the conditional move sequence but don't emit it
                       yet.  On some machines, like the alpha, it is possible
                       that temp5 == insn, so next generate the sequence that
                       saves the compared values and then emit both
                       sequences ensuring seq1 occurs before seq2.  */
                    seq2 = get_insns ();
                    end_sequence ();

                    /* "Now that we can't fail..."  Famous last words.
                       Generate the copy insns that preserve the compared
                       values.  */
                    start_sequence ();
                    emit_move_insn (cond0, XEXP (temp4, 0));
                    if (cond1 != XEXP (temp4, 1))
                      emit_move_insn (cond1, XEXP (temp4, 1));
                    seq1 = get_insns ();
                    end_sequence ();

                    /* Validate the sequence -- this may be some weird
                       bit-extract-and-test instruction for which there
                       exists no complementary bit-extract insn.  */
                    copy_ok = 1;
                    for (last = seq1; last; last = NEXT_INSN (last))
                      if (recog_memoized (last) < 0)
                        {
                          copy_ok = 0;
                          break;
                        }

                    if (copy_ok)
                      {
                        emit_insns_before (seq1, temp5);

                        /* Insert conditional move after insn, to be sure
                           that the jump and a possible compare won't be
                           separated.  */
                        last = emit_insns_after (seq2, insn);

                        /* ??? We can also delete the insn that sets X to A.
                           Flow will do it too though.  */
                        delete_insn (temp);
                        next = NEXT_INSN (insn);
                        delete_jump (insn);

                        if (after_regscan)
                          {
                            reg_scan_update (seq1, NEXT_INSN (last),
                                             old_max_reg);
                            old_max_reg = max_reg_num ();
                          }

                        changed = 1;
                        continue;
                      }
                  }
                else
                  end_sequence ();
              }
#endif

              /* That didn't work, try a store-flag insn.

                 We further divide the cases into:

                 1) x = a; if (...) x = b; and either A or B is zero,
                 2) if (...) x = 0; and jumps are expensive,
                 3) x = a; if (...) x = b; and A and B are constants where all
                    the set bits in A are also set in B and jumps are expensive,
                 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
                    more expensive, and
                 5) if (...) x = b; if jumps are even more expensive.  */
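
              /* A minimal sketch of case 2 (assuming STORE_FLAG_VALUE == 1):
                 for "if (cond) x = 0;" we emit T = <jump-taken condition>,
                 normalized to -1/0, followed by "x &= T", so X is cleared
                 exactly when the store would have been executed.  */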

              if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
                  /* We will be passing this as operand into expand_and.  No
                     good if it's not valid as an operand.  */
                  && general_operand (temp2, GET_MODE (temp2))
                  && ((GET_CODE (temp3) == CONST_INT)
                      /* Make the latter case look like
                         x = x; if (...) x = 0;  */
                      || (temp3 = temp1,
                          ((BRANCH_COST >= 2
                            && temp2 == const0_rtx)
                           || BRANCH_COST >= 3)))
                  /* If B is zero, OK; if A is zero, can only do (1) if we
                     can reverse the condition.  See if (3) applies possibly
                     by reversing the condition.  Prefer reversing to (4) when
                     branches are very expensive.  */
                  && (((BRANCH_COST >= 2
                        || STORE_FLAG_VALUE == -1
                        || (STORE_FLAG_VALUE == 1
                            /* Check that the mask is a power of two,
                               so that it can probably be generated
                               with a shift.  */
                            && GET_CODE (temp3) == CONST_INT
                            && exact_log2 (INTVAL (temp3)) >= 0))
                       && (reversep = 0, temp2 == const0_rtx))
                      || ((BRANCH_COST >= 2
                           || STORE_FLAG_VALUE == -1
                           || (STORE_FLAG_VALUE == 1
                               && GET_CODE (temp2) == CONST_INT
                               && exact_log2 (INTVAL (temp2)) >= 0))
                          && temp3 == const0_rtx
                          && (reversep = can_reverse_comparison_p (temp4, insn)))
                      || (BRANCH_COST >= 2
                          && GET_CODE (temp2) == CONST_INT
                          && GET_CODE (temp3) == CONST_INT
                          && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
                              || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
                                  && (reversep = can_reverse_comparison_p (temp4,
                                                                           insn)))))
                      || BRANCH_COST >= 3))
                {
                  enum rtx_code code = GET_CODE (temp4);
                  rtx uval, cval, var = temp1;
                  int normalizep;
                  rtx target;

                  /* If necessary, reverse the condition.  */
                  if (reversep)
                    code = reverse_condition (code), uval = temp2, cval = temp3;
                  else
                    uval = temp3, cval = temp2;

                  /* If CVAL is non-zero, normalize to -1.  Otherwise, if UVAL
                     is the constant 1, it is best to just compute the result
                     directly.  If UVAL is constant and STORE_FLAG_VALUE
                     includes all of its bits, it is best to compute the flag
                     value unnormalized and `and' it with UVAL.  Otherwise,
                     normalize to -1 and `and' with UVAL.  */
                  normalizep = (cval != const0_rtx ? -1
                                : (uval == const1_rtx ? 1
                                   : (GET_CODE (uval) == CONST_INT
                                      && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
                                   ? 0 : -1));

                  /* We will be putting the store-flag insn immediately in
                     front of the comparison that was originally being done,
                     so we know all the variables in TEMP4 will be valid.
                     However, this might be in front of the assignment of
                     A to VAR.  If it is, it would clobber the store-flag
                     we will be emitting.

                     Therefore, emit into a temporary which will be copied to
                     VAR immediately after TEMP.  */

                  start_sequence ();
                  target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
                                            XEXP (temp4, 0), XEXP (temp4, 1),
                                            VOIDmode,
                                            (code == LTU || code == LEU
                                             || code == GEU || code == GTU),
                                            normalizep);
                  if (target)
                    {
                      rtx seq;
                      rtx before = insn;

                      seq = get_insns ();
                      end_sequence ();

                      /* Put the store-flag insns in front of the first insn
                         used to compute the condition to ensure that we
                         use the same values of them as the current
                         comparison.  However, the remainder of the insns we
                         generate will be placed directly in front of the
                         jump insn, in case any of the pseudos we use
                         are modified earlier.  */

                      emit_insns_before (seq, temp5);

                      start_sequence ();

                      /* Both CVAL and UVAL are non-zero.  */
                      if (cval != const0_rtx && uval != const0_rtx)
                        {
                          rtx tem1, tem2;

                          tem1 = expand_and (uval, target, NULL_RTX);
                          if (GET_CODE (cval) == CONST_INT
                              && GET_CODE (uval) == CONST_INT
                              && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
                            tem2 = cval;
                          else
                            {
                              tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
                                                  target, NULL_RTX, 0);
                              tem2 = expand_and (cval, tem2,
                                                 (GET_CODE (tem2) == REG
                                                  ? tem2 : 0));
                            }

                          /* If we usually make new pseudos, do so here.  This
                             turns out to help machines that have conditional
                             move insns.  */
                          /* ??? Conditional moves have already been handled.
                             This may be obsolete.  */

                          if (flag_expensive_optimizations)
                            target = 0;

                          target = expand_binop (GET_MODE (var), ior_optab,
                                                 tem1, tem2, target,
                                                 1, OPTAB_WIDEN);
                        }
                      else if (normalizep != 1)
                        {
                          /* We know that either CVAL or UVAL is zero.  If
                             UVAL is zero, negate TARGET and `and' with CVAL.
                             Otherwise, `and' with UVAL.  */
                          if (uval == const0_rtx)
                            {
                              target = expand_unop (GET_MODE (var), one_cmpl_optab,
                                                    target, NULL_RTX, 0);
                              uval = cval;
                            }

                          target = expand_and (uval, target,
                                               (GET_CODE (target) == REG
                                                && ! preserve_subexpressions_p ()
                                                ? target : NULL_RTX));
                        }

                      emit_move_insn (var, target);
                      seq = get_insns ();
                      end_sequence ();
#ifdef HAVE_cc0
                      /* If INSN uses CC0, we must not separate it from the
                         insn that sets cc0.  */
                      if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
                        before = prev_nonnote_insn (before);
#endif
                      emit_insns_before (seq, before);

                      delete_insn (temp);
                      next = NEXT_INSN (insn);
                      delete_jump (insn);

                      if (after_regscan)
                        {
                          reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
                          old_max_reg = max_reg_num ();
                        }

                      changed = 1;
                      continue;
                    }
                  else
                    end_sequence ();
                }
            }

          /* Simplify   if (...) x = 1; else {...}  if (x) ...
             We recognize this case scanning backwards as well.

             TEMP is the assignment to x;
             TEMP1 is the label at the head of the second if.  */
          /* ?? This should call get_condition to find the values being
             compared, instead of looking for a COMPARE insn when HAVE_cc0
             is not defined.  This would allow it to work on the m88k.  */
          /* ?? This optimization is only safe before cse is run if HAVE_cc0
             is not defined and the condition is tested by a separate compare
             insn.  This is because the code below assumes that the result
             of the compare dies in the following branch.

             Not only that, but there might be other insns between the
             compare and branch whose results are live.  Those insns need
             to be executed.

             A way to fix this is to move the insns at JUMP_LABEL (insn)
             to before INSN.  If we are running before flow, they will
             be deleted if they aren't needed.  But this doesn't work
             well after flow.

             This is really a special-case of jump threading, anyway.  The
             right thing to do is to replace this and jump threading with
             much simpler code in cse.

             This code has been turned off in the non-cc0 case in the
             meantime.  */
1640
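/* As a source-level illustration (hypothetical code, not from any
   particular test case):

       x = 1;                         x = 1;
       goto L1;            ==>        goto L2;
       ... else arm ...               ... else arm ...
     L1:                            L1:
       if (x) goto L2;                if (x) goto L2;

   Since `x' is a known constant when the simplejump is taken, the
   test at L1 is decidable on that path, and the jump can be
   redirected straight to the place the second `if' would go.  */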
1641 #ifdef HAVE_cc0
1642 else if (this_is_simplejump
1643 /* Safe to skip USE and CLOBBER insns here
1644 since they will not be deleted. */
1645 && (temp = prev_active_insn (insn))
1646 && no_labels_between_p (temp, insn)
1647 && GET_CODE (temp) == INSN
1648 && GET_CODE (PATTERN (temp)) == SET
1649 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1650 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1651 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1652 /* If we find that the next value tested is `x'
1653 (TEMP1 is the insn where this happens), win. */
1654 && GET_CODE (temp1) == INSN
1655 && GET_CODE (PATTERN (temp1)) == SET
1656 #ifdef HAVE_cc0
1657 /* Does temp1 `tst' the value of x? */
1658 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1659 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1660 && (temp1 = next_nonnote_insn (temp1))
1661 #else
1662 /* Does temp1 compare the value of x against zero? */
1663 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1664 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1665 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1666 == SET_DEST (PATTERN (temp)))
1667 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1668 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1669 #endif
1670 && condjump_p (temp1))
1671 {
1672 /* Get the if_then_else from the condjump. */
1673 rtx choice = SET_SRC (PATTERN (temp1));
1674 if (GET_CODE (choice) == IF_THEN_ELSE)
1675 {
1676 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1677 rtx val = SET_SRC (PATTERN (temp));
1678 rtx cond
1679 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1680 val, const0_rtx);
1681 rtx ultimate;
1682
1683 if (cond == const_true_rtx)
1684 ultimate = XEXP (choice, 1);
1685 else if (cond == const0_rtx)
1686 ultimate = XEXP (choice, 2);
1687 else
1688 ultimate = 0;
1689
1690 if (ultimate == pc_rtx)
1691 ultimate = get_label_after (temp1);
1692 else if (ultimate && GET_CODE (ultimate) != RETURN)
1693 ultimate = XEXP (ultimate, 0);
1694
1695 if (ultimate && JUMP_LABEL (insn) != ultimate)
1696 changed |= redirect_jump (insn, ultimate);
1697 }
1698 }
1699 #endif
1700
1701 #if 0
1702 /* @@ This needs a bit of work before it will be right.
1703
1704 Any type of comparison can be accepted for the first and
1705 second compare. When rewriting the first jump, we must
1706 compute what conditions can reach label3, and use the
1707 appropriate code.  We cannot simply reverse/swap the code
1708 of the first jump. In some cases, the second jump must be
1709 rewritten also.
1710
1711 For example,
1712 < == converts to > ==
1713 < != converts to == >
1714 etc.
1715
1716 If the code is written to only accept an '==' test for the second
1717 compare, then all that needs to be done is to swap the condition
1718 of the first branch.
1719
1720 It is questionable whether we want this optimization anyway,
1721 since if the user wrote code like this knowing that the
1722 jump to label1 is taken most of the time, then rewriting
1723 it gives slower code. */
1724 /* @@ This should call get_condition to find the values being
1725 compared, instead of looking for a COMPARE insn when HAVE_cc0
1726 is not defined. This would allow it to work on the m88k. */
1727 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1728 is not defined and the condition is tested by a separate compare
1729 insn. This is because the code below assumes that the result
1730 of the compare dies in the following branch. */
1731
1732 /* Simplify test a ~= b
1733 condjump label1;
1734 test a == b
1735 condjump label2;
1736 jump label3;
1737 label1:
1738
1739 rewriting as
1740 test a ~~= b
1741 condjump label3
1742 test a == b
1743 condjump label2
1744 label1:
1745
1746 where ~= is an inequality, e.g. >, and ~~= is the swapped
1747 inequality, e.g. <.
1748
1749 We recognize this case scanning backwards.
1750
1751 TEMP is the conditional jump to `label2';
1752 TEMP1 is the test for `a == b';
1753 TEMP2 is the conditional jump to `label1';
1754 TEMP3 is the test for `a ~= b'. */
1755 else if (this_is_simplejump
1756 && (temp = prev_active_insn (insn))
1757 && no_labels_between_p (temp, insn)
1758 && condjump_p (temp)
1759 && (temp1 = prev_active_insn (temp))
1760 && no_labels_between_p (temp1, temp)
1761 && GET_CODE (temp1) == INSN
1762 && GET_CODE (PATTERN (temp1)) == SET
1763 #ifdef HAVE_cc0
1764 && sets_cc0_p (PATTERN (temp1)) == 1
1765 #else
1766 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1767 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1768 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1769 #endif
1770 && (temp2 = prev_active_insn (temp1))
1771 && no_labels_between_p (temp2, temp1)
1772 && condjump_p (temp2)
1773 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1774 && (temp3 = prev_active_insn (temp2))
1775 && no_labels_between_p (temp3, temp2)
1776 && GET_CODE (PATTERN (temp3)) == SET
1777 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1778 SET_DEST (PATTERN (temp1)))
1779 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1780 SET_SRC (PATTERN (temp3)))
1781 && ! inequality_comparisons_p (PATTERN (temp))
1782 && inequality_comparisons_p (PATTERN (temp2)))
1783 {
1784 rtx fallthrough_label = JUMP_LABEL (temp2);
1785
1786 ++LABEL_NUSES (fallthrough_label);
1787 if (swap_jump (temp2, JUMP_LABEL (insn)))
1788 {
1789 delete_insn (insn);
1790 changed = 1;
1791 }
1792
1793 if (--LABEL_NUSES (fallthrough_label) == 0)
1794 delete_insn (fallthrough_label);
1795 }
1796 #endif
1797 /* Simplify if (...) {... x = 1;} if (x) ...
1798
1799 We recognize this case backwards.
1800
1801 TEMP is the test of `x';
1802 TEMP1 is the assignment to `x' at the end of the
1803 previous statement. */
1804 /* @@ This should call get_condition to find the values being
1805 compared, instead of looking for a COMPARE insn when HAVE_cc0
1806 is not defined. This would allow it to work on the m88k. */
1807 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1808 is not defined and the condition is tested by a separate compare
1809 insn. This is because the code below assumes that the result
1810 of the compare dies in the following branch. */
1811
1812 /* ??? This has to be turned off. The problem is that the
1813 unconditional jump might indirectly end up branching to the
1814 label between TEMP1 and TEMP. We can't detect this, in general,
1815 since it may become a jump to there after further optimizations.
1816 If that jump is done, it will be deleted, so we will retry
1817 this optimization in the next pass, thus an infinite loop.
1818
1819 The present code prevents this by putting the jump after the
1820 label, but this is not logically correct. */
1821 #if 0
1822 else if (this_is_condjump
1823 /* Safe to skip USE and CLOBBER insns here
1824 since they will not be deleted. */
1825 && (temp = prev_active_insn (insn))
1826 && no_labels_between_p (temp, insn)
1827 && GET_CODE (temp) == INSN
1828 && GET_CODE (PATTERN (temp)) == SET
1829 #ifdef HAVE_cc0
1830 && sets_cc0_p (PATTERN (temp)) == 1
1831 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1832 #else
1833 /* TEMP must be a compare insn; we cannot accept a register
1834 to register move here, since it may not be simply a
1835 tst insn. */
1836 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1837 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1838 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1839 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1840 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1841 #endif
1842 /* May skip USE or CLOBBER insns here
1843 for checking for opportunity, since we
1844 take care of them later. */
1845 && (temp1 = prev_active_insn (temp))
1846 && GET_CODE (temp1) == INSN
1847 && GET_CODE (PATTERN (temp1)) == SET
1848 #ifdef HAVE_cc0
1849 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1850 #else
1851 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1852 == SET_DEST (PATTERN (temp1)))
1853 #endif
1854 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1855 /* If this isn't true, cse will do the job. */
1856 && ! no_labels_between_p (temp1, temp))
1857 {
1858 /* Get the if_then_else from the condjump. */
1859 rtx choice = SET_SRC (PATTERN (insn));
1860 if (GET_CODE (choice) == IF_THEN_ELSE
1861 && (GET_CODE (XEXP (choice, 0)) == EQ
1862 || GET_CODE (XEXP (choice, 0)) == NE))
1863 {
1864 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1865 rtx last_insn;
1866 rtx ultimate;
1867 rtx p;
1868
1869 /* Get the place that condjump will jump to
1870 if it is reached from here. */
1871 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1872 == want_nonzero)
1873 ultimate = XEXP (choice, 1);
1874 else
1875 ultimate = XEXP (choice, 2);
1876 /* Get it as a CODE_LABEL. */
1877 if (ultimate == pc_rtx)
1878 ultimate = get_label_after (insn);
1879 else
1880 /* Get the label out of the LABEL_REF. */
1881 ultimate = XEXP (ultimate, 0);
1882
1883 /* Insert the jump immediately before TEMP, specifically
1884 after the label that is between TEMP1 and TEMP. */
1885 last_insn = PREV_INSN (temp);
1886
1887 /* If we would be branching to the next insn, the jump
1888 would immediately be deleted and then re-inserted in
1889 a subsequent pass over the code, so don't do anything
1890 in that case. */
1891 if (next_active_insn (last_insn)
1892 != next_active_insn (ultimate))
1893 {
1894 emit_barrier_after (last_insn);
1895 p = emit_jump_insn_after (gen_jump (ultimate),
1896 last_insn);
1897 JUMP_LABEL (p) = ultimate;
1898 ++LABEL_NUSES (ultimate);
1899 if (INSN_UID (ultimate) < max_jump_chain
1900 && INSN_UID (p) < max_jump_chain)
1901 {
1902 jump_chain[INSN_UID (p)]
1903 = jump_chain[INSN_UID (ultimate)];
1904 jump_chain[INSN_UID (ultimate)] = p;
1905 }
1906 changed = 1;
1907 continue;
1908 }
1909 }
1910 }
1911 #endif
1912 #ifdef HAVE_trap
1913 /* Detect a conditional jump jumping over an unconditional trap. */
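/* Schematically (an illustrative sketch):

       if (cond) goto L;            trap_if (! cond);
       trap;                ==>   L:
     L:                            ...
       ...

   i.e. the conditional jump and the unconditional trap are folded
   into one conditional trap on the reversed condition.  */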
1914 else if (HAVE_trap
1915 && this_is_condjump && ! this_is_simplejump
1916 && reallabelprev != 0
1917 && GET_CODE (reallabelprev) == INSN
1918 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1919 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1920 && prev_active_insn (reallabelprev) == insn
1921 && no_labels_between_p (insn, reallabelprev)
1922 && (temp2 = get_condition (insn, &temp4))
1923 && can_reverse_comparison_p (temp2, insn))
1924 {
1925 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1926 XEXP (temp2, 0), XEXP (temp2, 1),
1927 TRAP_CODE (PATTERN (reallabelprev)));
1928
1929 if (new)
1930 {
1931 emit_insn_before (new, temp4);
1932 delete_insn (reallabelprev);
1933 delete_jump (insn);
1934 changed = 1;
1935 continue;
1936 }
1937 }
1938 /* Detect a jump jumping to an unconditional trap. */
1939 else if (HAVE_trap && this_is_condjump
1940 && (temp = next_active_insn (JUMP_LABEL (insn)))
1941 && GET_CODE (temp) == INSN
1942 && GET_CODE (PATTERN (temp)) == TRAP_IF
1943 && (this_is_simplejump
1944 || (temp2 = get_condition (insn, &temp4))))
1945 {
1946 rtx tc = TRAP_CONDITION (PATTERN (temp));
1947
1948 if (tc == const_true_rtx
1949 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1950 {
1951 rtx new;
1952 /* Replace an unconditional jump to a trap with a trap. */
1953 if (this_is_simplejump)
1954 {
1955 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1956 delete_jump (insn);
1957 changed = 1;
1958 continue;
1959 }
1960 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1961 XEXP (temp2, 1),
1962 TRAP_CODE (PATTERN (temp)));
1963 if (new)
1964 {
1965 emit_insn_before (new, temp4);
1966 delete_jump (insn);
1967 changed = 1;
1968 continue;
1969 }
1970 }
1971 /* If the trap condition and jump condition are mutually
1972 exclusive, redirect the jump to the following insn. */
1973 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1974 && ! this_is_simplejump
1975 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
1976 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
1977 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
1978 && redirect_jump (insn, get_label_after (temp)))
1979 {
1980 changed = 1;
1981 continue;
1982 }
1983 }
1984 #endif
1985 else
1986 {
1987 /* Now that the jump has been tensioned,
1988 try cross jumping: check for identical code
1989 before the jump and before its target label. */
1990
1991 /* First, cross jumping of conditional jumps: */
1992
1993 if (cross_jump && condjump_p (insn))
1994 {
1995 rtx newjpos, newlpos;
1996 rtx x = prev_real_insn (JUMP_LABEL (insn));
1997
1998 /* A conditional jump may be crossjumped
1999 only if the place it jumps to follows
2000 an opposing jump that comes back here. */
2001
2002 if (x != 0 && ! jump_back_p (x, insn))
2003 /* We have no opposing jump;
2004 cannot cross jump this insn. */
2005 x = 0;
2006
2007 newjpos = 0;
2008 /* X is nonzero if it is ok to cross jump
2009 to code before X.  If so, see if it matches. */
2010 if (x != 0)
2011 find_cross_jump (insn, x, 2,
2012 &newjpos, &newlpos);
2013
2014 if (newjpos != 0)
2015 {
2016 do_cross_jump (insn, newjpos, newlpos);
2017 /* Make the old conditional jump
2018 into an unconditional one. */
2019 SET_SRC (PATTERN (insn))
2020 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
2021 INSN_CODE (insn) = -1;
2022 emit_barrier_after (insn);
2023 /* Add to jump_chain unless this is a new label
2024 whose UID is too large. */
2025 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
2026 {
2027 jump_chain[INSN_UID (insn)]
2028 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2029 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2030 }
2031 changed = 1;
2032 next = insn;
2033 }
2034 }
2035
2036 /* Cross jumping of unconditional jumps:
2037 a few differences. */
2038
2039 if (cross_jump && simplejump_p (insn))
2040 {
2041 rtx newjpos, newlpos;
2042 rtx target;
2043
2044 newjpos = 0;
2045
2046 /* TARGET is nonzero if it is ok to cross jump
2047 to code before TARGET.  If so, see if it matches. */
2048 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2049 &newjpos, &newlpos);
2050
2051 /* If we cannot cross jump to code before the label,
2052 see if we can cross jump to another jump to
2053 the same label. */
2054 /* Try each other jump to this label. */
2055 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2056 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2057 target != 0 && newjpos == 0;
2058 target = jump_chain[INSN_UID (target)])
2059 if (target != insn
2060 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2061 /* Ignore TARGET if it's deleted. */
2062 && ! INSN_DELETED_P (target))
2063 find_cross_jump (insn, target, 2,
2064 &newjpos, &newlpos);
2065
2066 if (newjpos != 0)
2067 {
2068 do_cross_jump (insn, newjpos, newlpos);
2069 changed = 1;
2070 next = insn;
2071 }
2072 }
2073
2074 /* This code was dead in the previous jump.c! */
2075 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2076 {
2077 /* Return insns all "jump to the same place"
2078 so we can cross-jump between any two of them. */
2079
2080 rtx newjpos, newlpos, target;
2081
2082 newjpos = 0;
2083
2084 /* If we cannot cross jump to code before the label,
2085 see if we can cross jump to another jump to
2086 the same label. */
2087 /* Try each other jump to this label. */
2088 for (target = jump_chain[0];
2089 target != 0 && newjpos == 0;
2090 target = jump_chain[INSN_UID (target)])
2091 if (target != insn
2092 && ! INSN_DELETED_P (target)
2093 && GET_CODE (PATTERN (target)) == RETURN)
2094 find_cross_jump (insn, target, 2,
2095 &newjpos, &newlpos);
2096
2097 if (newjpos != 0)
2098 {
2099 do_cross_jump (insn, newjpos, newlpos);
2100 changed = 1;
2101 next = insn;
2102 }
2103 }
2104 }
2105 }
2106
2107 first = 0;
2108 }
2109
2110 /* Delete extraneous line number notes.
2111 Note that two consecutive notes for different lines are not really
2112 extraneous. There should be some indication where that line belonged,
2113 even if it became empty. */
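/* E.g. if two line-number notes in a row (ignoring the insns
   between them) both say line 42 of the same file, the second is
   deleted; a note for line 43 followed by one for line 42 is kept,
   since the (possibly now empty) line 42 still needs its own
   indication.  */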
2114
2115 {
2116 rtx last_note = 0;
2117
2118 for (insn = f; insn; insn = NEXT_INSN (insn))
2119 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2120 {
2121 /* Delete this note if it is identical to previous note. */
2122 if (last_note
2123 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2124 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2125 {
2126 delete_insn (insn);
2127 continue;
2128 }
2129
2130 last_note = insn;
2131 }
2132 }
2133
2134 /* CAN_REACH_END is persistent for each function. Once set it should
2135 not be cleared. This is especially true for the case where we
2136 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
2137 the front-end before compiling each function. */
2138 if (calculate_can_reach_end (last_insn, optimize != 0))
2139 can_reach_end = 1;
2140
2141 end:
2142 /* Clean up. */
2143 free (jump_chain);
2144 jump_chain = 0;
2145 }
2146 \f
2147 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2148 notes whose labels don't occur in the insn any more. Returns the
2149 largest INSN_UID found. */
2150 static int
2151 init_label_info (f)
2152 rtx f;
2153 {
2154 int largest_uid = 0;
2155 rtx insn;
2156
2157 for (insn = f; insn; insn = NEXT_INSN (insn))
2158 {
2159 if (GET_CODE (insn) == CODE_LABEL)
2160 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2161 else if (GET_CODE (insn) == JUMP_INSN)
2162 JUMP_LABEL (insn) = 0;
2163 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2164 {
2165 rtx note, next;
2166
2167 for (note = REG_NOTES (insn); note; note = next)
2168 {
2169 next = XEXP (note, 1);
2170 if (REG_NOTE_KIND (note) == REG_LABEL
2171 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2172 remove_note (insn, note);
2173 }
2174 }
2175 if (INSN_UID (insn) > largest_uid)
2176 largest_uid = INSN_UID (insn);
2177 }
2178
2179 return largest_uid;
2180 }
2181
2182 /* Delete insns following barriers, up to next label.
2183
2184 Also delete no-op jumps created by gcse. */
2185
2186 static void
2187 delete_barrier_successors (f)
2188 rtx f;
2189 {
2190 rtx insn;
2191
2192 for (insn = f; insn;)
2193 {
2194 if (GET_CODE (insn) == BARRIER)
2195 {
2196 insn = NEXT_INSN (insn);
2197
2198 never_reached_warning (insn);
2199
2200 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2201 {
2202 if (GET_CODE (insn) == NOTE
2203 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2204 insn = NEXT_INSN (insn);
2205 else
2206 insn = delete_insn (insn);
2207 }
2208 /* INSN is now the code_label. */
2209 }
2210
2211 /* Also remove (set (pc) (pc)) insns which can be created by
2212 gcse. We eliminate such insns now to avoid having them
2213 cause problems later. */
2214 else if (GET_CODE (insn) == JUMP_INSN
2215 && GET_CODE (PATTERN (insn)) == SET
2216 && SET_SRC (PATTERN (insn)) == pc_rtx
2217 && SET_DEST (PATTERN (insn)) == pc_rtx)
2218 insn = delete_insn (insn);
2219
2220 else
2221 insn = NEXT_INSN (insn);
2222 }
2223 }
2224
2225 /* Mark the label each jump jumps to.
2226 Combine consecutive labels, and count uses of labels.
2227
2228 For each label, make a chain (using `jump_chain')
2229 of all the *unconditional* jumps that jump to it;
2230 also make a chain of all returns.
2231
2232 CROSS_JUMP indicates whether we are doing cross jumping
2233 and, if we are, whether we will be paying attention to
2234 death notes. */
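/* A sketch of the jump_chain encoding as used here (inferred from
   this code rather than separately documented): for a label L that
   is the target of simplejumps J1 and then J2,

       jump_chain[INSN_UID (L)]  == J2
       jump_chain[INSN_UID (J2)] == J1
       jump_chain[INSN_UID (J1)] == 0

   so each label heads a singly linked list of the unconditional
   jumps to it, and slot 0 plays the same role for RETURN insns.  */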
2235
2236 static void
2237 mark_all_labels (f, cross_jump)
2238 rtx f;
2239 int cross_jump;
2240 {
2241 rtx insn;
2242
2243 for (insn = f; insn; insn = NEXT_INSN (insn))
2244 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2245 {
2246 mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
2247 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2248 {
2249 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2250 {
2251 jump_chain[INSN_UID (insn)]
2252 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2253 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2254 }
2255 if (GET_CODE (PATTERN (insn)) == RETURN)
2256 {
2257 jump_chain[INSN_UID (insn)] = jump_chain[0];
2258 jump_chain[0] = insn;
2259 }
2260 }
2261 }
2262 }
2263
2264 /* Delete all labels already not referenced.
2265 Also find and return the last insn. */
2266
2267 static rtx
2268 delete_unreferenced_labels (f)
2269 rtx f;
2270 {
2271 rtx final = NULL_RTX;
2272 rtx insn;
2273
2274 for (insn = f; insn; )
2275 {
2276 if (GET_CODE (insn) == CODE_LABEL
2277 && LABEL_NUSES (insn) == 0
2278 && LABEL_ALTERNATE_NAME (insn) == NULL)
2279 insn = delete_insn (insn);
2280 else
2281 {
2282 final = insn;
2283 insn = NEXT_INSN (insn);
2284 }
2285 }
2286
2287 return final;
2288 }
2289
2290 /* Delete various simple forms of moves which have no necessary
2291 side effect. */
2292
2293 static void
2294 delete_noop_moves (f)
2295 rtx f;
2296 {
2297 rtx insn, next;
2298
2299 for (insn = f; insn; )
2300 {
2301 next = NEXT_INSN (insn);
2302
2303 if (GET_CODE (insn) == INSN)
2304 {
2305 register rtx body = PATTERN (insn);
2306
2307 /* Combine stack_adjusts with following push_insns. */
2308 #ifdef PUSH_ROUNDING
2309 if (GET_CODE (body) == SET
2310 && SET_DEST (body) == stack_pointer_rtx
2311 && GET_CODE (SET_SRC (body)) == PLUS
2312 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
2313 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
2314 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
2315 {
2316 rtx p;
2317 rtx stack_adjust_insn = insn;
2318 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
2319 int total_pushed = 0;
2320 int pushes = 0;
2321
2322 /* Find all successive push insns. */
2323 p = insn;
2324 /* Don't convert more than three pushes;
2325 that starts adding too many displaced addresses
2326 and the whole thing starts becoming a losing
2327 proposition. */
2328 while (pushes < 3)
2329 {
2330 rtx pbody, dest;
2331 p = next_nonnote_insn (p);
2332 if (p == 0 || GET_CODE (p) != INSN)
2333 break;
2334 pbody = PATTERN (p);
2335 if (GET_CODE (pbody) != SET)
2336 break;
2337 dest = SET_DEST (pbody);
2338 /* Allow a no-op move between the adjust and the push. */
2339 if (GET_CODE (dest) == REG
2340 && GET_CODE (SET_SRC (pbody)) == REG
2341 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2342 continue;
2343 if (! (GET_CODE (dest) == MEM
2344 && GET_CODE (XEXP (dest, 0)) == POST_INC
2345 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2346 break;
2347 pushes++;
2348 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
2349 > stack_adjust_amount)
2350 break;
2351 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2352 }
2353
2354 /* Discard the amount pushed from the stack adjust;
2355 maybe eliminate it entirely. */
2356 if (total_pushed >= stack_adjust_amount)
2357 {
2358 delete_computation (stack_adjust_insn);
2359 total_pushed = stack_adjust_amount;
2360 }
2361 else
2362 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
2363 = GEN_INT (stack_adjust_amount - total_pushed);
2364
2365 /* Change the appropriate push insns to ordinary stores. */
2366 p = insn;
2367 while (total_pushed > 0)
2368 {
2369 rtx pbody, dest;
2370 p = next_nonnote_insn (p);
2371 if (GET_CODE (p) != INSN)
2372 break;
2373 pbody = PATTERN (p);
2374 if (GET_CODE (pbody) != SET)
2375 break;
2376 dest = SET_DEST (pbody);
2377 /* Allow a no-op move between the adjust and the push. */
2378 if (GET_CODE (dest) == REG
2379 && GET_CODE (SET_SRC (pbody)) == REG
2380 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2381 continue;
2382 if (! (GET_CODE (dest) == MEM
2383 && GET_CODE (XEXP (dest, 0)) == POST_INC
2384 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2385 break;
2386 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2387 /* If this push doesn't fully fit in the space
2388 of the stack adjust that we deleted,
2389 make another stack adjust here for what we
2390 didn't use up. There should be peepholes
2391 to recognize the resulting sequence of insns. */
2392 if (total_pushed < 0)
2393 {
2394 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
2395 GEN_INT (- total_pushed)),
2396 p);
2397 break;
2398 }
2399 XEXP (dest, 0)
2400 = plus_constant (stack_pointer_rtx, total_pushed);
2401 }
2402 }
2403 #endif
2404
2405 /* Detect and delete no-op move instructions
2406 resulting from not allocating a parameter in a register. */
2407
2408 if (GET_CODE (body) == SET
2409 && (SET_DEST (body) == SET_SRC (body)
2410 || (GET_CODE (SET_DEST (body)) == MEM
2411 && GET_CODE (SET_SRC (body)) == MEM
2412 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2413 && ! (GET_CODE (SET_DEST (body)) == MEM
2414 && MEM_VOLATILE_P (SET_DEST (body)))
2415 && ! (GET_CODE (SET_SRC (body)) == MEM
2416 && MEM_VOLATILE_P (SET_SRC (body))))
2417 delete_computation (insn);
2418
2419 /* Detect and ignore no-op move instructions
2420 resulting from smart or fortuitous register allocation. */
2421
2422 else if (GET_CODE (body) == SET)
2423 {
2424 int sreg = true_regnum (SET_SRC (body));
2425 int dreg = true_regnum (SET_DEST (body));
2426
2427 if (sreg == dreg && sreg >= 0)
2428 delete_insn (insn);
2429 else if (sreg >= 0 && dreg >= 0)
2430 {
2431 rtx trial;
2432 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2433 sreg, NULL_PTR, dreg,
2434 GET_MODE (SET_SRC (body)));
2435
2436 if (tem != 0
2437 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2438 {
2439 /* DREG may have been the target of a REG_DEAD note in
2440 the insn which makes INSN redundant. If so, reorg
2441 would still think it is dead. So search for such a
2442 note and delete it if we find it. */
2443 if (! find_regno_note (insn, REG_UNUSED, dreg))
2444 for (trial = prev_nonnote_insn (insn);
2445 trial && GET_CODE (trial) != CODE_LABEL;
2446 trial = prev_nonnote_insn (trial))
2447 if (find_regno_note (trial, REG_DEAD, dreg))
2448 {
2449 remove_death (dreg, trial);
2450 break;
2451 }
2452
2453 /* Deleting insn could lose a death-note for SREG. */
2454 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2455 {
2456 /* Change this into a USE so that we won't emit
2457 code for it, but still can keep the note. */
2458 PATTERN (insn)
2459 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2460 INSN_CODE (insn) = -1;
2461 /* Remove all reg notes but the REG_DEAD one. */
2462 REG_NOTES (insn) = trial;
2463 XEXP (trial, 1) = NULL_RTX;
2464 }
2465 else
2466 delete_insn (insn);
2467 }
2468 }
2469 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2470 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2471 NULL_PTR, 0,
2472 GET_MODE (SET_DEST (body))))
2473 {
2474 /* This handles the case where we have two consecutive
2475 assignments of the same constant to pseudos that didn't
2476 get a hard reg. Each SET from the constant will be
2477 converted into a SET of the spill register and an
2478 output reload will be made following it. This produces
2479 two loads of the same constant into the same spill
2480 register. */
2481
2482 rtx in_insn = insn;
2483
2484 /* Look back for a death note for the first reg.
2485 If there is one, it is no longer accurate. */
2486 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2487 {
2488 if ((GET_CODE (in_insn) == INSN
2489 || GET_CODE (in_insn) == JUMP_INSN)
2490 && find_regno_note (in_insn, REG_DEAD, dreg))
2491 {
2492 remove_death (dreg, in_insn);
2493 break;
2494 }
2495 in_insn = PREV_INSN (in_insn);
2496 }
2497
2498 /* Delete the second load of the value. */
2499 delete_insn (insn);
2500 }
2501 }
2502 else if (GET_CODE (body) == PARALLEL)
2503 {
2504 /* If each part is a set between two identical registers or
2505 a USE or CLOBBER, delete the insn. */
2506 int i, sreg, dreg;
2507 rtx tem;
2508
2509 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2510 {
2511 tem = XVECEXP (body, 0, i);
2512 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2513 continue;
2514
2515 if (GET_CODE (tem) != SET
2516 || (sreg = true_regnum (SET_SRC (tem))) < 0
2517 || (dreg = true_regnum (SET_DEST (tem))) < 0
2518 || dreg != sreg)
2519 break;
2520 }
2521
2522 if (i < 0)
2523 delete_insn (insn);
2524 }
2525 /* Also delete insns to store bit fields if they are no-ops. */
2526 /* Not worth the hair to detect this in the big-endian case. */
2527 else if (! BYTES_BIG_ENDIAN
2528 && GET_CODE (body) == SET
2529 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2530 && XEXP (SET_DEST (body), 2) == const0_rtx
2531 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2532 && ! (GET_CODE (SET_SRC (body)) == MEM
2533 && MEM_VOLATILE_P (SET_SRC (body))))
2534 delete_insn (insn);
2535 }
2536 insn = next;
2537 }
2538 }
2539
2540 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2541 If so indicate that this function can drop off the end by returning
2542 1, else return 0.
2543
2547 DELETE_FINAL_NOTE indicates whether we should delete the note
2548 if we find it. */
2549
2550 static int
2551 calculate_can_reach_end (last, delete_final_note)
2552 rtx last;
2553 int delete_final_note;
2554 {
2555 rtx insn = last;
2556 int n_labels = 1;
2557
2558 while (insn != NULL_RTX)
2559 {
2560 int ok = 0;
2561
2562 /* One label can follow the end-note: the return label. */
2563 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2564 ok = 1;
2565 /* Ordinary insns can follow it if returning a structure. */
2566 else if (GET_CODE (insn) == INSN)
2567 ok = 1;
2568 /* If the machine uses explicit RETURN insns and no epilogue,
2569 then one of them follows the note. */
2570 else if (GET_CODE (insn) == JUMP_INSN
2571 && GET_CODE (PATTERN (insn)) == RETURN)
2572 ok = 1;
2573 /* A barrier can follow the return insn. */
2574 else if (GET_CODE (insn) == BARRIER)
2575 ok = 1;
2576 /* Other kinds of notes can follow also. */
2577 else if (GET_CODE (insn) == NOTE
2578 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2579 ok = 1;
2580
2581 if (ok != 1)
2582 break;
2583
2584 insn = PREV_INSN (insn);
2585 }
2586
2587 /* See if we backed up to the appropriate type of note. */
2588 if (insn != NULL_RTX
2589 && GET_CODE (insn) == NOTE
2590 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
2591 {
2592 if (delete_final_note)
2593 delete_insn (insn);
2594 return 1;
2595 }
2596
2597 return 0;
2598 }
2599
2600 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2601 jump. Assume that this unconditional jump is to the exit test code. If
2602 the code is sufficiently simple, make a copy of it before INSN,
2603 followed by a jump to the exit of the loop. Then delete the unconditional
2604 jump after INSN.
2605
2606 Return 1 if we made the change, else 0.
2607
2608 This is only safe immediately after a regscan pass because it uses the
2609 values of regno_first_uid and regno_last_uid. */
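/* Roughly (a schematic source-level sketch):

       goto TEST;                      if (cond) goto TOP;  <- copied test
     TOP:                              goto EXIT;           <- new jump
       body;                ==>      TOP:
     TEST:                             body;
       if (cond) goto TOP;          TEST:
                                       if (cond) goto TOP;
                                    EXIT:

   The exit test insns are duplicated in front of the loop, followed
   by a jump to the loop exit, and the initial unconditional jump to
   TEST is deleted.  */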
2610
2611 static int
2612 duplicate_loop_exit_test (loop_start)
2613 rtx loop_start;
2614 {
2615 rtx insn, set, reg, p, link;
2616 rtx copy = 0, first_copy = 0;
2617 int num_insns = 0;
2618 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2619 rtx lastexit;
2620 int max_reg = max_reg_num ();
2621 rtx *reg_map = 0;
2622
2623 /* Scan the exit code. We do not perform this optimization if any insn:
2624
2625 is a CALL_INSN
2626 is a CODE_LABEL
2627 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2628 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2629 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2630 is not valid.
2631
2632 We also do not do this if we find an insn with ASM_OPERANDS. While
2633 this restriction should not be necessary, copying an insn with
2634 ASM_OPERANDS can confuse asm_noperands in some cases.
2635
2636 Also, don't do this if the exit code is more than 20 insns. */
2637
2638 for (insn = exitcode;
2639 insn
2640 && ! (GET_CODE (insn) == NOTE
2641 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2642 insn = NEXT_INSN (insn))
2643 {
2644 switch (GET_CODE (insn))
2645 {
2646 case CODE_LABEL:
2647 case CALL_INSN:
2648 return 0;
2649 case NOTE:
2650 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2651 a jump immediately after the loop start that branches outside
2652 the loop but within an outer loop, near the exit test.
2653 If we copied this exit test and created a phony
2654 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2655 before the exit test look as if they could be safely moved
2656 out of the loop even though they may never actually be executed.
2657 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2658
2659 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2660 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2661 return 0;
2662
2663 if (optimize < 2
2664 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2665 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2666 /* If we were to duplicate this code, we would not move
2667 the BLOCK notes, and so debugging the moved code would
2668 be difficult. Thus, we only move the code with -O2 or
2669 higher. */
2670 return 0;
2671
2672 break;
2673 case JUMP_INSN:
2674 case INSN:
2675 /* The code below would grossly mishandle REG_WAS_0 notes,
2676 so get rid of them here. */
2677 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2678 remove_note (insn, p);
2679 if (++num_insns > 20
2680 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2681 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2682 return 0;
2683 break;
2684 default:
2685 break;
2686 }
2687 }
2688
2689 /* Unless INSN is zero, we can do the optimization. */
2690 if (insn == 0)
2691 return 0;
2692
2693 lastexit = insn;
2694
2695 /* See if any insn sets a register only used in the loop exit code and
2696 not a user variable. If so, replace it with a new register. */
2697 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2698 if (GET_CODE (insn) == INSN
2699 && (set = single_set (insn)) != 0
2700 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2701 || (GET_CODE (reg) == SUBREG
2702 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2703 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2704 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
2705 {
2706 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2707 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2708 break;
2709
2710 if (p != lastexit)
2711 {
2712 /* We can do the replacement. Allocate reg_map if this is the
2713 first replacement we found. */
2714 if (reg_map == 0)
2715 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
2716
2717 REG_LOOP_TEST_P (reg) = 1;
2718
2719 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2720 }
2721 }
2722
2723 /* Now copy each insn. */
2724 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2725 {
2726 switch (GET_CODE (insn))
2727 {
2728 case BARRIER:
2729 copy = emit_barrier_before (loop_start);
2730 break;
2731 case NOTE:
2732 /* Only copy line-number notes. */
2733 if (NOTE_LINE_NUMBER (insn) >= 0)
2734 {
2735 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2736 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2737 }
2738 break;
2739
2740 case INSN:
2741 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
2742 if (reg_map)
2743 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2744
2745 mark_jump_label (PATTERN (copy), copy, 0, 0);
2746
2747 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2748 make them. */
2749 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2750 if (REG_NOTE_KIND (link) != REG_LABEL)
2751 REG_NOTES (copy)
2752 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
2753 XEXP (link, 0),
2754 REG_NOTES (copy)));
2755 if (reg_map && REG_NOTES (copy))
2756 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2757 break;
2758
2759 case JUMP_INSN:
2760 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)), loop_start);
2761 if (reg_map)
2762 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2763 mark_jump_label (PATTERN (copy), copy, 0, 0);
2764 if (REG_NOTES (insn))
2765 {
2766 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
2767 if (reg_map)
2768 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2769 }
2770
2771 /* If this is a simple jump, add it to the jump chain. */
2772
2773 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2774 && simplejump_p (copy))
2775 {
2776 jump_chain[INSN_UID (copy)]
2777 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2778 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2779 }
2780 break;
2781
2782 default:
2783 abort ();
2784 }
2785
2786 /* Record the first insn we copied. We need it so that we can
2787 scan the copied insns for new pseudo registers. */
2788 if (! first_copy)
2789 first_copy = copy;
2790 }
2791
2792 /* Now clean up by emitting a jump to the end label and deleting the jump
2793 at the start of the loop. */
2794 if (! copy || GET_CODE (copy) != BARRIER)
2795 {
2796 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2797 loop_start);
2798
2799 /* Record the first insn we copied. We need it so that we can
2800 scan the copied insns for new pseudo registers. This may not
2801 be strictly necessary since we should have copied at least one
2802 insn above. But I am going to be safe. */
2803 if (! first_copy)
2804 first_copy = copy;
2805
2806 mark_jump_label (PATTERN (copy), copy, 0, 0);
2807 if (INSN_UID (copy) < max_jump_chain
2808 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2809 {
2810 jump_chain[INSN_UID (copy)]
2811 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2812 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2813 }
2814 emit_barrier_before (loop_start);
2815 }
2816
2817 /* Now scan from the first insn we copied to the last insn we copied
2818 (copy) for new pseudo registers. Do this after the code to jump to
2819 the end label since that might create a new pseudo too. */
2820 reg_scan_update (first_copy, copy, max_reg);
2821
2822 /* Mark the exit code as the virtual top of the converted loop. */
2823 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2824
2825 delete_insn (next_nonnote_insn (loop_start));
2826
2827 /* Clean up. */
2828 if (reg_map)
2829 free (reg_map);
2830
2831 return 1;
2832 }
2833 \f
2834 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2835 loop-end notes between START and END out before START. Assume that
2836 END is not such a note. START may be such a note. Returns the value
2837 of the new starting insn, which may be different if the original start
2838 was such a note. */
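/* For instance (schematic): with START = insn1 in

       insn1; NOTE_INSN_LOOP_BEG; insn2;

   the note is moved out in front, giving

       NOTE_INSN_LOOP_BEG; insn1; insn2;

   and insn1 is still the returned start.  Had START itself been the
   note, the new START would simply be the following insn.  */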
2839
2840 rtx
2841 squeeze_notes (start, end)
2842 rtx start, end;
2843 {
2844 rtx insn;
2845 rtx next;
2846
2847 for (insn = start; insn != end; insn = next)
2848 {
2849 next = NEXT_INSN (insn);
2850 if (GET_CODE (insn) == NOTE
2851 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2852 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2853 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2854 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2855 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2856 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2857 {
2858 if (insn == start)
2859 start = next;
2860 else
2861 {
2862 rtx prev = PREV_INSN (insn);
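/* Splice INSN out of its current position and relink it
   immediately before START; its old neighbors PREV and
   NEXT become adjacent.  */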
2863 PREV_INSN (insn) = PREV_INSN (start);
2864 NEXT_INSN (insn) = start;
2865 NEXT_INSN (PREV_INSN (insn)) = insn;
2866 PREV_INSN (NEXT_INSN (insn)) = insn;
2867 NEXT_INSN (prev) = next;
2868 PREV_INSN (next) = prev;
2869 }
2870 }
2871 }
2872
2873 return start;
2874 }
2875 \f
2876 /* Compare the instructions before insn E1 with those before E2
2877 to find an opportunity for cross jumping.
2878 (This means detecting identical sequences of insns followed by
2879 jumps to the same place, or followed by a label and a jump
2880 to that label, and replacing one with a jump to the other.)
2881
2882 Assume E1 is a jump that jumps to label E2
2883 (that is not always true but it might as well be).
2884 Find the longest possible equivalent sequences
2885 and store the first insns of those sequences into *F1 and *F2.
2886 Store zero there if no equivalent preceding instructions are found.
2887
2888 We give up if we find a label in stream 1.
2889 Actually we could transfer that label into stream 2. */
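/* An illustrative sketch (hypothetical insns): given

         x = a + b;
         y = x * 2;
         goto L;
         ...
         x = a + b;    <- the same two insns also precede L
         y = x * 2;
       L: ...

   the two insns before the jump are deleted and the jump is
   redirected to a new label placed in front of the surviving copy,
   so the common tail is emitted only once.  */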
2890
2891 static void
2892 find_cross_jump (e1, e2, minimum, f1, f2)
2893 rtx e1, e2;
2894 int minimum;
2895 rtx *f1, *f2;
2896 {
2897 register rtx i1 = e1, i2 = e2;
2898 register rtx p1, p2;
2899 int lose = 0;
2900
2901 rtx last1 = 0, last2 = 0;
2902 rtx afterlast1 = 0, afterlast2 = 0;
2903
2904 *f1 = 0;
2905 *f2 = 0;
2906
2907 while (1)
2908 {
2909 i1 = prev_nonnote_insn (i1);
2910
2911 i2 = PREV_INSN (i2);
2912 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2913 i2 = PREV_INSN (i2);
2914
2915 if (i1 == 0)
2916 break;
2917
2918 /* Don't allow the range of insns preceding E1 or E2
2919 to include the other (E2 or E1). */
2920 if (i2 == e1 || i1 == e2)
2921 break;
2922
2923 /* If we will get to this code by jumping, those jumps will be
2924 tensioned to go directly to the new label (before I2),
2925 so this cross-jumping won't cost extra. So reduce the minimum. */
2926 if (GET_CODE (i1) == CODE_LABEL)
2927 {
2928 --minimum;
2929 break;
2930 }
2931
2932 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2933 break;
2934
2935 /* Avoid moving insns across EH regions if either of the insns
2936 can throw. */
2937 if (flag_exceptions
2938 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
2939 && !in_same_eh_region (i1, i2))
2940 break;
2941
2942 p1 = PATTERN (i1);
2943 p2 = PATTERN (i2);
2944
2945 /* If this is a CALL_INSN, compare register usage information.
2946 If we don't check this on stack register machines, the two
2947 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2948 numbers of stack registers in the same basic block.
2949 If we don't check this on machines with delay slots, a delay slot may
2950 be filled that clobbers a parameter expected by the subroutine.
2951
2952 ??? We take the simple route for now and assume that if they're
2953 equal, they were constructed identically. */
2954
2955 if (GET_CODE (i1) == CALL_INSN
2956 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2957 CALL_INSN_FUNCTION_USAGE (i2)))
2958 lose = 1;
2959
2960 #ifdef STACK_REGS
2961 /* If cross_jump_death_matters is not 0, the insn's mode
2962 indicates whether or not the insn contains any stack-like
2963 regs. */
2964
2965 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
2966 {
2967 /* If register stack conversion has already been done, then
2968 death notes must also be compared before it is certain that
2969 the two instruction streams match. */
2970
2971 rtx note;
2972 HARD_REG_SET i1_regset, i2_regset;
2973
2974 CLEAR_HARD_REG_SET (i1_regset);
2975 CLEAR_HARD_REG_SET (i2_regset);
2976
2977 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2978 if (REG_NOTE_KIND (note) == REG_DEAD
2979 && STACK_REG_P (XEXP (note, 0)))
2980 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2981
2982 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2983 if (REG_NOTE_KIND (note) == REG_DEAD
2984 && STACK_REG_P (XEXP (note, 0)))
2985 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2986
2987 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2988
2989 lose = 1;
2990
2991 done:
2992 ;
2993 }
2994 #endif
2995
2996 /* Don't allow old-style asm or volatile extended asms to be accepted
2997 for cross jumping purposes. It is conceptually correct to allow
2998 them, since cross-jumping preserves the dynamic instruction order
2999 even though it is changing the static instruction order. However,
3000 if an asm is being used to emit an assembler pseudo-op, such as
3001 the MIPS `.set reorder' pseudo-op, then the static instruction order
3002 matters and it must be preserved. */
3003 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
3004 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
3005 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
3006 lose = 1;
3007
3008 if (lose || GET_CODE (p1) != GET_CODE (p2)
3009 || ! rtx_renumbered_equal_p (p1, p2))
3010 {
3011 /* The following code helps take care of G++ cleanups. */
3012 rtx equiv1;
3013 rtx equiv2;
3014
3015 if (!lose && GET_CODE (p1) == GET_CODE (p2)
3016 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
3017 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
3018 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
3019 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
3020 /* If the equivalences are not to a constant, they may
3021 reference pseudos that no longer exist, so we can't
3022 use them. */
3023 && CONSTANT_P (XEXP (equiv1, 0))
3024 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3025 {
3026 rtx s1 = single_set (i1);
3027 rtx s2 = single_set (i2);
3028 if (s1 != 0 && s2 != 0
3029 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3030 {
3031 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3032 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3033 if (! rtx_renumbered_equal_p (p1, p2))
3034 cancel_changes (0);
3035 else if (apply_change_group ())
3036 goto win;
3037 }
3038 }
3039
3040 /* Insns fail to match; cross jumping is limited to the following
3041 insns. */
3042
3043 #ifdef HAVE_cc0
3044 /* Don't allow the insn after a compare to be shared by
3045 cross-jumping unless the compare is also shared.
3046 Here, if either of these non-matching insns is a compare,
3047 exclude the following insn from possible cross-jumping. */
3048 if (sets_cc0_p (p1) || sets_cc0_p (p2))
3049 last1 = afterlast1, last2 = afterlast2, ++minimum;
3050 #endif
3051
3052 /* If cross-jumping here will feed a jump-around-jump
3053 optimization, this jump won't cost extra, so reduce
3054 the minimum. */
3055 if (GET_CODE (i1) == JUMP_INSN
3056 && JUMP_LABEL (i1)
3057 && prev_real_insn (JUMP_LABEL (i1)) == e1)
3058 --minimum;
3059 break;
3060 }
3061
3062 win:
3063 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3064 {
3065 /* Ok, this insn is potentially includable in a cross-jump here. */
3066 afterlast1 = last1, afterlast2 = last2;
3067 last1 = i1, last2 = i2, --minimum;
3068 }
3069 }
3070
3071 if (minimum <= 0 && last1 != 0 && last1 != e1)
3072 *f1 = last1, *f2 = last2;
3073 }
3074
3075 static void
3076 do_cross_jump (insn, newjpos, newlpos)
3077 rtx insn, newjpos, newlpos;
3078 {
3079 /* Find an existing label at this point
3080 or make a new one if there is none. */
3081 register rtx label = get_label_before (newlpos);
3082
3083 /* Make the same jump insn jump to the new point. */
3084 if (GET_CODE (PATTERN (insn)) == RETURN)
3085 {
3086 /* Remove from jump chain of returns. */
3087 delete_from_jump_chain (insn);
3088 /* Change the insn. */
3089 PATTERN (insn) = gen_jump (label);
3090 INSN_CODE (insn) = -1;
3091 JUMP_LABEL (insn) = label;
3092 LABEL_NUSES (label)++;
3093 /* Add to new the jump chain. */
3094 if (INSN_UID (label) < max_jump_chain
3095 && INSN_UID (insn) < max_jump_chain)
3096 {
3097 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
3098 jump_chain[INSN_UID (label)] = insn;
3099 }
3100 }
3101 else
3102 redirect_jump (insn, label);
3103
3104 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3105 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3106 the NEWJPOS stream. */
3107
3108 while (newjpos != insn)
3109 {
3110 rtx lnote;
3111
3112 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
3113 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
3114 || REG_NOTE_KIND (lnote) == REG_EQUIV)
3115 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
3116 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
3117 remove_note (newlpos, lnote);
3118
3119 delete_insn (newjpos);
3120 newjpos = next_real_insn (newjpos);
3121 newlpos = next_real_insn (newlpos);
3122 }
3123 }
3124 \f
3125 /* Return the label before INSN, or put a new label there. */
3126
3127 rtx
3128 get_label_before (insn)
3129 rtx insn;
3130 {
3131 rtx label;
3132
3133 /* Find an existing label at this point
3134 or make a new one if there is none. */
3135 label = prev_nonnote_insn (insn);
3136
3137 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3138 {
3139 rtx prev = PREV_INSN (insn);
3140
3141 label = gen_label_rtx ();
3142 emit_label_after (label, prev);
3143 LABEL_NUSES (label) = 0;
3144 }
3145 return label;
3146 }
3147
3148 /* Return the label after INSN, or put a new label there. */
3149
3150 rtx
3151 get_label_after (insn)
3152 rtx insn;
3153 {
3154 rtx label;
3155
3156 /* Find an existing label at this point
3157 or make a new one if there is none. */
3158 label = next_nonnote_insn (insn);
3159
3160 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3161 {
3162 label = gen_label_rtx ();
3163 emit_label_after (label, insn);
3164 LABEL_NUSES (label) = 0;
3165 }
3166 return label;
3167 }
3168 \f
3169 /* Return 1 if INSN is a jump that jumps to right after TARGET
3170 only on the condition that TARGET itself would drop through.
3171 Assumes that TARGET is a conditional jump. */
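/* An illustrative sketch (hypothetical conditions):

       TARGET:  if (a < b) goto L1;
       L:       ...              <- JUMP_LABEL (INSN), just after TARGET
                ...
       INSN:    if (a >= b) goto L;

   INSN transfers to the insn after TARGET exactly when TARGET
   would drop through, so jump_back_p (INSN, TARGET) returns 1.  */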
3172
3173 static int
3174 jump_back_p (insn, target)
3175 rtx insn, target;
3176 {
3177 rtx cinsn, ctarget;
3178 enum rtx_code codei, codet;
3179
3180 if (simplejump_p (insn) || ! condjump_p (insn)
3181 || simplejump_p (target)
3182 || target != prev_real_insn (JUMP_LABEL (insn)))
3183 return 0;
3184
3185 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3186 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3187
3188 codei = GET_CODE (cinsn);
3189 codet = GET_CODE (ctarget);
3190
3191 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3192 {
3193 if (! can_reverse_comparison_p (cinsn, insn))
3194 return 0;
3195 codei = reverse_condition (codei);
3196 }
3197
3198 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3199 {
3200 if (! can_reverse_comparison_p (ctarget, target))
3201 return 0;
3202 codet = reverse_condition (codet);
3203 }
3204
3205 return (codei == codet
3206 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3207 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3208 }
3209 \f
3210 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3211 return non-zero if it is safe to reverse this comparison. It is if our
3212 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3213 this is known to be an integer comparison. */
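/* For example, under IEEE semantics: if X is a NaN, both (lt X 0)
   and (ge X 0) are false, so LT cannot simply be reversed to GE for
   floating point; EQ and NE, by contrast, are exact complements of
   each other even when a NaN is involved.  */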
3214
3215 int
3216 can_reverse_comparison_p (comparison, insn)
3217 rtx comparison;
3218 rtx insn;
3219 {
3220 rtx arg0;
3221
3222 /* If this is not actually a comparison, we can't reverse it. */
3223 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3224 return 0;
3225
3226 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3227 /* If this is an NE comparison, it is safe to reverse it to an EQ
3228 comparison and vice versa, even for floating point. If no operands
3229 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3230 always false and NE is always true, so the reversal is also valid. */
3231 || flag_fast_math
3232 || GET_CODE (comparison) == NE
3233 || GET_CODE (comparison) == EQ)
3234 return 1;
3235
3236 arg0 = XEXP (comparison, 0);
3237
3238 /* Make sure ARG0 is one of the actual objects being compared. If we
3239 can't do this, we can't be sure the comparison can be reversed.
3240
3241 Handle cc0 and a MODE_CC register. */
3242 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3243 #ifdef HAVE_cc0
3244 || arg0 == cc0_rtx
3245 #endif
3246 )
3247 {
3248 rtx prev;
3249 rtx set;
3250
3251 /* First see if the condition code mode alone is enough to say we can
3252 reverse the condition.  If not, then search backwards for a set of
3253 ARG0.  We do not need to check for an insn clobbering it since valid
3254 code will contain a set with no intervening clobber.  But
3255 stop when we reach a label. */
3256 #ifdef REVERSIBLE_CC_MODE
3257 if (GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC
3258 && REVERSIBLE_CC_MODE (GET_MODE (arg0)))
3259 return 1;
3260 #endif
3261
3262 for (prev = prev_nonnote_insn (insn);
3263 prev != 0 && GET_CODE (prev) != CODE_LABEL;
3264 prev = prev_nonnote_insn (prev))
3265 if ((set = single_set (prev)) != 0
3266 && rtx_equal_p (SET_DEST (set), arg0))
3267 {
3268 arg0 = SET_SRC (set);
3269
3270 if (GET_CODE (arg0) == COMPARE)
3271 arg0 = XEXP (arg0, 0);
3272 break;
3273 }
3274 }
3275
3276 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3277 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3278 return (GET_CODE (arg0) == CONST_INT
3279 || (GET_MODE (arg0) != VOIDmode
3280 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3281 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3282 }
3283
3284 /* Given an rtx-code for a comparison, return the code for the negated
3285 comparison. If no such code exists, return UNKNOWN.
3286
3287 WATCH OUT! reverse_condition is not safe to use on a jump that might
3288 be acting on the results of an IEEE floating point comparison, because
3289 of the special treatment of non-signaling nans in comparisons.
3290 Use can_reverse_comparison_p to be sure. */
3291
3292 enum rtx_code
3293 reverse_condition (code)
3294 enum rtx_code code;
3295 {
3296 switch (code)
3297 {
3298 case EQ:
3299 return NE;
3300 case NE:
3301 return EQ;
3302 case GT:
3303 return LE;
3304 case GE:
3305 return LT;
3306 case LT:
3307 return GE;
3308 case LE:
3309 return GT;
3310 case GTU:
3311 return LEU;
3312 case GEU:
3313 return LTU;
3314 case LTU:
3315 return GEU;
3316 case LEU:
3317 return GTU;
3318 case UNORDERED:
3319 return ORDERED;
3320 case ORDERED:
3321 return UNORDERED;
3322
3323 case UNLT:
3324 case UNLE:
3325 case UNGT:
3326 case UNGE:
3327 case UNEQ:
3328 case LTGT:
3329 return UNKNOWN;
3330
3331 default:
3332 abort ();
3333 }
3334 }
3335
3336 /* Similar, but we're allowed to generate unordered comparisons, which
3337 makes it safe for IEEE floating-point. Of course, we have to recognize
3338 that the target will support them too... */
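/* E.g. the IEEE-safe reverse of (gt X Y) is (unle X Y), which is
   true exactly when X <= Y or when either operand is a NaN -- just
   the cases in which (gt X Y) is false.  */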
3339
3340 enum rtx_code
3341 reverse_condition_maybe_unordered (code)
3342 enum rtx_code code;
3343 {
3344 /* Non-IEEE formats don't have unordered conditions. */
3345 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
3346 return reverse_condition (code);
3347
3348 switch (code)
3349 {
3350 case EQ:
3351 return NE;
3352 case NE:
3353 return EQ;
3354 case GT:
3355 return UNLE;
3356 case GE:
3357 return UNLT;
3358 case LT:
3359 return UNGE;
3360 case LE:
3361 return UNGT;
3362 case LTGT:
3363 return UNEQ;
3364 case GTU:
3365 return LEU;
3366 case GEU:
3367 return LTU;
3368 case LTU:
3369 return GEU;
3370 case LEU:
3371 return GTU;
3372 case UNORDERED:
3373 return ORDERED;
3374 case ORDERED:
3375 return UNORDERED;
3376 case UNLT:
3377 return GE;
3378 case UNLE:
3379 return GT;
3380 case UNGT:
3381 return LE;
3382 case UNGE:
3383 return LT;
3384 case UNEQ:
3385 return LTGT;
3386
3387 default:
3388 abort ();
3389 }
3390 }
3391
3392 /* Similar, but return the code when two operands of a comparison are swapped.
3393 This IS safe for IEEE floating-point. */
3394
3395 enum rtx_code
3396 swap_condition (code)
3397 enum rtx_code code;
3398 {
3399 switch (code)
3400 {
3401 case EQ:
3402 case NE:
3403 case UNORDERED:
3404 case ORDERED:
3405 case UNEQ:
3406 case LTGT:
3407 return code;
3408
3409 case GT:
3410 return LT;
3411 case GE:
3412 return LE;
3413 case LT:
3414 return GT;
3415 case LE:
3416 return GE;
3417 case GTU:
3418 return LTU;
3419 case GEU:
3420 return LEU;
3421 case LTU:
3422 return GTU;
3423 case LEU:
3424 return GEU;
3425 case UNLT:
3426 return UNGT;
3427 case UNLE:
3428 return UNGE;
3429 case UNGT:
3430 return UNLT;
3431 case UNGE:
3432 return UNLE;
3433
3434 default:
3435 abort ();
3436 }
3437 }
3438
3439 /* Given a comparison CODE, return the corresponding unsigned comparison.
3440 If CODE is an equality comparison or already an unsigned comparison,
3441 CODE is returned. */
3442
3443 enum rtx_code
3444 unsigned_condition (code)
3445 enum rtx_code code;
3446 {
3447 switch (code)
3448 {
3449 case EQ:
3450 case NE:
3451 case GTU:
3452 case GEU:
3453 case LTU:
3454 case LEU:
3455 return code;
3456
3457 case GT:
3458 return GTU;
3459 case GE:
3460 return GEU;
3461 case LT:
3462 return LTU;
3463 case LE:
3464 return LEU;
3465
3466 default:
3467 abort ();
3468 }
3469 }
3470
3471 /* Similarly, return the signed version of a comparison. */
3472
3473 enum rtx_code
3474 signed_condition (code)
3475 enum rtx_code code;
3476 {
3477 switch (code)
3478 {
3479 case EQ:
3480 case NE:
3481 case GT:
3482 case GE:
3483 case LT:
3484 case LE:
3485 return code;
3486
3487 case GTU:
3488 return GT;
3489 case GEU:
3490 return GE;
3491 case LTU:
3492 return LT;
3493 case LEU:
3494 return LE;
3495
3496 default:
3497 abort ();
3498 }
3499 }
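
/* For illustration: unsigned_condition (LT) == LTU and
   signed_condition (GEU) == GE, so the two functions above are inverses
   on the ordering codes.  EQ and NE pass through both unchanged, since
   signedness does not affect equality.  */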
3500 \f
3501 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3502 truth of CODE1 implies the truth of CODE2. */
3503
3504 int
3505 comparison_dominates_p (code1, code2)
3506 enum rtx_code code1, code2;
3507 {
3508 if (code1 == code2)
3509 return 1;
3510
3511 switch (code1)
3512 {
3513 case EQ:
3514 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
3515 || code2 == ORDERED)
3516 return 1;
3517 break;
3518
3519 case LT:
3520 if (code2 == LE || code2 == NE || code2 == ORDERED)
3521 return 1;
3522 break;
3523
3524 case GT:
3525 if (code2 == GE || code2 == NE || code2 == ORDERED)
3526 return 1;
3527 break;
3528
3529 case GE:
3530 case LE:
3531 if (code2 == ORDERED)
3532 return 1;
3533 break;
3534
3535 case LTGT:
3536 if (code2 == NE || code2 == ORDERED)
3537 return 1;
3538 break;
3539
3540 case LTU:
3541 if (code2 == LEU || code2 == NE)
3542 return 1;
3543 break;
3544
3545 case GTU:
3546 if (code2 == GEU || code2 == NE)
3547 return 1;
3548 break;
3549
3550 case UNORDERED:
3551 if (code2 == NE)
3552 return 1;
3553 break;
3554
3555 default:
3556 break;
3557 }
3558
3559 return 0;
3560 }
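
/* Dominance here is logical implication between comparisons of the same
   operands: comparison_dominates_p (EQ, LE) is 1 because a == b implies
   a <= b, while comparison_dominates_p (LE, EQ) is 0.  thread_jumps
   below uses this to decide when the outcome of one branch determines
   the outcome of a later branch on the same operands.  */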
3561 \f
3562 /* Return 1 if INSN is an unconditional jump and nothing else. */
3563
3564 int
3565 simplejump_p (insn)
3566 rtx insn;
3567 {
3568 return (GET_CODE (insn) == JUMP_INSN
3569 && GET_CODE (PATTERN (insn)) == SET
3570 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3571 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3572 }
3573
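/* For illustration, the only pattern accepted above is the bare
   unconditional form

     (set (pc) (label_ref L))

   A conditional branch instead has a SET_SRC such as

     (if_then_else (gt (reg) (const_int 0)) (label_ref L) (pc))

   and is recognized by condjump_p below.  */
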
3574 /* Return nonzero if INSN is a (possibly) conditional jump
3575 and nothing more. */
3576
3577 int
3578 condjump_p (insn)
3579 rtx insn;
3580 {
3581 register rtx x = PATTERN (insn);
3582
3583 if (GET_CODE (x) != SET
3584 || GET_CODE (SET_DEST (x)) != PC)
3585 return 0;
3586
3587 x = SET_SRC (x);
3588 if (GET_CODE (x) == LABEL_REF)
3589 return 1;
3590   return (GET_CODE (x) == IF_THEN_ELSE
3591 	  && ((GET_CODE (XEXP (x, 2)) == PC
3592 	       && (GET_CODE (XEXP (x, 1)) == LABEL_REF
3593 		   || GET_CODE (XEXP (x, 1)) == RETURN))
3594 	      || (GET_CODE (XEXP (x, 1)) == PC
3595 		  && (GET_CODE (XEXP (x, 2)) == LABEL_REF
3596 		      || GET_CODE (XEXP (x, 2)) == RETURN))));
3599 }
3600
3601 /* Return nonzero if INSN is a (possibly) conditional jump inside a
3602 PARALLEL. */
3603
3604 int
3605 condjump_in_parallel_p (insn)
3606 rtx insn;
3607 {
3608 register rtx x = PATTERN (insn);
3609
3610 if (GET_CODE (x) != PARALLEL)
3611 return 0;
3612 else
3613 x = XVECEXP (x, 0, 0);
3614
3615 if (GET_CODE (x) != SET)
3616 return 0;
3617 if (GET_CODE (SET_DEST (x)) != PC)
3618 return 0;
3619 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3620 return 1;
3621 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3622 return 0;
3623 if (XEXP (SET_SRC (x), 2) == pc_rtx
3624 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3625 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3626 return 1;
3627 if (XEXP (SET_SRC (x), 1) == pc_rtx
3628 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3629 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3630 return 1;
3631 return 0;
3632 }
3633
3634 /* Return the label of a conditional jump. */
3635
3636 rtx
3637 condjump_label (insn)
3638 rtx insn;
3639 {
3640 register rtx x = PATTERN (insn);
3641
3642 if (GET_CODE (x) == PARALLEL)
3643 x = XVECEXP (x, 0, 0);
3644 if (GET_CODE (x) != SET)
3645 return NULL_RTX;
3646 if (GET_CODE (SET_DEST (x)) != PC)
3647 return NULL_RTX;
3648 x = SET_SRC (x);
3649 if (GET_CODE (x) == LABEL_REF)
3650 return x;
3651 if (GET_CODE (x) != IF_THEN_ELSE)
3652 return NULL_RTX;
3653 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
3654 return XEXP (x, 1);
3655 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
3656 return XEXP (x, 2);
3657 return NULL_RTX;
3658 }
3659
3660 /* Return true if INSN is a (possibly conditional) return insn. */
3661
3662 static int
3663 returnjump_p_1 (loc, data)
3664 rtx *loc;
3665 void *data ATTRIBUTE_UNUSED;
3666 {
3667 rtx x = *loc;
3668 return x && GET_CODE (x) == RETURN;
3669 }
3670
3671 int
3672 returnjump_p (insn)
3673 rtx insn;
3674 {
3675 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
3676 }
3677
3678 /* Return true if INSN is a jump that only transfers control and
3679 nothing more. */
3680
3681 int
3682 onlyjump_p (insn)
3683 rtx insn;
3684 {
3685 rtx set;
3686
3687 if (GET_CODE (insn) != JUMP_INSN)
3688 return 0;
3689
3690 set = single_set (insn);
3691 if (set == NULL)
3692 return 0;
3693 if (GET_CODE (SET_DEST (set)) != PC)
3694 return 0;
3695 if (side_effects_p (SET_SRC (set)))
3696 return 0;
3697
3698 return 1;
3699 }
3700
3701 #ifdef HAVE_cc0
3702
3703 /* Return 1 if X is an RTX that does nothing but set the condition codes
3704 and CLOBBER or USE registers.
3705    Return -1 if X explicitly sets the condition codes
3706    but also does other things.  */
3707 
3708 int
3709 sets_cc0_p (x)
3710      rtx x;
3711 {
3712 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3713 return 1;
3714 if (GET_CODE (x) == PARALLEL)
3715 {
3716 int i;
3717 int sets_cc0 = 0;
3718 int other_things = 0;
3719 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3720 {
3721 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3722 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3723 sets_cc0 = 1;
3724 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3725 other_things = 1;
3726 }
3727 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3728 }
3729 return 0;
3730 }
3731 #endif
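
/* For illustration (cc0 targets only): given

     (set (cc0) (compare (reg) (const_int 0)))

   sets_cc0_p returns 1; given a PARALLEL that sets cc0 and also sets a
   register it returns -1; given a pattern that leaves cc0 alone it
   returns 0.  delete_computation below deletes a cc0 setter only in the
   first case, and only when it has no other side effects.  */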
3732 \f
3733 /* Follow any unconditional jump at LABEL;
3734 return the ultimate label reached by any such chain of jumps.
3735 If LABEL is not followed by a jump, return LABEL.
3736    If the chain loops or we can't find the end, return LABEL,
3737 since that tells caller to avoid changing the insn.
3738
3739 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3740 a USE or CLOBBER. */
3741
3742 rtx
3743 follow_jumps (label)
3744 rtx label;
3745 {
3746 register rtx insn;
3747 register rtx next;
3748 register rtx value = label;
3749 register int depth;
3750
3751 for (depth = 0;
3752 (depth < 10
3753 && (insn = next_active_insn (value)) != 0
3754 && GET_CODE (insn) == JUMP_INSN
3755 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3756 || GET_CODE (PATTERN (insn)) == RETURN)
3757 && (next = NEXT_INSN (insn))
3758 && GET_CODE (next) == BARRIER);
3759 depth++)
3760 {
3761 /* Don't chain through the insn that jumps into a loop
3762 from outside the loop,
3763 since that would create multiple loop entry jumps
3764 and prevent loop optimization. */
3765 rtx tem;
3766 if (!reload_completed)
3767 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3768 if (GET_CODE (tem) == NOTE
3769 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
3770 /* ??? Optional. Disables some optimizations, but makes
3771 gcov output more accurate with -O. */
3772 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
3773 return value;
3774
3775 /* If we have found a cycle, make the insn jump to itself. */
3776 if (JUMP_LABEL (insn) == label)
3777 return label;
3778
3779 tem = next_active_insn (JUMP_LABEL (insn));
3780 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3781 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3782 break;
3783
3784 value = JUMP_LABEL (insn);
3785 }
3786 if (depth == 10)
3787 return label;
3788 return value;
3789 }
3790
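/* For illustration (labels here are made up): given the chain

     L1: jump L2  ...  L2: jump L3  ...  L3: <non-jump insn>

   follow_jumps (L1) returns L3, so a branch to L1 can be retargeted
   straight at L3.  The depth limit of 10 bounds the walk, and the
   JUMP_LABEL (insn) == label test stops cycles such as L1: jump L1.  */
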
3791 /* Assuming that field IDX of X is a vector of label_refs,
3792 replace each of them by the ultimate label reached by it.
3793 Return nonzero if a change is made.
3794    We do not chain across a NOTE_INSN_LOOP_BEG unless reload has completed; see follow_jumps.  */
3795
3796 static int
3797 tension_vector_labels (x, idx)
3798 register rtx x;
3799 register int idx;
3800 {
3801 int changed = 0;
3802 register int i;
3803 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3804 {
3805 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3806 register rtx nlabel = follow_jumps (olabel);
3807 if (nlabel && nlabel != olabel)
3808 {
3809 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3810 ++LABEL_NUSES (nlabel);
3811 if (--LABEL_NUSES (olabel) == 0)
3812 delete_insn (olabel);
3813 changed = 1;
3814 }
3815 }
3816 return changed;
3817 }
3818 \f
3819 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3820 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3821 in INSN, then store one of them in JUMP_LABEL (INSN).
3822 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3823 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3824 Also, when there are consecutive labels, canonicalize on the last of them.
3825
3826 Note that two labels separated by a loop-beginning note
3827 must be kept distinct if we have not yet done loop-optimization,
3828 because the gap between them is where loop-optimize
3829 will want to move invariant code to. CROSS_JUMP tells us
3830    that loop optimization has already been done.
3831
3832 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3833 two labels distinct if they are separated by only USE or CLOBBER insns. */
3834
3835 static void
3836 mark_jump_label (x, insn, cross_jump, in_mem)
3837 register rtx x;
3838 rtx insn;
3839 int cross_jump;
3840 int in_mem;
3841 {
3842 register RTX_CODE code = GET_CODE (x);
3843 register int i;
3844 register const char *fmt;
3845
3846 switch (code)
3847 {
3848 case PC:
3849 case CC0:
3850 case REG:
3851 case SUBREG:
3852 case CONST_INT:
3853 case CONST_DOUBLE:
3854 case CLOBBER:
3855 case CALL:
3856 return;
3857
3858 case MEM:
3859 in_mem = 1;
3860 break;
3861
3862 case SYMBOL_REF:
3863 if (!in_mem)
3864 return;
3865
3866 /* If this is a constant-pool reference, see if it is a label. */
3867 if (CONSTANT_POOL_ADDRESS_P (x))
3868 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
3869 break;
3870
3871 case LABEL_REF:
3872 {
3873 rtx label = XEXP (x, 0);
3874 rtx olabel = label;
3875 rtx note;
3876 rtx next;
3877
3878 if (GET_CODE (label) != CODE_LABEL)
3879 abort ();
3880
3881 /* Ignore references to labels of containing functions. */
3882 if (LABEL_REF_NONLOCAL_P (x))
3883 break;
3884
3885 /* If there are other labels following this one,
3886 replace it with the last of the consecutive labels. */
3887 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3888 {
3889 if (GET_CODE (next) == CODE_LABEL)
3890 label = next;
3891 else if (cross_jump && GET_CODE (next) == INSN
3892 && (GET_CODE (PATTERN (next)) == USE
3893 || GET_CODE (PATTERN (next)) == CLOBBER))
3894 continue;
3895 else if (GET_CODE (next) != NOTE)
3896 break;
3897 else if (! cross_jump
3898 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3899 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
3900 /* ??? Optional. Disables some optimizations, but
3901 makes gcov output more accurate with -O. */
3902 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
3903 break;
3904 }
3905
3906 XEXP (x, 0) = label;
3907 if (! insn || ! INSN_DELETED_P (insn))
3908 ++LABEL_NUSES (label);
3909
3910 if (insn)
3911 {
3912 if (GET_CODE (insn) == JUMP_INSN)
3913 JUMP_LABEL (insn) = label;
3914
3915 /* If we've changed OLABEL and we had a REG_LABEL note
3916 for it, update it as well. */
3917 else if (label != olabel
3918 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3919 XEXP (note, 0) = label;
3920
3921 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3922 is one. */
3923 else if (! find_reg_note (insn, REG_LABEL, label))
3924 {
3925 	     /* This code used to ignore labels which referred to dispatch
3926 		tables to avoid flow.c generating worse code.
3927 
3928 		However, in the presence of global optimizations like
3929 gcse which call find_basic_blocks without calling
3930 life_analysis, not recording such labels will lead
3931 to compiler aborts because of inconsistencies in the
3932 flow graph. So we go ahead and record the label.
3933
3934 It may also be the case that the optimization argument
3935 is no longer valid because of the more accurate cfg
3936 we build in find_basic_blocks -- it no longer pessimizes
3937 code when it finds a REG_LABEL note. */
3938 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
3939 REG_NOTES (insn));
3940 }
3941 }
3942 return;
3943 }
3944
3945 /* Do walk the labels in a vector, but not the first operand of an
3946 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3947 case ADDR_VEC:
3948 case ADDR_DIFF_VEC:
3949 if (! INSN_DELETED_P (insn))
3950 {
3951 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
3952
3953 for (i = 0; i < XVECLEN (x, eltnum); i++)
3954 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
3955 cross_jump, in_mem);
3956 }
3957 return;
3958
3959 default:
3960 break;
3961 }
3962
3963 fmt = GET_RTX_FORMAT (code);
3964 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3965 {
3966 if (fmt[i] == 'e')
3967 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
3968 else if (fmt[i] == 'E')
3969 {
3970 register int j;
3971 for (j = 0; j < XVECLEN (x, i); j++)
3972 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
3973 }
3974 }
3975 }
3976
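/* For illustration: after mark_jump_label runs over

     (jump_insn (set (pc) (label_ref L5)))

   JUMP_LABEL of the insn points at L5 and LABEL_NUSES (L5) has been
   incremented, while an INSN or CALL_INSN mentioning a label gets a
   REG_LABEL note instead.  Keeping these counts exact is what lets
   delete_insn below reclaim a label once its last user disappears.  */
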
3977 /* If all INSN does is set the pc, delete it,
3978 and delete the insn that set the condition codes for it
3979 if that's what the previous thing was. */
3980
3981 void
3982 delete_jump (insn)
3983 rtx insn;
3984 {
3985 register rtx set = single_set (insn);
3986
3987 if (set && GET_CODE (SET_DEST (set)) == PC)
3988 delete_computation (insn);
3989 }
3990
3991 /* Verify INSN is a BARRIER and delete it. */
3992
3993 void
3994 delete_barrier (insn)
3995 rtx insn;
3996 {
3997 if (GET_CODE (insn) != BARRIER)
3998 abort ();
3999
4000 delete_insn (insn);
4001 }
4002
4003 /* Recursively delete prior insns that compute the value (used only by INSN
4004 which the caller is deleting) stored in the register mentioned by NOTE
4005 which is a REG_DEAD note associated with INSN. */
4006
4007 static void
4008 delete_prior_computation (note, insn)
4009 rtx note;
4010 rtx insn;
4011 {
4012 rtx our_prev;
4013 rtx reg = XEXP (note, 0);
4014
4015 for (our_prev = prev_nonnote_insn (insn);
4016 our_prev && (GET_CODE (our_prev) == INSN
4017 || GET_CODE (our_prev) == CALL_INSN);
4018 our_prev = prev_nonnote_insn (our_prev))
4019 {
4020 rtx pat = PATTERN (our_prev);
4021
4022 /* If we reach a CALL which is not calling a const function
4023 or the callee pops the arguments, then give up. */
4024 if (GET_CODE (our_prev) == CALL_INSN
4025 && (! CONST_CALL_P (our_prev)
4026 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
4027 break;
4028
4029 /* If we reach a SEQUENCE, it is too complex to try to
4030 do anything with it, so give up. */
4031 if (GET_CODE (pat) == SEQUENCE)
4032 break;
4033
4034 if (GET_CODE (pat) == USE
4035 && GET_CODE (XEXP (pat, 0)) == INSN)
4036 /* reorg creates USEs that look like this. We leave them
4037 alone because reorg needs them for its own purposes. */
4038 break;
4039
4040 if (reg_set_p (reg, pat))
4041 {
4042 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
4043 break;
4044
4045 if (GET_CODE (pat) == PARALLEL)
4046 {
4047 /* If we find a SET of something else, we can't
4048 delete the insn. */
4049
4050 int i;
4051
4052 for (i = 0; i < XVECLEN (pat, 0); i++)
4053 {
4054 rtx part = XVECEXP (pat, 0, i);
4055
4056 if (GET_CODE (part) == SET
4057 && SET_DEST (part) != reg)
4058 break;
4059 }
4060
4061 if (i == XVECLEN (pat, 0))
4062 delete_computation (our_prev);
4063 }
4064 else if (GET_CODE (pat) == SET
4065 && GET_CODE (SET_DEST (pat)) == REG)
4066 {
4067 int dest_regno = REGNO (SET_DEST (pat));
4068 int dest_endregno
4069 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
4070 ? HARD_REGNO_NREGS (dest_regno,
4071 GET_MODE (SET_DEST (pat))) : 1);
4072 int regno = REGNO (reg);
4073 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
4074 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
4075
4076 if (dest_regno >= regno
4077 && dest_endregno <= endregno)
4078 delete_computation (our_prev);
4079
4080 /* We may have a multi-word hard register and some, but not
4081 all, of the words of the register are needed in subsequent
4082 insns. Write REG_UNUSED notes for those parts that were not
4083 needed. */
4084 else if (dest_regno <= regno
4085 && dest_endregno >= endregno)
4086 {
4087 int i;
4088
4089 REG_NOTES (our_prev)
4090 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (our_prev));
4091
4092 for (i = dest_regno; i < dest_endregno; i++)
4093 if (! find_regno_note (our_prev, REG_UNUSED, i))
4094 break;
4095
4096 if (i == dest_endregno)
4097 delete_computation (our_prev);
4098 }
4099 }
4100
4101 break;
4102 }
4103
4104 /* If PAT references the register that dies here, it is an
4105 additional use. Hence any prior SET isn't dead. However, this
4106 insn becomes the new place for the REG_DEAD note. */
4107 if (reg_overlap_mentioned_p (reg, pat))
4108 {
4109 XEXP (note, 1) = REG_NOTES (our_prev);
4110 REG_NOTES (our_prev) = note;
4111 break;
4112 }
4113 }
4114 }
4115
4116 /* Delete INSN and recursively delete insns that compute values used only
4117 by INSN. This uses the REG_DEAD notes computed during flow analysis.
4118 If we are running before flow.c, we need do nothing since flow.c will
4119 delete dead code. We also can't know if the registers being used are
4120 dead or not at this point.
4121
4122 Otherwise, look at all our REG_DEAD notes. If a previous insn does
4123 nothing other than set a register that dies in this insn, we can delete
4124 that insn as well.
4125
4126 On machines with CC0, if CC0 is used in this insn, we may be able to
4127 delete the insn that set it. */
4128
4129 static void
4130 delete_computation (insn)
4131 rtx insn;
4132 {
4133 rtx note, next;
4134 rtx set;
4135
4136 #ifdef HAVE_cc0
4137 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
4138 {
4139 rtx prev = prev_nonnote_insn (insn);
4140 /* We assume that at this stage
4141 CC's are always set explicitly
4142 and always immediately before the jump that
4143 will use them. So if the previous insn
4144 exists to set the CC's, delete it
4145 (unless it performs auto-increments, etc.). */
4146 if (prev && GET_CODE (prev) == INSN
4147 && sets_cc0_p (PATTERN (prev)))
4148 {
4149 if (sets_cc0_p (PATTERN (prev)) > 0
4150 && ! side_effects_p (PATTERN (prev)))
4151 delete_computation (prev);
4152 else
4153 /* Otherwise, show that cc0 won't be used. */
4154 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
4155 cc0_rtx, REG_NOTES (prev));
4156 }
4157 }
4158 #endif
4159
4160 #ifdef INSN_SCHEDULING
4161 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
4162 reload has completed. The schedulers need to be fixed. Until
4163 they are, we must not rely on the death notes here. */
4164 if (reload_completed && flag_schedule_insns_after_reload)
4165 {
4166 delete_insn (insn);
4167 return;
4168 }
4169 #endif
4170
4171 /* The REG_DEAD note may have been omitted for a register
4172 which is both set and used by the insn. */
4173 set = single_set (insn);
4174 if (set && GET_CODE (SET_DEST (set)) == REG)
4175 {
4176 int dest_regno = REGNO (SET_DEST (set));
4177 int dest_endregno
4178 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
4179 ? HARD_REGNO_NREGS (dest_regno,
4180 GET_MODE (SET_DEST (set))) : 1);
4181 int i;
4182
4183 for (i = dest_regno; i < dest_endregno; i++)
4184 {
4185 if (! refers_to_regno_p (i, i + 1, SET_SRC (set), NULL_PTR)
4186 || find_regno_note (insn, REG_DEAD, i))
4187 continue;
4188
4189 note = gen_rtx_EXPR_LIST (REG_DEAD, (i < FIRST_PSEUDO_REGISTER
4190 ? gen_rtx_REG (reg_raw_mode[i], i)
4191 : SET_DEST (set)), NULL_RTX);
4192 delete_prior_computation (note, insn);
4193 }
4194 }
4195
4196 for (note = REG_NOTES (insn); note; note = next)
4197 {
4198 next = XEXP (note, 1);
4199
4200 if (REG_NOTE_KIND (note) != REG_DEAD
4201 /* Verify that the REG_NOTE is legitimate. */
4202 || GET_CODE (XEXP (note, 0)) != REG)
4203 continue;
4204
4205 delete_prior_computation (note, insn);
4206 }
4207
4208 delete_insn (insn);
4209 }
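
/* For illustration (register numbers are made up): given

     insn1: (set (reg 100) (plus (reg 101) (const_int 4)))
     insn2: a jump using (reg 100), carrying a REG_DEAD note for it

   delete_computation (insn2) deletes insn2 and then, through
   delete_prior_computation, insn1 as well, since the value insn1
   computes dies at insn2 and is used nowhere else.  */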
4210 \f
4211 /* Delete insn INSN from the chain of insns and update label ref counts.
4212 May delete some following insns as a consequence; may even delete
4213 a label elsewhere and insns that follow it.
4214
4215 Returns the first insn after INSN that was not deleted. */
4216
4217 rtx
4218 delete_insn (insn)
4219 register rtx insn;
4220 {
4221 register rtx next = NEXT_INSN (insn);
4222 register rtx prev = PREV_INSN (insn);
4223 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
4224 register int dont_really_delete = 0;
4225
4226 while (next && INSN_DELETED_P (next))
4227 next = NEXT_INSN (next);
4228
4229 /* This insn is already deleted => return first following nondeleted. */
4230 if (INSN_DELETED_P (insn))
4231 return next;
4232
4233 if (was_code_label)
4234 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
4235
4236 /* Don't delete user-declared labels. When optimizing, convert them
4237 to special NOTEs instead. When not optimizing, leave them alone. */
4238 if (was_code_label && LABEL_NAME (insn) != 0)
4239 {
4240 if (! optimize)
4241 dont_really_delete = 1;
4242 else if (! dont_really_delete)
4243 {
4244 PUT_CODE (insn, NOTE);
4245 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
4246 NOTE_SOURCE_FILE (insn) = 0;
4247 dont_really_delete = 1;
4248 }
4249 }
4250 else
4251 /* Mark this insn as deleted. */
4252 INSN_DELETED_P (insn) = 1;
4253
4254 /* If this is an unconditional jump, delete it from the jump chain. */
4255 if (simplejump_p (insn))
4256 delete_from_jump_chain (insn);
4257
4258 /* If instruction is followed by a barrier,
4259 delete the barrier too. */
4260
4261 if (next != 0 && GET_CODE (next) == BARRIER)
4262 {
4263 INSN_DELETED_P (next) = 1;
4264 next = NEXT_INSN (next);
4265 }
4266
4267   /* Patch out INSN (and the barrier, if any).  */
4268
4269 if (! dont_really_delete)
4270 {
4271 if (prev)
4272 {
4273 NEXT_INSN (prev) = next;
4274 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
4275 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
4276 XVECLEN (PATTERN (prev), 0) - 1)) = next;
4277 }
4278
4279 if (next)
4280 {
4281 PREV_INSN (next) = prev;
4282 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
4283 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4284 }
4285
4286 if (prev && NEXT_INSN (prev) == 0)
4287 set_last_insn (prev);
4288 }
4289
4290 /* If deleting a jump, decrement the count of the label,
4291 and delete the label if it is now unused. */
4292
4293 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
4294 {
4295 rtx lab = JUMP_LABEL (insn), lab_next;
4296
4297 if (--LABEL_NUSES (lab) == 0)
4298 {
4299 /* This can delete NEXT or PREV,
4300 either directly if NEXT is JUMP_LABEL (INSN),
4301 or indirectly through more levels of jumps. */
4302 delete_insn (lab);
4303
4304 /* I feel a little doubtful about this loop,
4305 but I see no clean and sure alternative way
4306 to find the first insn after INSN that is not now deleted.
4307 I hope this works. */
4308 while (next && INSN_DELETED_P (next))
4309 next = NEXT_INSN (next);
4310 return next;
4311 }
4312 else if ((lab_next = next_nonnote_insn (lab)) != NULL
4313 && GET_CODE (lab_next) == JUMP_INSN
4314 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
4315 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
4316 {
4317 /* If we're deleting the tablejump, delete the dispatch table.
4318 	     We may not be able to kill the immediately preceding label
4319 just yet, as it might be referenced in code leading up to
4320 the tablejump. */
4321 delete_insn (lab_next);
4322 }
4323 }
4324
4325 /* Likewise if we're deleting a dispatch table. */
4326
4327 if (GET_CODE (insn) == JUMP_INSN
4328 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4329 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4330 {
4331 rtx pat = PATTERN (insn);
4332 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4333 int len = XVECLEN (pat, diff_vec_p);
4334
4335 for (i = 0; i < len; i++)
4336 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
4337 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
4338 while (next && INSN_DELETED_P (next))
4339 next = NEXT_INSN (next);
4340 return next;
4341 }
4342
4343 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
4344 prev = PREV_INSN (prev);
4345
4346 /* If INSN was a label and a dispatch table follows it,
4347 delete the dispatch table. The tablejump must have gone already.
4348 It isn't useful to fall through into a table. */
4349
4350 if (was_code_label
4351 && NEXT_INSN (insn) != 0
4352 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4353 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
4354 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
4355 next = delete_insn (NEXT_INSN (insn));
4356
4357 /* If INSN was a label, delete insns following it if now unreachable. */
4358
4359 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
4360 {
4361 register RTX_CODE code;
4362 while (next != 0
4363 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
4364 || code == NOTE || code == BARRIER
4365 || (code == CODE_LABEL && INSN_DELETED_P (next))))
4366 {
4367 if (code == NOTE
4368 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
4369 next = NEXT_INSN (next);
4370 /* Keep going past other deleted labels to delete what follows. */
4371 else if (code == CODE_LABEL && INSN_DELETED_P (next))
4372 next = NEXT_INSN (next);
4373 else
4374 /* Note: if this deletes a jump, it can cause more
4375 deletion of unreachable code, after a different label.
4376 As long as the value from this recursive call is correct,
4377 this invocation functions correctly. */
4378 next = delete_insn (next);
4379 }
4380 }
4381
4382 return next;
4383 }
4384
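/* For illustration of the cascade above: deleting the only jump to a
   label drops that label's LABEL_NUSES to zero, so the label itself is
   deleted; if a BARRIER precedes the label, everything from the label
   up to the next referenced label has become unreachable and is deleted
   too.  One call can thus consume a whole dead region of code.  */
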
4385 /* Advance from INSN till reaching something not deleted
4386 then return that. May return INSN itself. */
4387
4388 rtx
4389 next_nondeleted_insn (insn)
4390 rtx insn;
4391 {
4392 while (INSN_DELETED_P (insn))
4393 insn = NEXT_INSN (insn);
4394 return insn;
4395 }
4396 \f
4397 /* Delete a range of insns from FROM to TO, inclusive.
4398 This is for the sake of peephole optimization, so assume
4399 that whatever these insns do will still be done by a new
4400 peephole insn that will replace them. */
4401
4402 void
4403 delete_for_peephole (from, to)
4404 register rtx from, to;
4405 {
4406 register rtx insn = from;
4407
4408 while (1)
4409 {
4410 register rtx next = NEXT_INSN (insn);
4411 register rtx prev = PREV_INSN (insn);
4412
4413 if (GET_CODE (insn) != NOTE)
4414 {
4415 INSN_DELETED_P (insn) = 1;
4416
4417 /* Patch this insn out of the chain. */
4418 /* We don't do this all at once, because we
4419 must preserve all NOTEs. */
4420 if (prev)
4421 NEXT_INSN (prev) = next;
4422
4423 if (next)
4424 PREV_INSN (next) = prev;
4425 }
4426
4427 if (insn == to)
4428 break;
4429 insn = next;
4430 }
4431
4432 /* Note that if TO is an unconditional jump
4433 we *do not* delete the BARRIER that follows,
4434 since the peephole that replaces this sequence
4435 is also an unconditional jump in that case. */
4436 }
4437 \f
4438 /* We have determined that INSN is never reached, and are about to
4439 delete it. Print a warning if the user asked for one.
4440
4441 To try to make this warning more useful, this should only be called
4442 once per basic block not reached, and it only warns when the basic
4443    once per unreachable basic block, and it only warns when the basic
4444 contains at least one operation. CSE and inlining can duplicate insns,
4445 so it's possible to get spurious warnings from this. */
4446
4447 void
4448 never_reached_warning (avoided_insn)
4449 rtx avoided_insn;
4450 {
4451 rtx insn;
4452 rtx a_line_note = NULL;
4453 int two_avoided_lines = 0;
4454 int contains_insn = 0;
4455
4456 if (! warn_notreached)
4457 return;
4458
4459 /* Scan forwards, looking at LINE_NUMBER notes, until
4460 we hit a LABEL or we run out of insns. */
4461
4462 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
4463 {
4464 if (GET_CODE (insn) == CODE_LABEL)
4465 break;
4466 else if (GET_CODE (insn) == NOTE /* A line number note? */
4467 && NOTE_LINE_NUMBER (insn) >= 0)
4468 {
4469 if (a_line_note == NULL)
4470 a_line_note = insn;
4471 else
4472 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
4473 != NOTE_LINE_NUMBER (insn));
4474 }
4475 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4476 contains_insn = 1;
4477 }
4478 if (two_avoided_lines && contains_insn)
4479 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
4480 NOTE_LINE_NUMBER (a_line_note),
4481 "will never be executed");
4482 }
4483 \f
4484 /* Invert the condition of the jump JUMP, and make it jump
4485 to label NLABEL instead of where it jumps now. */
4486
4487 int
4488 invert_jump (jump, nlabel)
4489 rtx jump, nlabel;
4490 {
4491 /* We have to either invert the condition and change the label or
4492 do neither. Either operation could fail. We first try to invert
4493 the jump. If that succeeds, we try changing the label. If that fails,
4494 we invert the jump back to what it was. */
4495
4496 if (! invert_exp (PATTERN (jump), jump))
4497 return 0;
4498
4499 if (redirect_jump (jump, nlabel))
4500 {
4501 if (flag_branch_probabilities)
4502 {
4503 rtx note = find_reg_note (jump, REG_BR_PROB, 0);
4504
4505 /* An inverted jump means that a probability taken becomes a
4506 probability not taken. Subtract the branch probability from the
4507 probability base to convert it back to a taken probability.
4508 	     (We don't flip the probability on a branch that's never taken.)
4509 if (note && XINT (XEXP (note, 0), 0) >= 0)
4510 XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
4511 }
4512
4513 return 1;
4514 }
4515
4516 if (! invert_exp (PATTERN (jump), jump))
4517 /* This should just be putting it back the way it was. */
4518 abort ();
4519
4520 return 0;
4521 }
4522
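/* A worked example of the probability update above: REG_BR_PROB notes
   are scaled by REG_BR_PROB_BASE, so a branch taken with probability
   p / REG_BR_PROB_BASE is, once inverted, taken with probability
   (REG_BR_PROB_BASE - p) / REG_BR_PROB_BASE.  With a base of 10000,
   for instance, a note of 9000 becomes 1000.  */
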
4523 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4524
4525 Return 1 if we can do so, 0 if we cannot find a way to do so that
4526 matches a pattern. */
4527
4528 int
4529 invert_exp (x, insn)
4530 rtx x;
4531 rtx insn;
4532 {
4533 register RTX_CODE code;
4534 register int i;
4535 register const char *fmt;
4536
4537 code = GET_CODE (x);
4538
4539 if (code == IF_THEN_ELSE)
4540 {
4541 register rtx comp = XEXP (x, 0);
4542 register rtx tem;
4543
4544 /* We can do this in two ways: The preferable way, which can only
4545 be done if this is not an integer comparison, is to reverse
4546 the comparison code. Otherwise, swap the THEN-part and ELSE-part
4547 of the IF_THEN_ELSE. If we can't do either, fail. */
4548
4549 if (can_reverse_comparison_p (comp, insn)
4550 && validate_change (insn, &XEXP (x, 0),
4551 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
4552 GET_MODE (comp), XEXP (comp, 0),
4553 XEXP (comp, 1)), 0))
4554 return 1;
4555
4556 tem = XEXP (x, 1);
4557 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
4558 validate_change (insn, &XEXP (x, 2), tem, 1);
4559 return apply_change_group ();
4560 }
4561
4562 fmt = GET_RTX_FORMAT (code);
4563 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4564 {
4565 if (fmt[i] == 'e')
4566 {
4567 if (! invert_exp (XEXP (x, i), insn))
4568 return 0;
4569 }
4570 else if (fmt[i] == 'E')
4571 {
4572 register int j;
4573 for (j = 0; j < XVECLEN (x, i); j++)
4574 if (!invert_exp (XVECEXP (x, i, j), insn))
4575 return 0;
4576 }
4577 }
4578
4579 return 1;
4580 }
4581 \f
4582 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4583 If the old jump target label is unused as a result,
4584 it and the code following it may be deleted.
4585
4586 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4587 RETURN insn.
4588
4589 The return value will be 1 if the change was made, 0 if it wasn't (this
4590 can only occur for NLABEL == 0). */
4591
4592 int
4593 redirect_jump (jump, nlabel)
4594 rtx jump, nlabel;
4595 {
4596 register rtx olabel = JUMP_LABEL (jump);
4597
4598 if (nlabel == olabel)
4599 return 1;
4600
4601 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
4602 return 0;
4603
4604 /* If this is an unconditional branch, delete it from the jump_chain of
4605 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4606 have UID's in range and JUMP_CHAIN is valid). */
4607 if (jump_chain && (simplejump_p (jump)
4608 || GET_CODE (PATTERN (jump)) == RETURN))
4609 {
4610 int label_index = nlabel ? INSN_UID (nlabel) : 0;
4611
4612 delete_from_jump_chain (jump);
4613 if (label_index < max_jump_chain
4614 && INSN_UID (jump) < max_jump_chain)
4615 {
4616 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
4617 jump_chain[label_index] = jump;
4618 }
4619 }
4620
4621 JUMP_LABEL (jump) = nlabel;
4622 if (nlabel)
4623 ++LABEL_NUSES (nlabel);
4624
4625 /* If we're eliding the jump over exception cleanups at the end of a
4626 function, move the function end note so that -Wreturn-type works. */
4627 if (olabel && NEXT_INSN (olabel)
4628 && GET_CODE (NEXT_INSN (olabel)) == NOTE
4629 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
4630 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
4631
4632 if (olabel && --LABEL_NUSES (olabel) == 0)
4633 delete_insn (olabel);
4634
4635 return 1;
4636 }
4637
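/* For illustration: redirect_jump (jump, nlabel) rewrites every
   (label_ref olabel) in the pattern to (label_ref nlabel) through
   redirect_exp and keeps the LABEL_NUSES of both labels consistent,
   while redirect_jump (jump, 0) tries to turn the branch into a
   (possibly conditional) return.  */
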
4638 /* Delete the instruction JUMP from any jump chain it might be on. */
4639
4640 static void
4641 delete_from_jump_chain (jump)
4642 rtx jump;
4643 {
4644 int index;
4645 rtx olabel = JUMP_LABEL (jump);
4646
4647 /* Handle unconditional jumps. */
4648 if (jump_chain && olabel != 0
4649 && INSN_UID (olabel) < max_jump_chain
4650 && simplejump_p (jump))
4651 index = INSN_UID (olabel);
4652 /* Handle return insns. */
4653 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4654 index = 0;
4655 else return;
4656
4657 if (jump_chain[index] == jump)
4658 jump_chain[index] = jump_chain[INSN_UID (jump)];
4659 else
4660 {
4661 rtx insn;
4662
4663 for (insn = jump_chain[index];
4664 insn != 0;
4665 insn = jump_chain[INSN_UID (insn)])
4666 if (jump_chain[INSN_UID (insn)] == jump)
4667 {
4668 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4669 break;
4670 }
4671 }
4672 }
4673
4674 /* If NLABEL is nonzero, throughout the rtx at LOC,
4675 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
4676 zero, alter (RETURN) to (LABEL_REF NLABEL).
4677
4678 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
4679 validity with validate_change. Convert (set (pc) (label_ref olabel))
4680 to (return).
4681
4682 Return 0 if we found a change we would like to make but it is invalid.
4683 Otherwise, return 1. */
4684
4685 int
4686 redirect_exp (loc, olabel, nlabel, insn)
4687 rtx *loc;
4688 rtx olabel, nlabel;
4689 rtx insn;
4690 {
4691 register rtx x = *loc;
4692 register RTX_CODE code = GET_CODE (x);
4693 register int i;
4694 register const char *fmt;
4695
4696 if (code == LABEL_REF)
4697 {
4698 if (XEXP (x, 0) == olabel)
4699 {
4700 if (nlabel)
4701 XEXP (x, 0) = nlabel;
4702 else
4703 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4704 return 1;
4705 }
4706 }
4707 else if (code == RETURN && olabel == 0)
4708 {
4709 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
4710 if (loc == &PATTERN (insn))
4711 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
4712 return validate_change (insn, loc, x, 0);
4713 }
4714
4715 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
4716 && GET_CODE (SET_SRC (x)) == LABEL_REF
4717 && XEXP (SET_SRC (x), 0) == olabel)
4718 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4719
4720 fmt = GET_RTX_FORMAT (code);
4721 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4722 {
4723 if (fmt[i] == 'e')
4724 {
4725 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
4726 return 0;
4727 }
4728 else if (fmt[i] == 'E')
4729 {
4730 register int j;
4731 for (j = 0; j < XVECLEN (x, i); j++)
4732 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
4733 return 0;
4734 }
4735 }
4736
4737 return 1;
4738 }
4739 \f
4740 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4741
4742 If the old jump target label (before the dispatch table) becomes unused,
4743 it and the dispatch table may be deleted. In that case, find the insn
4744    before the jump that references that label, and delete it and its
4745    logical successors too.  */
4746
4747 static void
4748 redirect_tablejump (jump, nlabel)
4749 rtx jump, nlabel;
4750 {
4751 register rtx olabel = JUMP_LABEL (jump);
4752
4753 /* Add this jump to the jump_chain of NLABEL. */
4754 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4755 && INSN_UID (jump) < max_jump_chain)
4756 {
4757 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4758 jump_chain[INSN_UID (nlabel)] = jump;
4759 }
4760
4761 PATTERN (jump) = gen_jump (nlabel);
4762 JUMP_LABEL (jump) = nlabel;
4763 ++LABEL_NUSES (nlabel);
4764 INSN_CODE (jump) = -1;
4765
4766 if (--LABEL_NUSES (olabel) == 0)
4767 {
4768 delete_labelref_insn (jump, olabel, 0);
4769 delete_insn (olabel);
4770 }
4771 }
4772
4773 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4774 If we found one, delete it and then delete this insn if DELETE_THIS is
4775 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4776
4777 static int
4778 delete_labelref_insn (insn, label, delete_this)
4779 rtx insn, label;
4780 int delete_this;
4781 {
4782 int deleted = 0;
4783 rtx link;
4784
4785 if (GET_CODE (insn) != NOTE
4786 && reg_mentioned_p (label, PATTERN (insn)))
4787 {
4788 if (delete_this)
4789 {
4790 delete_insn (insn);
4791 deleted = 1;
4792 }
4793 else
4794 return 1;
4795 }
4796
4797 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4798 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4799 {
4800 if (delete_this)
4801 {
4802 delete_insn (insn);
4803 deleted = 1;
4804 }
4805 else
4806 return 1;
4807 }
4808
4809 return deleted;
4810 }
4811 \f
4812 /* Like rtx_equal_p except that it considers two REGs as equal
4813 if they renumber to the same value and considers two commutative
4814 operations to be the same if the order of the operands has been
4815 reversed.
4816
4817 ??? Addition is not commutative on the PA due to the weird implicit
4818 space register selection rules for memory addresses. Therefore, we
4819 don't consider a + b == b + a.
4820
4821 We could/should make this test a little tighter. Possibly only
4822 disabling it on the PA via some backend macro or only disabling this
4823 case when the PLUS is inside a MEM. */
4824
4825 int
4826 rtx_renumbered_equal_p (x, y)
4827 rtx x, y;
4828 {
4829 register int i;
4830 register RTX_CODE code = GET_CODE (x);
4831 register const char *fmt;
4832
4833 if (x == y)
4834 return 1;
4835
4836 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4837 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4838 && GET_CODE (SUBREG_REG (y)) == REG)))
4839 {
4840 int reg_x = -1, reg_y = -1;
4841 int word_x = 0, word_y = 0;
4842
4843 if (GET_MODE (x) != GET_MODE (y))
4844 return 0;
4845
4846 /* If we haven't done any renumbering, don't
4847 make any assumptions. */
4848 if (reg_renumber == 0)
4849 return rtx_equal_p (x, y);
4850
4851 if (code == SUBREG)
4852 {
4853 reg_x = REGNO (SUBREG_REG (x));
4854 word_x = SUBREG_WORD (x);
4855
4856 if (reg_renumber[reg_x] >= 0)
4857 {
4858 reg_x = reg_renumber[reg_x] + word_x;
4859 word_x = 0;
4860 }
4861 }
4862
4863 else
4864 {
4865 reg_x = REGNO (x);
4866 if (reg_renumber[reg_x] >= 0)
4867 reg_x = reg_renumber[reg_x];
4868 }
4869
4870 if (GET_CODE (y) == SUBREG)
4871 {
4872 reg_y = REGNO (SUBREG_REG (y));
4873 word_y = SUBREG_WORD (y);
4874
4875 if (reg_renumber[reg_y] >= 0)
4876 {
4877 reg_y = reg_renumber[reg_y];
4878 word_y = 0;
4879 }
4880 }
4881
4882 else
4883 {
4884 reg_y = REGNO (y);
4885 if (reg_renumber[reg_y] >= 0)
4886 reg_y = reg_renumber[reg_y];
4887 }
4888
4889 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4890 }
4891
4892 /* Now we have disposed of all the cases
4893 in which different rtx codes can match. */
4894 if (code != GET_CODE (y))
4895 return 0;
4896
4897 switch (code)
4898 {
4899 case PC:
4900 case CC0:
4901 case ADDR_VEC:
4902 case ADDR_DIFF_VEC:
4903 return 0;
4904
4905 case CONST_INT:
4906 return INTVAL (x) == INTVAL (y);
4907
4908 case LABEL_REF:
4909 /* We can't assume nonlocal labels have their following insns yet. */
4910 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4911 return XEXP (x, 0) == XEXP (y, 0);
4912
4913 /* Two label-refs are equivalent if they point at labels
4914 in the same position in the instruction stream. */
4915 return (next_real_insn (XEXP (x, 0))
4916 == next_real_insn (XEXP (y, 0)));
4917
4918 case SYMBOL_REF:
4919 return XSTR (x, 0) == XSTR (y, 0);
4920
4921 case CODE_LABEL:
4922 /* If we didn't match EQ equality above, they aren't the same. */
4923 return 0;
4924
4925 default:
4926 break;
4927 }
4928
4929 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4930
4931 if (GET_MODE (x) != GET_MODE (y))
4932 return 0;
4933
4934   /* For commutative operations, the RTXs match if the operands match in either
4935      order.  Also handle the simple binary and unary cases without a loop.
4936
4937 ??? Don't consider PLUS a commutative operator; see comments above. */
4938 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4939 && code != PLUS)
4940 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4941 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4942 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4943 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4944 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4945 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4946 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4947 else if (GET_RTX_CLASS (code) == '1')
4948 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4949
4950 /* Compare the elements. If any pair of corresponding elements
4951      fails to match, return 0 for the whole thing.  */
4952
4953 fmt = GET_RTX_FORMAT (code);
4954 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4955 {
4956 register int j;
4957 switch (fmt[i])
4958 {
4959 case 'w':
4960 if (XWINT (x, i) != XWINT (y, i))
4961 return 0;
4962 break;
4963
4964 case 'i':
4965 if (XINT (x, i) != XINT (y, i))
4966 return 0;
4967 break;
4968
4969 case 's':
4970 if (strcmp (XSTR (x, i), XSTR (y, i)))
4971 return 0;
4972 break;
4973
4974 case 'e':
4975 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4976 return 0;
4977 break;
4978
4979 case 'u':
4980 if (XEXP (x, i) != XEXP (y, i))
4981 return 0;
4982 /* fall through. */
4983 case '0':
4984 break;
4985
4986 case 'E':
4987 if (XVECLEN (x, i) != XVECLEN (y, i))
4988 return 0;
4989 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4990 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4991 return 0;
4992 break;
4993
4994 default:
4995 abort ();
4996 }
4997 }
4998 return 1;
4999 }
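
/* For illustration (register numbers are made up): with
   reg_renumber[100] == 3, the rtxs (reg:SI 100) and (reg:SI 3) compare
   equal above, as do two pseudos that both landed in hard register 3.
   Commutative operands may also match in either order, except for PLUS
   as noted in the comment before the function.  */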
5000 \f
5001 /* If X is a hard register or equivalent to one or a subregister of one,
5002 return the hard register number. If X is a pseudo register that was not
5003 assigned a hard register, return the pseudo register number. Otherwise,
5004 return -1. Any rtx is valid for X. */
5005
5006 int
5007 true_regnum (x)
5008 rtx x;
5009 {
5010 if (GET_CODE (x) == REG)
5011 {
5012 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
5013 return reg_renumber[REGNO (x)];
5014 return REGNO (x);
5015 }
5016 if (GET_CODE (x) == SUBREG)
5017 {
5018 int base = true_regnum (SUBREG_REG (x));
5019 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
5020 return SUBREG_WORD (x) + base;
5021 }
5022 return -1;
5023 }
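
/* For illustration (register numbers are made up): with
   reg_renumber[100] == 3, true_regnum of (reg 100) is 3 and of
   (subreg (reg 100) 1) is 4, the base hard register plus the subword
   offset; an unallocated pseudo yields its own number, and anything
   else yields -1.  */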
5024 \f
5025 /* Optimize code of the form:
5026
5027 for (x = a[i]; x; ...)
5028 ...
5029 for (x = a[i]; x; ...)
5030 ...
5031 foo:
5032
5033 Loop optimize will change the above code into
5034
5035 if (x = a[i])
5036 for (;;)
5037 { ...; if (! (x = ...)) break; }
5038 if (x = a[i])
5039 for (;;)
5040 { ...; if (! (x = ...)) break; }
5041 foo:
5042
5043 In general, if the first test fails, the program can branch
5044 directly to `foo' and skip the second try which is doomed to fail.
5045 We run this after loop optimization and before flow analysis. */
5046
5047 /* When comparing the insn patterns, we track the fact that different
5048 pseudo-register numbers may have been used in each computation.
5049 The following array stores an equivalence -- same_regs[I] == J means
5050 that pseudo register I was used in the first set of tests in a context
5051 where J was used in the second set. We also count the number of such
5052    pending equivalences.  If that count is still nonzero when we finish,
5053    the expressions really aren't the same.  */
5054
5055 static int *same_regs;
5056
5057 static int num_same_regs;
5058
5059 /* Track any registers modified between the target of the first jump and
5060 the second jump. They never compare equal. */
5061
5062 static char *modified_regs;
5063
5064 /* Record if memory was modified. */
5065
5066 static int modified_mem;
5067
5068 /* Called via note_stores on each insn between the target of the first
5069 branch and the second branch. It marks any changed registers. */
5070
5071 static void
5072 mark_modified_reg (dest, x, data)
5073 rtx dest;
5074 rtx x ATTRIBUTE_UNUSED;
5075 void *data ATTRIBUTE_UNUSED;
5076 {
5077 int regno, i;
5078
5079 if (GET_CODE (dest) == SUBREG)
5080 dest = SUBREG_REG (dest);
5081
5082 if (GET_CODE (dest) == MEM)
5083 modified_mem = 1;
5084
5085 if (GET_CODE (dest) != REG)
5086 return;
5087
5088 regno = REGNO (dest);
5089 if (regno >= FIRST_PSEUDO_REGISTER)
5090 modified_regs[regno] = 1;
5091 else
5092 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
5093 modified_regs[regno + i] = 1;
5094 }
5095
5096 /* F is the first insn in the chain of insns. */
5097
5098 void
5099 thread_jumps (f, max_reg, flag_before_loop)
5100 rtx f;
5101 int max_reg;
5102 int flag_before_loop;
5103 {
5104 /* Basic algorithm is to find a conditional branch,
5105 the label it may branch to, and the branch after
5106 that label. If the two branches test the same condition,
5107 walk back from both branch paths until the insn patterns
5108 differ, or code labels are hit. If we make it back to
5109 the target of the first branch, then we know that the first branch
5110 will either always succeed or always fail depending on the relative
5111 senses of the two branches. So adjust the first branch accordingly
5112 in this case. */
5113
5114 rtx label, b1, b2, t1, t2;
5115 enum rtx_code code1, code2;
5116 rtx b1op0, b1op1, b2op0, b2op1;
5117 int changed = 1;
5118 int i;
5119 int *all_reset;
5120
5121 /* Allocate register tables and quick-reset table. */
5122 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
5123 same_regs = (int *) xmalloc (max_reg * sizeof (int));
5124 all_reset = (int *) xmalloc (max_reg * sizeof (int));
5125 for (i = 0; i < max_reg; i++)
5126 all_reset[i] = -1;
5127
5128 while (changed)
5129 {
5130 changed = 0;
5131
5132 for (b1 = f; b1; b1 = NEXT_INSN (b1))
5133 {
5134 /* Get to a candidate branch insn. */
5135 if (GET_CODE (b1) != JUMP_INSN
5136 || ! condjump_p (b1) || simplejump_p (b1)
5137 || JUMP_LABEL (b1) == 0)
5138 continue;
5139
5140 bzero (modified_regs, max_reg * sizeof (char));
5141 modified_mem = 0;
5142
5143 bcopy ((char *) all_reset, (char *) same_regs,
5144 max_reg * sizeof (int));
5145 num_same_regs = 0;
5146
5147 label = JUMP_LABEL (b1);
5148
5149 /* Look for a branch after the target. Record any registers and
5150 memory modified between the target and the branch. Stop when we
5151 get to a label since we can't know what was changed there. */
5152 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
5153 {
5154 if (GET_CODE (b2) == CODE_LABEL)
5155 break;
5156
5157 else if (GET_CODE (b2) == JUMP_INSN)
5158 {
5159 /* If this is an unconditional jump and is the only use of
5160 its target label, we can follow it. */
5161 if (simplejump_p (b2)
5162 && JUMP_LABEL (b2) != 0
5163 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
5164 {
5165 b2 = JUMP_LABEL (b2);
5166 continue;
5167 }
5168 else
5169 break;
5170 }
5171
5172 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
5173 continue;
5174
5175 if (GET_CODE (b2) == CALL_INSN)
5176 {
5177 modified_mem = 1;
5178 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5179 if (call_used_regs[i] && ! fixed_regs[i]
5180 && i != STACK_POINTER_REGNUM
5181 && i != FRAME_POINTER_REGNUM
5182 && i != HARD_FRAME_POINTER_REGNUM
5183 && i != ARG_POINTER_REGNUM)
5184 modified_regs[i] = 1;
5185 }
5186
5187 note_stores (PATTERN (b2), mark_modified_reg, NULL);
5188 }
5189
5190 /* Check the next candidate branch insn from the label
5191 of the first. */
5192 if (b2 == 0
5193 || GET_CODE (b2) != JUMP_INSN
5194 || b2 == b1
5195 || ! condjump_p (b2)
5196 || simplejump_p (b2))
5197 continue;
5198
5199 /* Get the comparison codes and operands, reversing the
5200 codes if appropriate. If we don't have comparison codes,
5201 we can't do anything. */
5202 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
5203 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
5204 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
5205 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
5206 code1 = reverse_condition (code1);
5207
5208 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
5209 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
5210 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
5211 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
5212 code2 = reverse_condition (code2);
5213
5214 	  /* If they test the same things, and if knowing whether B1 branches
5215 	     tells us whether or not B2 branches, check whether we
5216 	     can thread the branch.  */
5217 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
5218 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
5219 && (comparison_dominates_p (code1, code2)
5220 || (can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
5221 0),
5222 b1)
5223 && comparison_dominates_p (code1, reverse_condition (code2)))))
5224
5225 {
5226 t1 = prev_nonnote_insn (b1);
5227 t2 = prev_nonnote_insn (b2);
5228
5229 while (t1 != 0 && t2 != 0)
5230 {
5231 if (t2 == label)
5232 {
5233 /* We have reached the target of the first branch.
5234 If there are no pending register equivalents,
5235 we know that this branch will either always
5236 succeed (if the senses of the two branches are
5237 the same) or always fail (if not). */
5238 rtx new_label;
5239
5240 if (num_same_regs != 0)
5241 break;
5242
5243 if (comparison_dominates_p (code1, code2))
5244 new_label = JUMP_LABEL (b2);
5245 else
5246 new_label = get_label_after (b2);
5247
5248 if (JUMP_LABEL (b1) != new_label)
5249 {
5250 rtx prev = PREV_INSN (new_label);
5251
5252 if (flag_before_loop
5253 && GET_CODE (prev) == NOTE
5254 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
5255 {
5256 /* Don't thread to the loop label. If a loop
5257 label is reused, loop optimization will
5258 be disabled for that loop. */
5259 new_label = gen_label_rtx ();
5260 emit_label_after (new_label, PREV_INSN (prev));
5261 }
5262 changed |= redirect_jump (b1, new_label);
5263 }
5264 break;
5265 }
5266
5267 /* If either of these is not a normal insn (it might be
5268 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
5269 have already been skipped above.) Similarly, fail
5270 if the insns are different. */
5271 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
5272 || recog_memoized (t1) != recog_memoized (t2)
5273 || ! rtx_equal_for_thread_p (PATTERN (t1),
5274 PATTERN (t2), t2))
5275 break;
5276
5277 t1 = prev_nonnote_insn (t1);
5278 t2 = prev_nonnote_insn (t2);
5279 }
5280 }
5281 }
5282 }
5283
5284 /* Clean up. */
5285 free (modified_regs);
5286 free (same_regs);
5287 free (all_reset);
5288 }
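
/* For illustration (a sketch of the transformation above, with made-up
   labels): given

     b1:  if (x == 0) goto L
     ...
     L:   ...
     b2:  if (x == 0) goto M

   where nothing between L and b2 modifies x, taking b1 guarantees
   taking b2, so b1 is redirected to M.  If code1 instead dominated only
   the reverse of code2, b1 would be redirected to a label just after
   b2, skipping the branch that is known not to be taken.  */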
5289 \f
5290 /* This is like RTX_EQUAL_P except that it knows about our handling of
5291 possibly equivalent registers and knows to consider volatile and
5292 modified objects as not equal.
5293
5294 YINSN is the insn containing Y. */
5295
5296 int
5297 rtx_equal_for_thread_p (x, y, yinsn)
5298 rtx x, y;
5299 rtx yinsn;
5300 {
5301 register int i;
5302 register int j;
5303 register enum rtx_code code;
5304 register const char *fmt;
5305
5306 code = GET_CODE (x);
5307 /* Rtx's of different codes cannot be equal. */
5308 if (code != GET_CODE (y))
5309 return 0;
5310
5311 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
5312 (REG:SI x) and (REG:HI x) are NOT equivalent. */
5313
5314 if (GET_MODE (x) != GET_MODE (y))
5315 return 0;
5316
5317 /* For floating-point, consider everything unequal. This is a bit
5318 pessimistic, but this pass would only rarely do anything for FP
5319 anyway. */
5320 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
5321 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
5322 return 0;
5323
5324   /* For commutative operations, the RTXs match if the operands match in either
5325      order.  Also handle the simple binary and unary cases without a loop.  */
5326 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5327 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5328 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
5329 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
5330 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
5331 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
5332 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5333 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
5334 else if (GET_RTX_CLASS (code) == '1')
5335 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5336
5337   /* Handle special cases first.  */
5338 switch (code)
5339 {
5340 case REG:
5341 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
5342 return 1;
5343
5344       /* If neither is a user variable or a hard register, check for possible
5345 equivalence. */
5346 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
5347 || REGNO (x) < FIRST_PSEUDO_REGISTER
5348 || REGNO (y) < FIRST_PSEUDO_REGISTER)
5349 return 0;
5350
5351 if (same_regs[REGNO (x)] == -1)
5352 {
5353 same_regs[REGNO (x)] = REGNO (y);
5354 num_same_regs++;
5355
5356 /* If this is the first time we are seeing a register on the `Y'
5357 side, see if it is the last use. If not, we can't thread the
5358 jump, so mark it as not equivalent. */
5359 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
5360 return 0;
5361
5362 return 1;
5363 }
5364 else
5365 return (same_regs[REGNO (x)] == REGNO (y));
5366
5367 break;
5368
5369 case MEM:
5370 /* If memory modified or either volatile, not equivalent.
5371 Else, check address. */
5372 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5373 return 0;
5374
5375 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5376
5377 case ASM_INPUT:
5378 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5379 return 0;
5380
5381 break;
5382
5383 case SET:
5384 /* Cancel a pending `same_regs' if setting equivalenced registers.
5385 Then process source. */
5386 if (GET_CODE (SET_DEST (x)) == REG
5387 && GET_CODE (SET_DEST (y)) == REG)
5388 {
5389 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
5390 {
5391 same_regs[REGNO (SET_DEST (x))] = -1;
5392 num_same_regs--;
5393 }
5394 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
5395 return 0;
5396 }
5397 else
5398 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
5399 return 0;
5400
5401 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
5402
5403 case LABEL_REF:
5404 return XEXP (x, 0) == XEXP (y, 0);
5405
5406 case SYMBOL_REF:
5407 return XSTR (x, 0) == XSTR (y, 0);
5408
5409 default:
5410 break;
5411 }
5412
5413 if (x == y)
5414 return 1;
5415
5416 fmt = GET_RTX_FORMAT (code);
5417 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5418 {
5419 switch (fmt[i])
5420 {
5421 case 'w':
5422 if (XWINT (x, i) != XWINT (y, i))
5423 return 0;
5424 break;
5425
5426 case 'n':
5427 case 'i':
5428 if (XINT (x, i) != XINT (y, i))
5429 return 0;
5430 break;
5431
5432 case 'V':
5433 case 'E':
5434 /* Two vectors must have the same length. */
5435 if (XVECLEN (x, i) != XVECLEN (y, i))
5436 return 0;
5437
5438 /* And the corresponding elements must match. */
5439 for (j = 0; j < XVECLEN (x, i); j++)
5440 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
5441 XVECEXP (y, i, j), yinsn) == 0)
5442 return 0;
5443 break;
5444
5445 case 'e':
5446 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
5447 return 0;
5448 break;
5449
5450 case 'S':
5451 case 's':
5452 if (strcmp (XSTR (x, i), XSTR (y, i)))
5453 return 0;
5454 break;
5455
5456 case 'u':
5457 /* These are just backpointers, so they don't matter. */
5458 break;
5459
5460 case '0':
5461 case 't':
5462 break;
5463
5464 /* It is believed that rtx's at this level will never
5465 contain anything but integers and other rtx's,
5466 except for within LABEL_REFs and SYMBOL_REFs. */
5467 default:
5468 abort ();
5469 }
5470 }
5471 return 1;
5472 }
5473 \f
5474
5475 #if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)
5476 /* Return the insn in front of which NEW can safely be inserted, starting
5477    the search at the jump insn INSN.  Return 0 if it is not safe to do
5478    this jump optimization.  Note that NEW must contain a single set.  */
5479
5480 static rtx
5481 find_insert_position (insn, new)
5482 rtx insn;
5483 rtx new;
5484 {
5485 int i;
5486 rtx prev;
5487
5488   /* If NEW contains no CLOBBERs, it is safe to insert NEW before INSN.  */
5489 if (GET_CODE (PATTERN (new)) != PARALLEL)
5490 return insn;
5491
5492 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5493 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5494 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5495 insn))
5496 break;
5497
5498 if (i < 0)
5499 return insn;
5500
5501 /* There is a good chance that the previous insn PREV sets the thing
5502 being clobbered (often the CC in a hard reg). If PREV does not
5503 use what NEW sets, we can insert NEW before PREV. */
5504
5505 prev = prev_active_insn (insn);
5506 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5507 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5508 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5509 insn)
5510 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5511 prev))
5512 return 0;
5513
5514 return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
5515 }
5516 #endif /* !HAVE_cc0 && !HAVE_conditional_arithmetic */