/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
21
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
25
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
30
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
38
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
43
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
46
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
50
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
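
/* As one concrete illustration of the "jump around an unconditional
   jump" simplification mentioned above (a schematic sketch, not
   literal insns):

       if (cc) goto L1;            if (!cc) goto L2;
       goto L2;            -->
     L1: ...                     L1: ...

   The conditional jump is inverted to target L2 directly, and the
   unconditional jump (and L1, if it becomes unreferenced) is
   deleted.  */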

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"

/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */

static rtx *jump_chain;
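
/* For example (illustrative only): if unconditional jumps J1 and J2,
   with uids 10 and 12, both jump to a label L with uid 5, then one
   possible chain is jump_chain[5] == J1, jump_chain[10] == J2, and
   jump_chain[12] == 0; walking the chain starting from the label's
   uid visits every unconditional jump to it.  */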

/* Maximum index in jump_chain.  */

static int max_jump_chain;

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C,
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;

static int init_label_info PARAMS ((rtx));
static void delete_barrier_successors PARAMS ((rtx));
static void mark_all_labels PARAMS ((rtx, int));
static rtx delete_unreferenced_labels PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static int duplicate_loop_exit_test PARAMS ((rtx));
static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump PARAMS ((rtx, rtx, rtx));
static int jump_back_p PARAMS ((rtx, rtx));
static int tension_vector_labels PARAMS ((rtx, int));
static void delete_computation PARAMS ((rtx));
static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
static int redirect_exp PARAMS ((rtx, rtx, rtx));
static void invert_exp_1 PARAMS ((rtx));
static int invert_exp PARAMS ((rtx));
static void delete_from_jump_chain PARAMS ((rtx));
static int delete_labelref_insn PARAMS ((rtx, rtx, int));
static void mark_modified_reg PARAMS ((rtx, rtx, void *));
static void redirect_tablejump PARAMS ((rtx, rtx));
static void jump_optimize_1 PARAMS ((rtx, int, int, int, int, int));
static int returnjump_p_1 PARAMS ((rtx *, void *));
static void delete_prior_computation PARAMS ((rtx, rtx));
\f
/* Main external entry point into the jump optimizer.  See comments before
   jump_optimize_1 for descriptions of the arguments.  */
void
jump_optimize (f, cross_jump, noop_moves, after_regscan)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
{
  jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
}

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 1, 0);
}

/* Alternate entry into the jump optimizer.  Do only trivial optimizations.  */

void
jump_optimize_minimal (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 0, 1);
}
\f
/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is 1, detect matching code
   before a jump and its destination and unify them.
   If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

   If NOOP_MOVES is nonzero, delete no-op move insns.

   If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
   after regscan, and it is safe to use regno_first_uid and regno_last_uid.

   If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
   and JUMP_LABEL field for jumping insns.

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.

   If MINIMAL is nonzero, then we only perform trivial optimizations:

     * Removal of unreachable code after BARRIERs.
     * Removal of unreferenced CODE_LABELs.
     * Removal of a jump to the next instruction.
     * Removal of a conditional jump followed by an unconditional jump
       to the same target as the conditional jump.
     * Simplify a conditional jump around an unconditional jump.
     * Simplify a jump to a jump.
     * Delete extraneous line number notes.  */

static void
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
                 mark_labels_only, minimal)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
     int mark_labels_only;
     int minimal;
{
  register rtx insn, next;
  int changed;
  int old_max_reg;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;
  enum rtx_code reversed_code;

  cross_jump_death_matters = (cross_jump == 2);
  max_uid = init_label_info (f) + 1;

  /* If we are performing cross jump optimizations, then initialize
     tables mapping UIDs to EH regions to avoid incorrect movement
     of insns from one EH region to another.  */
  if (flag_exceptions && cross_jump)
    init_insn_eh_region (f, max_uid);

  if (! mark_labels_only)
    delete_barrier_successors (f);

  /* Leave some extra room for labels and duplicate exit test insns
     we make.  */
  max_jump_chain = max_uid * 14 / 10;
  jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));

  mark_all_labels (f, cross_jump);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
      LABEL_NUSES (XEXP (insn, 0))++;

  check_exception_handler_labels ();

  /* Keep track of labels used for marking handlers for exception
     regions; they cannot usually be deleted.  */

  for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
    if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
      LABEL_NUSES (XEXP (insn, 0))++;

  /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
     notes and recompute LABEL_NUSES.  */
  if (mark_labels_only)
    goto end;

  if (! minimal)
    exception_optimize ();

  last_insn = delete_unreferenced_labels (f);

  if (noop_moves)
    delete_noop_moves (f);

  /* If we haven't yet gotten to reload and we have just run regscan,
     delete any insn that sets a register that isn't used elsewhere.
     This helps some of the optimizations below by having fewer insns
     being jumped around.  */

  if (optimize && ! reload_completed && after_regscan)
    for (insn = f; insn; insn = next)
      {
        rtx set = single_set (insn);

        next = NEXT_INSN (insn);

        if (set && GET_CODE (SET_DEST (set)) == REG
            && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
            && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
            /* We use regno_last_note_uid so as not to delete the setting
               of a reg that's used in notes.  A subsequent optimization
               might arrange to use that reg for real.  */
            && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
            && ! side_effects_p (SET_SRC (set))
            && ! find_reg_note (insn, REG_RETVAL, 0)
            /* An ADDRESSOF expression can turn into a use of the internal arg
               pointer, so do not delete the initialization of the internal
               arg pointer yet.  If it is truly dead, flow will delete the
               initializing insn.  */
            && SET_DEST (set) != current_function_internal_arg_pointer)
          delete_insn (insn);
      }

  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  changed = 1;
  old_max_reg = max_reg_num ();
  while (changed)
    {
      changed = 0;

      for (insn = f; insn; insn = next)
        {
          rtx reallabelprev;
          rtx temp, temp1, temp2 = NULL_RTX;
          rtx temp4 ATTRIBUTE_UNUSED;
          rtx nlabel;
          int this_is_any_uncondjump;
          int this_is_any_condjump;
          int this_is_onlyjump;

          next = NEXT_INSN (insn);

          /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
             jump.  Try to optimize by duplicating the loop exit test if so.
             This is only safe immediately after regscan, because it uses
             the values of regno_first_uid and regno_last_uid.  */
          if (after_regscan && GET_CODE (insn) == NOTE
              && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
              && (temp1 = next_nonnote_insn (insn)) != 0
              && any_uncondjump_p (temp1)
              && onlyjump_p (temp1))
            {
              temp = PREV_INSN (insn);
              if (duplicate_loop_exit_test (insn))
                {
                  changed = 1;
                  next = NEXT_INSN (temp);
                  continue;
                }
            }

          if (GET_CODE (insn) != JUMP_INSN)
            continue;

          this_is_any_condjump = any_condjump_p (insn);
          this_is_any_uncondjump = any_uncondjump_p (insn);
          this_is_onlyjump = onlyjump_p (insn);

          /* Tension the labels in dispatch tables.  */

          if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
            changed |= tension_vector_labels (PATTERN (insn), 0);
          if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            changed |= tension_vector_labels (PATTERN (insn), 1);

          /* See if this jump goes to another jump and redirect if so.  */
          nlabel = follow_jumps (JUMP_LABEL (insn));
          if (nlabel != JUMP_LABEL (insn))
            changed |= redirect_jump (insn, nlabel, 1);

          if (! optimize || minimal)
            continue;

          /* If a dispatch table always goes to the same place,
             get rid of it and replace the insn that uses it.  */

          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            {
              int i;
              rtx pat = PATTERN (insn);
              int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
              int len = XVECLEN (pat, diff_vec_p);
              rtx dispatch = prev_real_insn (insn);
              rtx set;

              for (i = 0; i < len; i++)
                if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
                    != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
                  break;

              if (i == len
                  && dispatch != 0
                  && GET_CODE (dispatch) == JUMP_INSN
                  && JUMP_LABEL (dispatch) != 0
                  /* Don't mess with a casesi insn.
                     XXX according to the comment before computed_jump_p(),
                     all casesi insns should be a parallel of the jump
                     and a USE of a LABEL_REF.  */
                  && ! ((set = single_set (dispatch)) != NULL
                        && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
                  && next_real_insn (JUMP_LABEL (dispatch)) == insn)
                {
                  redirect_tablejump (dispatch,
                                      XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
                  changed = 1;
                }
            }

          reallabelprev = prev_active_insn (JUMP_LABEL (insn));

          /* Detect jump to following insn.  */
          if (reallabelprev == insn
              && (this_is_any_condjump || this_is_any_uncondjump)
              && this_is_onlyjump)
            {
              next = next_real_insn (JUMP_LABEL (insn));
              delete_jump (insn);

              /* Remove the "inactive" but "real" insns (i.e. uses and
                 clobbers) in between here and there.  */
              temp = insn;
              while ((temp = next_real_insn (temp)) != next)
                delete_insn (temp);

              changed = 1;
              continue;
            }

          /* Detect a conditional jump going to the same place
             as an immediately following unconditional jump.  */
          else if (this_is_any_condjump && this_is_onlyjump
                   && (temp = next_active_insn (insn)) != 0
                   && simplejump_p (temp)
                   && (next_active_insn (JUMP_LABEL (insn))
                       == next_active_insn (JUMP_LABEL (temp))))
            {
              /* Don't mess up test coverage analysis.  */
              temp2 = temp;
              if (flag_test_coverage && !reload_completed)
                for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
                  if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
                    break;

              if (temp2 == temp)
                {
                  delete_jump (insn);
                  changed = 1;
                  continue;
                }
            }

          /* Detect a conditional jump jumping over an unconditional jump.  */

          else if (this_is_any_condjump
                   && reallabelprev != 0
                   && GET_CODE (reallabelprev) == JUMP_INSN
                   && prev_active_insn (reallabelprev) == insn
                   && no_labels_between_p (insn, reallabelprev)
                   && any_uncondjump_p (reallabelprev)
                   && onlyjump_p (reallabelprev))
            {
              /* When we invert the unconditional jump, we will be
                 decrementing the usage count of its old label.
                 Make sure that we don't delete it now because that
                 might cause the following code to be deleted.  */
              rtx prev_uses = prev_nonnote_insn (reallabelprev);
              rtx prev_label = JUMP_LABEL (insn);

              if (prev_label)
                ++LABEL_NUSES (prev_label);

              if (invert_jump (insn, JUMP_LABEL (reallabelprev), 1))
                {
                  /* It is very likely that if there are USE insns before
                     this jump, they hold REG_DEAD notes.  These REG_DEAD
                     notes are no longer valid due to this optimization,
                     and will cause the life analysis of following passes
                     (notably delayed-branch scheduling) to think that
                     these registers are dead when they are not.

                     To prevent this trouble, we just remove the USE insns
                     from the insn chain.  */

                  while (prev_uses && GET_CODE (prev_uses) == INSN
                         && GET_CODE (PATTERN (prev_uses)) == USE)
                    {
                      rtx useless = prev_uses;
                      prev_uses = prev_nonnote_insn (prev_uses);
                      delete_insn (useless);
                    }

                  delete_insn (reallabelprev);
                  changed = 1;
                }

              /* We can now safely delete the label if it is unreferenced
                 since the delete_insn above has deleted the BARRIER.  */
              if (prev_label && --LABEL_NUSES (prev_label) == 0)
                delete_insn (prev_label);

              next = NEXT_INSN (insn);
            }

          /* If we have an unconditional jump preceded by a USE, try to put
             the USE before the target and jump there.  This simplifies many
             of the optimizations below since we don't have to worry about
             dealing with these USE insns.  We only do this if the label
             being branched to already has the identical USE or if code
             never falls through to that label.  */

          else if (this_is_any_uncondjump
                   && (temp = prev_nonnote_insn (insn)) != 0
                   && GET_CODE (temp) == INSN
                   && GET_CODE (PATTERN (temp)) == USE
                   && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
                   && (GET_CODE (temp1) == BARRIER
                       || (GET_CODE (temp1) == INSN
                           && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
                   /* Don't do this optimization if we have a loop containing
                      only the USE instruction, and the loop start label has
                      a usage count of 1.  This is because we will redo this
                      optimization every time through the outer loop, and jump
                      opt will never exit.  */
                   && ! ((temp2 = prev_nonnote_insn (temp)) != 0
                         && temp2 == JUMP_LABEL (insn)
                         && LABEL_NUSES (temp2) == 1))
            {
              if (GET_CODE (temp1) == BARRIER)
                {
                  emit_insn_after (PATTERN (temp), temp1);
                  temp1 = NEXT_INSN (temp1);
                }

              delete_insn (temp);
              redirect_jump (insn, get_label_before (temp1), 1);
              reallabelprev = prev_real_insn (temp1);
              changed = 1;
              next = NEXT_INSN (insn);
            }
#ifdef HAVE_trap
          /* Detect a conditional jump jumping over an unconditional trap.  */
          if (HAVE_trap
              && this_is_any_condjump && this_is_onlyjump
              && reallabelprev != 0
              && GET_CODE (reallabelprev) == INSN
              && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
              && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
              && prev_active_insn (reallabelprev) == insn
              && no_labels_between_p (insn, reallabelprev)
              && (temp2 = get_condition (insn, &temp4))
              && ((reversed_code = reversed_comparison_code (temp2, insn))
                  != UNKNOWN))
            {
              rtx new = gen_cond_trap (reversed_code,
                                       XEXP (temp2, 0), XEXP (temp2, 1),
                                       TRAP_CODE (PATTERN (reallabelprev)));

              if (new)
                {
                  emit_insn_before (new, temp4);
                  delete_insn (reallabelprev);
                  delete_jump (insn);
                  changed = 1;
                  continue;
                }
            }
          /* Detect a jump jumping to an unconditional trap.  */
          else if (HAVE_trap && this_is_onlyjump
                   && (temp = next_active_insn (JUMP_LABEL (insn)))
                   && GET_CODE (temp) == INSN
                   && GET_CODE (PATTERN (temp)) == TRAP_IF
                   && (this_is_any_uncondjump
                       || (this_is_any_condjump
                           && (temp2 = get_condition (insn, &temp4)))))
            {
              rtx tc = TRAP_CONDITION (PATTERN (temp));

              if (tc == const_true_rtx
                  || (! this_is_any_uncondjump && rtx_equal_p (temp2, tc)))
                {
                  rtx new;
                  /* Replace an unconditional jump to a trap with a trap.  */
                  if (this_is_any_uncondjump)
                    {
                      emit_barrier_after (emit_insn_before (gen_trap (), insn));
                      delete_jump (insn);
                      changed = 1;
                      continue;
                    }
                  new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
                                       XEXP (temp2, 1),
                                       TRAP_CODE (PATTERN (temp)));
                  if (new)
                    {
                      emit_insn_before (new, temp4);
                      delete_jump (insn);
                      changed = 1;
                      continue;
                    }
                }
              /* If the trap condition and jump condition are mutually
                 exclusive, redirect the jump to the following insn.  */
              else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
                       && this_is_any_condjump
                       && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
                       && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
                       && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
                       && redirect_jump (insn, get_label_after (temp), 1))
                {
                  changed = 1;
                  continue;
                }
            }
#endif
          else
            {
              /* Now that the jump has been tensioned,
                 try cross jumping: check for identical code
                 before the jump and before its target label.  */

              /* First, cross jumping of conditional jumps:  */

              if (cross_jump && condjump_p (insn))
                {
                  rtx newjpos, newlpos;
                  rtx x = prev_real_insn (JUMP_LABEL (insn));

                  /* A conditional jump may be crossjumped
                     only if the place it jumps to follows
                     an opposing jump that comes back here.  */

                  if (x != 0 && ! jump_back_p (x, insn))
                    /* We have no opposing jump;
                       cannot cross jump this insn.  */
                    x = 0;

                  newjpos = 0;
                  /* TARGET is nonzero if it is ok to cross jump
                     to code before TARGET.  If so, see if it matches.  */
                  if (x != 0)
                    find_cross_jump (insn, x, 2,
                                     &newjpos, &newlpos);

                  if (newjpos != 0)
                    {
                      do_cross_jump (insn, newjpos, newlpos);
                      /* Make the old conditional jump
                         into an unconditional one.  */
                      SET_SRC (PATTERN (insn))
                        = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
                      INSN_CODE (insn) = -1;
                      emit_barrier_after (insn);
                      /* Add to jump_chain unless this is a new label
                         whose UID is too large.  */
                      if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
                        {
                          jump_chain[INSN_UID (insn)]
                            = jump_chain[INSN_UID (JUMP_LABEL (insn))];
                          jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
                        }
                      changed = 1;
                      next = insn;
                    }
                }

              /* Cross jumping of unconditional jumps:
                 a few differences.  */

              if (cross_jump && simplejump_p (insn))
                {
                  rtx newjpos, newlpos;
                  rtx target;

                  newjpos = 0;

                  /* TARGET is nonzero if it is ok to cross jump
                     to code before TARGET.  If so, see if it matches.  */
                  find_cross_jump (insn, JUMP_LABEL (insn), 1,
                                   &newjpos, &newlpos);

                  /* If we cannot cross jump to code before the label,
                     see if we can cross jump to another jump to
                     the same label.  */
                  /* Try each other jump to this label.  */
                  if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
                    for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
                         target != 0 && newjpos == 0;
                         target = jump_chain[INSN_UID (target)])
                      if (target != insn
                          && JUMP_LABEL (target) == JUMP_LABEL (insn)
                          /* Ignore TARGET if it's deleted.  */
                          && ! INSN_DELETED_P (target))
                        find_cross_jump (insn, target, 2,
                                         &newjpos, &newlpos);

                  if (newjpos != 0)
                    {
                      do_cross_jump (insn, newjpos, newlpos);
                      changed = 1;
                      next = insn;
                    }
                }

              /* This code was dead in the previous jump.c!  */
              if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
                {
                  /* Return insns all "jump to the same place"
                     so we can cross-jump between any two of them.  */

                  rtx newjpos, newlpos, target;

                  newjpos = 0;

                  /* If we cannot cross jump to code before the label,
                     see if we can cross jump to another jump to
                     the same label.  */
                  /* Try each other jump to this label.  */
                  for (target = jump_chain[0];
                       target != 0 && newjpos == 0;
                       target = jump_chain[INSN_UID (target)])
                    if (target != insn
                        && ! INSN_DELETED_P (target)
                        && GET_CODE (PATTERN (target)) == RETURN)
                      find_cross_jump (insn, target, 2,
                                       &newjpos, &newlpos);

                  if (newjpos != 0)
                    {
                      do_cross_jump (insn, newjpos, newlpos);
                      changed = 1;
                      next = insn;
                    }
                }
            }
        }

      first = 0;
    }

  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  {
    rtx last_note = 0;

    for (insn = f; insn; insn = NEXT_INSN (insn))
      if (GET_CODE (insn) == NOTE)
        {
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
            /* Any previous line note was for the prologue; gdb wants a new
               note after the prologue even if it is for the same line.  */
            last_note = NULL_RTX;
          else if (NOTE_LINE_NUMBER (insn) >= 0)
            {
              /* Delete this note if it is identical to previous note.  */
              if (last_note
                  && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
                  && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
                {
                  delete_insn (insn);
                  continue;
                }

              last_note = insn;
            }
        }
  }

end:
  /* Clean up.  */
  free (jump_chain);
  jump_chain = 0;
}
\f
/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  Returns the
   largest INSN_UID found.  */
static int
init_label_info (f)
     rtx f;
{
  int largest_uid = 0;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
        LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
      else if (GET_CODE (insn) == JUMP_INSN)
        JUMP_LABEL (insn) = 0;
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx note, next;

          for (note = REG_NOTES (insn); note; note = next)
            {
              next = XEXP (note, 1);
              if (REG_NOTE_KIND (note) == REG_LABEL
                  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
                remove_note (insn, note);
            }
        }
      if (INSN_UID (insn) > largest_uid)
        largest_uid = INSN_UID (insn);
    }

  return largest_uid;
}

/* Delete insns following barriers, up to the next label.

   Also delete no-op jumps created by gcse.  */

static void
delete_barrier_successors (f)
     rtx f;
{
  rtx insn;
  rtx set;

  for (insn = f; insn;)
    {
      if (GET_CODE (insn) == BARRIER)
        {
          insn = NEXT_INSN (insn);

          never_reached_warning (insn);

          while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
            {
              if (GET_CODE (insn) == NOTE
                  && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
                insn = NEXT_INSN (insn);
              else
                insn = delete_insn (insn);
            }
          /* INSN is now the code_label.  */
        }

      /* Also remove (set (pc) (pc)) insns which can be created by
         gcse.  We eliminate such insns now to avoid having them
         cause problems later.  */
      else if (GET_CODE (insn) == JUMP_INSN
               && (set = pc_set (insn)) != NULL
               && SET_SRC (set) == pc_rtx
               && SET_DEST (set) == pc_rtx
               && onlyjump_p (insn))
        insn = delete_insn (insn);

      else
        insn = NEXT_INSN (insn);
    }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.

   For each label, make a chain (using `jump_chain')
   of all the *unconditional* jumps that jump to it;
   also make a chain of all returns.

   CROSS_JUMP indicates whether we are doing cross jumping
   and, if we are, whether we will be paying attention to
   death notes or not.  */

static void
mark_all_labels (f, cross_jump)
     rtx f;
     int cross_jump;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        if (GET_CODE (insn) == CALL_INSN
            && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
          {
            mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
            mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
            mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);
            continue;
          }

        mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
        if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
          {
            /* When we know the LABEL_REF contained in a REG used in
               an indirect jump, we'll have a REG_LABEL note so that
               flow can tell where it's going.  */
            if (JUMP_LABEL (insn) == 0)
              {
                rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
                if (label_note)
                  {
                    /* Put a LABEL_REF around the REG_LABEL note, so
                       that we can canonicalize it.  */
                    rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
                                                       XEXP (label_note, 0));

                    mark_jump_label (label_ref, insn, cross_jump, 0);
                    XEXP (label_note, 0) = XEXP (label_ref, 0);
                    JUMP_LABEL (insn) = XEXP (label_note, 0);
                  }
              }
            if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
              {
                jump_chain[INSN_UID (insn)]
                  = jump_chain[INSN_UID (JUMP_LABEL (insn))];
                jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
              }
            if (GET_CODE (PATTERN (insn)) == RETURN)
              {
                jump_chain[INSN_UID (insn)] = jump_chain[0];
                jump_chain[0] = insn;
              }
          }
      }
}

/* Delete all labels that are no longer referenced.
   Also find and return the last insn.  */

static rtx
delete_unreferenced_labels (f)
     rtx f;
{
  rtx final = NULL_RTX;
  rtx insn;

  for (insn = f; insn;)
    {
      if (GET_CODE (insn) == CODE_LABEL
          && LABEL_NUSES (insn) == 0
          && LABEL_ALTERNATE_NAME (insn) == NULL)
        insn = delete_insn (insn);
      else
        {
          final = insn;
          insn = NEXT_INSN (insn);
        }
    }

  return final;
}

/* Delete various simple forms of moves which have no necessary
   side effect.  */

static void
delete_noop_moves (f)
     rtx f;
{
  rtx insn, next;

  for (insn = f; insn;)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN)
        {
          register rtx body = PATTERN (insn);

          /* Detect and delete no-op move instructions
             resulting from not allocating a parameter in a register.  */

          if (GET_CODE (body) == SET
              && (SET_DEST (body) == SET_SRC (body)
                  || (GET_CODE (SET_DEST (body)) == MEM
                      && GET_CODE (SET_SRC (body)) == MEM
                      && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
              && ! (GET_CODE (SET_DEST (body)) == MEM
                    && MEM_VOLATILE_P (SET_DEST (body)))
              && ! (GET_CODE (SET_SRC (body)) == MEM
                    && MEM_VOLATILE_P (SET_SRC (body))))
            delete_computation (insn);

          /* Detect and ignore no-op move instructions
             resulting from smart or fortuitous register allocation.  */

          else if (GET_CODE (body) == SET)
            {
              int sreg = true_regnum (SET_SRC (body));
              int dreg = true_regnum (SET_DEST (body));

              if (sreg == dreg && sreg >= 0)
                delete_insn (insn);
              else if (sreg >= 0 && dreg >= 0)
                {
                  rtx trial;
                  rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
                                            sreg, NULL_PTR, dreg,
                                            GET_MODE (SET_SRC (body)));

                  if (tem != 0
                      && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
                    {
                      /* DREG may have been the target of a REG_DEAD note in
                         the insn which makes INSN redundant.  If so, reorg
                         would still think it is dead.  So search for such a
                         note and delete it if we find it.  */
                      if (! find_regno_note (insn, REG_UNUSED, dreg))
                        for (trial = prev_nonnote_insn (insn);
                             trial && GET_CODE (trial) != CODE_LABEL;
                             trial = prev_nonnote_insn (trial))
                          if (find_regno_note (trial, REG_DEAD, dreg))
                            {
                              remove_death (dreg, trial);
                              break;
                            }

                      /* Deleting this insn could lose a death note for
                         SREG.  */
                      if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
                        {
                          /* Change this into a USE so that we won't emit
                             code for it, but still can keep the note.  */
                          PATTERN (insn)
                            = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
                          INSN_CODE (insn) = -1;
                          /* Remove all reg notes but the REG_DEAD one.  */
                          REG_NOTES (insn) = trial;
                          XEXP (trial, 1) = NULL_RTX;
                        }
                      else
                        delete_insn (insn);
                    }
                }
              else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
                       && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
                                          NULL_PTR, 0,
                                          GET_MODE (SET_DEST (body))))
                {
                  /* This handles the case where we have two consecutive
                     assignments of the same constant to pseudos that didn't
                     get a hard reg.  Each SET from the constant will be
                     converted into a SET of the spill register and an
                     output reload will be made following it.  This produces
                     two loads of the same constant into the same spill
                     register.  */

                  rtx in_insn = insn;

                  /* Look back for a death note for the first reg.
                     If there is one, it is no longer accurate.  */
                  while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
                    {
                      if ((GET_CODE (in_insn) == INSN
                           || GET_CODE (in_insn) == JUMP_INSN)
                          && find_regno_note (in_insn, REG_DEAD, dreg))
                        {
                          remove_death (dreg, in_insn);
                          break;
                        }
                      in_insn = PREV_INSN (in_insn);
                    }

                  /* Delete the second load of the value.  */
                  delete_insn (insn);
                }
            }
          else if (GET_CODE (body) == PARALLEL)
            {
              /* If each part is a set between two identical registers or
                 a USE or CLOBBER, delete the insn.  */
              int i, sreg, dreg;
              rtx tem;

              for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
                {
                  tem = XVECEXP (body, 0, i);
                  if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
                    continue;

                  if (GET_CODE (tem) != SET
                      || (sreg = true_regnum (SET_SRC (tem))) < 0
                      || (dreg = true_regnum (SET_DEST (tem))) < 0
                      || dreg != sreg)
                    break;
                }

              if (i < 0)
                delete_insn (insn);
            }
          /* Also delete insns to store bit fields if they are no-ops.  */
          /* Not worth the hair to detect this in the big-endian case.  */
          else if (! BYTES_BIG_ENDIAN
                   && GET_CODE (body) == SET
                   && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
                   && XEXP (SET_DEST (body), 2) == const0_rtx
                   && XEXP (SET_DEST (body), 0) == SET_SRC (body)
                   && ! (GET_CODE (SET_SRC (body)) == MEM
                         && MEM_VOLATILE_P (SET_SRC (body))))
            delete_insn (insn);
        }
      insn = next;
    }
}
/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
   jump.  Assume that this unconditional jump is to the exit test code.  If
   the code is sufficiently simple, make a copy of it before LOOP_START,
   followed by a jump to the exit of the loop.  Then delete the unconditional
   jump after LOOP_START.

   Return 1 if we made the change, else 0.

   This is only safe immediately after a regscan pass because it uses the
   values of regno_first_uid and regno_last_uid.  */
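
/* Schematically (an illustrative sketch, not literal RTL):

       NOTE_INSN_LOOP_BEG                copy of exit test, jumping out
       jump L2       <- deleted          NOTE_INSN_LOOP_BEG
     L1: loop body             -->     L1: loop body
     L2: exit test                     L2: exit test  (now LOOP_VTOP)
         cond jump L1                      cond jump L1

   Execution now falls into the loop body (when the copied test does
   not exit) without the original unconditional jump to L2, which is
   deleted.  */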

static int
duplicate_loop_exit_test (loop_start)
     rtx loop_start;
{
  rtx insn, set, reg, p, link;
  rtx copy = 0, first_copy = 0;
  int num_insns = 0;
  rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
  rtx lastexit;
  int max_reg = max_reg_num ();
  rtx *reg_map = 0;

  /* Scan the exit code.  We do not perform this optimization if any insn:

         is a CALL_INSN
         is a CODE_LABEL
         has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
         is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
         is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
              is not valid.

     We also do not do this if we find an insn with ASM_OPERANDS.  While
     this restriction should not be necessary, copying an insn with
     ASM_OPERANDS can confuse asm_noperands in some cases.

     Also, don't do this if the exit code is more than 20 insns.  */

  for (insn = exitcode;
       insn
       && ! (GET_CODE (insn) == NOTE
             && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case CODE_LABEL:
        case CALL_INSN:
          return 0;
        case NOTE:
          /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
             a jump immediately after the loop start that branches outside
             the loop but within an outer loop, near the exit test.
             If we copied this exit test and created a phony
             NOTE_INSN_LOOP_VTOP, this could make instructions immediately
             before the exit test look as if they could safely be moved
             out of the loop even though they may never actually be
             executed.  This can be avoided by checking here for
             NOTE_INSN_LOOP_CONT.  */

          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
            return 0;

          if (optimize < 2
              && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
                  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
            /* If we were to duplicate this code, we would not move
               the BLOCK notes, and so debugging the moved code would
               be difficult.  Thus, we only move the code with -O2 or
               higher.  */
            return 0;

          break;
        case JUMP_INSN:
        case INSN:
          /* The code below would grossly mishandle REG_WAS_0 notes,
             so get rid of them here.  */
          while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
            remove_note (insn, p);
          if (++num_insns > 20
              || find_reg_note (insn, REG_RETVAL, NULL_RTX)
              || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
            return 0;
          break;
        default:
          break;
        }
    }

  /* Unless INSN is zero, we can do the optimization.  */
  if (insn == 0)
    return 0;

  lastexit = insn;

  /* See if any insn sets a register only used in the loop exit code and
     not a user variable.  If so, replace it with a new register.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
        && (set = single_set (insn)) != 0
        && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
            || (GET_CODE (reg) == SUBREG
                && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
        && REGNO (reg) >= FIRST_PSEUDO_REGISTER
        && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
      {
        for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
          if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
            break;

        if (p != lastexit)
          {
            /* We can do the replacement.  Allocate reg_map if this is the
               first replacement we found.  */
            if (reg_map == 0)
              reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));

            REG_LOOP_TEST_P (reg) = 1;

            reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
          }
      }

  /* Now copy each insn.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case BARRIER:
          copy = emit_barrier_before (loop_start);
          break;
        case NOTE:
          /* Only copy line-number notes.  */
          if (NOTE_LINE_NUMBER (insn) >= 0)
            {
              copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
              NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
            }
          break;

        case INSN:
          copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
          if (reg_map)
            replace_regs (PATTERN (copy), reg_map, max_reg, 1);

          mark_jump_label (PATTERN (copy), copy, 0, 0);

          /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
             make them.  */
          for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
            if (REG_NOTE_KIND (link) != REG_LABEL)
              {
                if (GET_CODE (link) == EXPR_LIST)
                  REG_NOTES (copy)
                    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
                                                      XEXP (link, 0),
                                                      REG_NOTES (copy)));
                else
                  REG_NOTES (copy)
                    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
                                                      XEXP (link, 0),
                                                      REG_NOTES (copy)));
              }

          if (reg_map && REG_NOTES (copy))
            replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
          break;

        case JUMP_INSN:
          copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
                                        loop_start);
          if (reg_map)
            replace_regs (PATTERN (copy), reg_map, max_reg, 1);
          mark_jump_label (PATTERN (copy), copy, 0, 0);
          if (REG_NOTES (insn))
            {
              REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
              if (reg_map)
                replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
            }

          /* If this is a simple jump, add it to the jump chain.  */

          if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
              && simplejump_p (copy))
            {
              jump_chain[INSN_UID (copy)]
                = jump_chain[INSN_UID (JUMP_LABEL (copy))];
              jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
            }
          break;

        default:
          abort ();
        }

      /* Record the first insn we copied.  We need it so that we can
         scan the copied insns for new pseudo registers.  */
      if (! first_copy)
        first_copy = copy;
    }

  /* Now clean up by emitting a jump to the end label and deleting the jump
     at the start of the loop.  */
  if (! copy || GET_CODE (copy) != BARRIER)
    {
      copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
                                    loop_start);

      /* Record the first insn we copied.  We need it so that we can
         scan the copied insns for new pseudo registers.  This may not
         be strictly necessary since we should have copied at least one
         insn above.  But I am going to be safe.  */
      if (! first_copy)
        first_copy = copy;

      mark_jump_label (PATTERN (copy), copy, 0, 0);
      if (INSN_UID (copy) < max_jump_chain
          && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
        {
          jump_chain[INSN_UID (copy)]
            = jump_chain[INSN_UID (JUMP_LABEL (copy))];
          jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
        }
      emit_barrier_before (loop_start);
    }

  /* Now scan from the first insn we copied to the last insn we copied
     (copy) for new pseudo registers.  Do this after the code to jump to
     the end label since that might create a new pseudo too.  */
  reg_scan_update (first_copy, copy, max_reg);

  /* Mark the exit code as the virtual top of the converted loop.  */
  emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);

  delete_insn (next_nonnote_insn (loop_start));

  /* Clean up.  */
  if (reg_map)
    free (reg_map);

  return 1;
}
\f
/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
   loop-end notes between START and END out before START.  Assume that END
   is not such a note.  START may be such a note.  Returns the value of the
   new starting insn, which may be different if the original start was such
   a note.  */

rtx
squeeze_notes (start, end)
     rtx start, end;
{
  rtx insn;
  rtx next;

  for (insn = start; insn != end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE
          && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
        {
          if (insn == start)
            start = next;
          else
            {
              rtx prev = PREV_INSN (insn);
              PREV_INSN (insn) = PREV_INSN (start);
              NEXT_INSN (insn) = start;
              NEXT_INSN (PREV_INSN (insn)) = insn;
              PREV_INSN (NEXT_INSN (insn)) = insn;
              NEXT_INSN (prev) = next;
              PREV_INSN (next) = prev;
            }
        }
    }

  return start;
}
\f
/* Compare the instructions before insn E1 with those before E2
   to find an opportunity for cross jumping.
   (This means detecting identical sequences of insns followed by
   jumps to the same place, or followed by a label and a jump
   to that label, and replacing one with a jump to the other.)

   Assume E1 is a jump that jumps to label E2
   (that is not always true but it might as well be).
   Find the longest possible equivalent sequences
   and store the first insns of those sequences into *F1 and *F2.
   Store zero there if no equivalent preceding instructions are found.

   We give up if we find a label in stream 1.
   Actually we could transfer that label into stream 2.  */
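
/* For example (an illustrative sketch, not literal RTL):

       insn A                insn A'   (matches A)
       insn B                insn B'   (matches B)
       jump L   <- E1      L: ...      <- E2

   The matching sequences A;B and A';B' are found, *F1 is set to A and
   *F2 to A', and the caller then redirects E1 to a label placed before
   A' and deletes the duplicates A and B (see do_cross_jump below).  */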

static void
find_cross_jump (e1, e2, minimum, f1, f2)
     rtx e1, e2;
     int minimum;
     rtx *f1, *f2;
{
  register rtx i1 = e1, i2 = e2;
  register rtx p1, p2;
  int lose = 0;

  rtx last1 = 0, last2 = 0;
  rtx afterlast1 = 0, afterlast2 = 0;

  *f1 = 0;
  *f2 = 0;

  while (1)
    {
      i1 = prev_nonnote_insn (i1);

      i2 = PREV_INSN (i2);
      while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
        i2 = PREV_INSN (i2);

      if (i1 == 0)
        break;

      /* Don't allow the range of insns preceding E1 or E2
         to include the other (E2 or E1).  */
      if (i2 == e1 || i1 == e2)
        break;

      /* If we will get to this code by jumping, those jumps will be
         tensioned to go directly to the new label (before I2),
         so this cross-jumping won't cost extra.  So reduce the minimum.  */
      if (GET_CODE (i1) == CODE_LABEL)
        {
          --minimum;
          break;
        }

      if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
        break;

      /* Avoid moving insns across EH regions if either of the insns
         can throw.  */
      if (flag_exceptions
          && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
          && !in_same_eh_region (i1, i2))
        break;

      p1 = PATTERN (i1);
      p2 = PATTERN (i2);

      /* If this is a CALL_INSN, compare register usage information.
         If we don't check this on stack register machines, the two
         CALL_INSNs might be merged leaving reg-stack.c with mismatching
         numbers of stack registers in the same basic block.
         If we don't check this on machines with delay slots, a delay slot may
         be filled that clobbers a parameter expected by the subroutine.

         ??? We take the simple route for now and assume that if they're
         equal, they were constructed identically.  */

      if (GET_CODE (i1) == CALL_INSN
          && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
                            CALL_INSN_FUNCTION_USAGE (i2)))
        lose = 1;

#ifdef STACK_REGS
      /* If cross_jump_death_matters is not 0, the insn's mode
         indicates whether or not the insn contains any stack-like
         regs.  */

      if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
        {
          /* If register stack conversion has already been done, then
             death notes must also be compared before it is certain that
             the two instruction streams match.  */

          rtx note;
          HARD_REG_SET i1_regset, i2_regset;

          CLEAR_HARD_REG_SET (i1_regset);
          CLEAR_HARD_REG_SET (i2_regset);

          for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
            if (REG_NOTE_KIND (note) == REG_DEAD
                && STACK_REG_P (XEXP (note, 0)))
              SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

          for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
            if (REG_NOTE_KIND (note) == REG_DEAD
                && STACK_REG_P (XEXP (note, 0)))
              SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

          GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);

          lose = 1;

        done:
          ;
        }
#endif

      /* Don't allow old-style asm or volatile extended asms to be accepted
         for cross jumping purposes.  It is conceptually correct to allow
         them, since cross-jumping preserves the dynamic instruction order
         even though it is changing the static instruction order.  However,
         if an asm is being used to emit an assembler pseudo-op, such as
         the MIPS `.set reorder' pseudo-op, then the static instruction order
         matters and it must be preserved.  */
      if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
          || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
          || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
        lose = 1;

      if (lose || GET_CODE (p1) != GET_CODE (p2)
          || ! rtx_renumbered_equal_p (p1, p2))
        {
          /* The following code helps take care of G++ cleanups.  */
          rtx equiv1;
          rtx equiv2;

          if (!lose && GET_CODE (p1) == GET_CODE (p2)
              && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
                  || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
              && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
                  || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
              /* If the equivalences are not to a constant, they may
                 reference pseudos that no longer exist, so we can't
                 use them.  */
              && CONSTANT_P (XEXP (equiv1, 0))
              && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
            {
              rtx s1 = single_set (i1);
              rtx s2 = single_set (i2);
              if (s1 != 0 && s2 != 0
                  && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
                {
                  validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
                  validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
                  if (! rtx_renumbered_equal_p (p1, p2))
                    cancel_changes (0);
                  else if (apply_change_group ())
                    goto win;
                }
            }

          /* Insns fail to match; cross jumping is limited to the following
             insns.  */

#ifdef HAVE_cc0
          /* Don't allow the insn after a compare to be shared by
             cross-jumping unless the compare is also shared.
             Here, if either of these non-matching insns is a compare,
             exclude the following insn from possible cross-jumping.  */
          if (sets_cc0_p (p1) || sets_cc0_p (p2))
            last1 = afterlast1, last2 = afterlast2, ++minimum;
#endif

          /* If cross-jumping here will feed a jump-around-jump
             optimization, this jump won't cost extra, so reduce
             the minimum.  */
          if (GET_CODE (i1) == JUMP_INSN
              && JUMP_LABEL (i1)
              && prev_real_insn (JUMP_LABEL (i1)) == e1)
            --minimum;
          break;
        }

    win:
      if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
        {
          /* Ok, this insn is potentially includable in a cross-jump here.  */
          afterlast1 = last1, afterlast2 = last2;
          last1 = i1, last2 = i2, --minimum;
        }
    }

  if (minimum <= 0 && last1 != 0 && last1 != e1)
    *f1 = last1, *f2 = last2;
}

static void
do_cross_jump (insn, newjpos, newlpos)
     rtx insn, newjpos, newlpos;
{
  /* Find an existing label at this point
     or make a new one if there is none.  */
  register rtx label = get_label_before (newlpos);

  /* Make the same jump insn jump to the new point.  */
  if (GET_CODE (PATTERN (insn)) == RETURN)
    {
      /* Remove from jump chain of returns.  */
      delete_from_jump_chain (insn);
      /* Change the insn.  */
      PATTERN (insn) = gen_jump (label);
      INSN_CODE (insn) = -1;
      JUMP_LABEL (insn) = label;
      LABEL_NUSES (label)++;
      /* Add to the new jump chain.  */
      if (INSN_UID (label) < max_jump_chain
          && INSN_UID (insn) < max_jump_chain)
        {
          jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
          jump_chain[INSN_UID (label)] = insn;
        }
    }
  else
    redirect_jump (insn, label, 1);

  /* Delete the matching insns before the jump.  Also, remove any REG_EQUAL
     or REG_EQUIV note in the NEWLPOS stream that isn't also present in
     the NEWJPOS stream.  */

  while (newjpos != insn)
    {
      rtx lnote;

      for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
        if ((REG_NOTE_KIND (lnote) == REG_EQUAL
             || REG_NOTE_KIND (lnote) == REG_EQUIV)
            && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
            && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
          remove_note (newlpos, lnote);

      delete_insn (newjpos);
      newjpos = next_real_insn (newjpos);
      newlpos = next_real_insn (newlpos);
    }
}
\f
/* Return the label before INSN, or put a new label there.  */

rtx
get_label_before (insn)
     rtx insn;
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      rtx prev = PREV_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
    }
  return label;
}

/* Return the label after INSN, or put a new label there.  */

rtx
get_label_after (insn)
     rtx insn;
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = next_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      label = gen_label_rtx ();
      emit_label_after (label, insn);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
\f
/* Return 1 if INSN is a jump that jumps to right after TARGET
   only on the condition that TARGET itself would drop through.
   Assumes that TARGET is a conditional jump.  */

static int
jump_back_p (insn, target)
     rtx insn, target;
{
  rtx cinsn, ctarget;
  enum rtx_code codei, codet;
  rtx set, tset;

  if (! any_condjump_p (insn)
      || any_uncondjump_p (target)
      || target != prev_real_insn (JUMP_LABEL (insn)))
    return 0;
  set = pc_set (insn);
  tset = pc_set (target);

  cinsn = XEXP (SET_SRC (set), 0);
  ctarget = XEXP (SET_SRC (tset), 0);

  codei = GET_CODE (cinsn);
  codet = GET_CODE (ctarget);

  if (XEXP (SET_SRC (set), 1) == pc_rtx)
    {
      codei = reversed_comparison_code (cinsn, insn);
      if (codei == UNKNOWN)
        return 0;
    }

  if (XEXP (SET_SRC (tset), 2) == pc_rtx)
    {
      codet = reversed_comparison_code (ctarget, target);
      if (codet == UNKNOWN)
        return 0;
    }

  return (codei == codet
          && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
          && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
}
\f
/* Given a comparison (CODE ARG0 ARG1) inside an insn, INSN, return the code
   of the reversed comparison if it is possible to do so.  Otherwise return
   UNKNOWN.  UNKNOWN may be returned in case we are having a CC_MODE compare
   and we don't know whether its source is a floating point or an integer
   comparison.  The machine description should define the REVERSIBLE_CC_MODE
   and REVERSE_CONDITION macros to help this function avoid overhead in these
   cases.  */
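/* For example, (EQ x y) always reverses to (NE x y).  But on IEEE
   floating point, (LT x y) must not be reversed to (GE x y): both are
   false when an operand is a NaN.  Unless the mode is known to be
   integral (or IEEE compatibility has been given up), UNKNOWN is
   returned for such codes.  */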
1708 enum rtx_code
1709 reversed_comparison_code_parts (code, arg0, arg1, insn)
1710 rtx insn, arg0, arg1;
1711 enum rtx_code code;
1712 {
1713 enum machine_mode mode;
1714
1715 /* If this is not actually a comparison, we can't reverse it. */
1716 if (GET_RTX_CLASS (code) != '<')
1717 return UNKNOWN;
1718
1719 mode = GET_MODE (arg0);
1720 if (mode == VOIDmode)
1721 mode = GET_MODE (arg1);
1722
1723 /* First see if machine description supply us way to reverse the comparison.
1724 Give it priority over everything else to allow machine description to do
1725 tricks. */
1726 #ifdef REVERSIBLE_CC_MODE
1727 if (GET_MODE_CLASS (mode) == MODE_CC
1728 && REVERSIBLE_CC_MODE (mode))
1729 {
1730 #ifdef REVERSE_CONDITION
1731 return REVERSE_CONDITION (code, mode);
1732 #endif
1733 return reverse_condition (code);
1734 }
1735 #endif
1736
1737 /* Try few special cases based on the comparison code. */
1738 switch (code)
1739 {
1740 case GEU:
1741 case GTU:
1742 case LEU:
1743 case LTU:
1744 case NE:
1745 case EQ:
1746 /* It is always safe to reverse EQ and NE, even for the floating
1747 point. Similary the unsigned comparisons are never used for
1748 floating point so we can reverse them in the default way. */
1749 return reverse_condition (code);
1750 case ORDERED:
1751 case UNORDERED:
1752 case LTGT:
1753 case UNEQ:
1754 /* In case we already see unordered comparison, we can be sure to
1755 be dealing with floating point so we don't need any more tests. */
1756 return reverse_condition_maybe_unordered (code);
1757 case UNLT:
1758 case UNLE:
1759 case UNGT:
1760 case UNGE:
1761 /* We don't have safe way to reverse these yet. */
1762 return UNKNOWN;
1763 default:
1764 break;
1765 }
1766
1767 /* In case we give up IEEE compatibility, all comparisons are reversible. */
1768 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
1769 || flag_fast_math)
1770 return reverse_condition (code);
1771
1772 if (GET_MODE_CLASS (mode) == MODE_CC
1773 #ifdef HAVE_cc0
1774 || arg0 == cc0_rtx
1775 #endif
1776 )
1777 {
1778 rtx prev;
1779 /* Try to search for the comparison to determine the real mode.
1780 This code is expensive, but with sane machine description it
1781 will be never used, since REVERSIBLE_CC_MODE will return true
1782 in all cases. */
1783 if (! insn)
1784 return UNKNOWN;
1785
1786 for (prev = prev_nonnote_insn (insn);
1787 prev != 0 && GET_CODE (prev) != CODE_LABEL;
1788 prev = prev_nonnote_insn (prev))
1789 {
1790 rtx set = set_of (arg0, prev);
1791 if (set && GET_CODE (set) == SET
1792 && rtx_equal_p (SET_DEST (set), arg0))
1793 {
1794 rtx src = SET_SRC (set);
1795
1796 if (GET_CODE (src) == COMPARE)
1797 {
1798 rtx comparison = src;
1799 arg0 = XEXP (src, 0);
1800 mode = GET_MODE (arg0);
1801 if (mode == VOIDmode)
1802 mode = GET_MODE (XEXP (comparison, 1));
1803 break;
1804 }
1805 /* We can get past reg-reg moves. This may be useful for models
1806 of i387 comparisons that first move the flag registers around. */
1807 if (REG_P (src))
1808 {
1809 arg0 = src;
1810 continue;
1811 }
1812 }
1813 /* If the register is clobbered in some way we cannot understand,
1814 give up. */
1815 if (set)
1816 return UNKNOWN;
1817 }
1818 }
1819
1820 /* An integer condition. */
1821 if (GET_CODE (arg0) == CONST_INT
1822 || (GET_MODE (arg0) != VOIDmode
1823 && GET_MODE_CLASS (mode) != MODE_CC
1824 && ! FLOAT_MODE_P (mode)))
1825 return reverse_condition (code);
1826
1827 return UNKNOWN;
1828 }
1829
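/* An illustration of the above (a sketch, not compiler code): for a plain
   integer comparison such as

	(gt:SI (reg:SI 100) (const_int 0))

   reversed_comparison_code_parts returns LE immediately, since integer
   comparisons are always reversible.  For a MODE_CC comparison such as

	(lt (reg:CC 17) (const_int 0))

   the answer depends on whether the flags were set by a floating-point
   or an integer COMPARE, which is why the loop above may have to walk
   backwards to the instruction that set the flags register.  Register
   numbers here are hypothetical.  */
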
1830 /* A wrapper around the previous function to take COMPARISON as an rtx
1831 expression. This simplifies many callers. */
1832 enum rtx_code
1833 reversed_comparison_code (comparison, insn)
1834 rtx comparison, insn;
1835 {
1836 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
1837 return UNKNOWN;
1838 return reversed_comparison_code_parts (GET_CODE (comparison),
1839 XEXP (comparison, 0),
1840 XEXP (comparison, 1), insn);
1841 }
1842 \f
1843 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
1844 return non-zero if it is safe to reverse this comparison. It is if our
1845 floating-point is not IEEE, if this is an NE or EQ comparison, or if
1846 this is known to be an integer comparison.
1847
1848 Use of this function is deprecated; use reversed_comparison_code
1849 instead. */
1851
1852 int
1853 can_reverse_comparison_p (comparison, insn)
1854 rtx comparison;
1855 rtx insn;
1856 {
1857 enum rtx_code code;
1858
1859 /* If this is not actually a comparison, we can't reverse it. */
1860 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
1861 return 0;
1862
1863 code = reversed_comparison_code (comparison, insn);
1864 if (code == UNKNOWN)
1865 return 0;
1866
1867 /* Callers follow can_reverse_comparison_p with reverse_condition,
1868 so check that reverse_condition will give the proper result. */
1869 return (code == reverse_condition (GET_CODE (comparison)));
1870 }
1871
1872 /* Given an rtx-code for a comparison, return the code for the negated
1873 comparison. If no such code exists, return UNKNOWN.
1874
1875 WATCH OUT! reverse_condition is not safe to use on a jump that might
1876 be acting on the results of an IEEE floating point comparison, because
1877 of the special treatment of non-signaling nans in comparisons.
1878 Use reversed_comparison_code instead. */
1879
1880 enum rtx_code
1881 reverse_condition (code)
1882 enum rtx_code code;
1883 {
1884 switch (code)
1885 {
1886 case EQ:
1887 return NE;
1888 case NE:
1889 return EQ;
1890 case GT:
1891 return LE;
1892 case GE:
1893 return LT;
1894 case LT:
1895 return GE;
1896 case LE:
1897 return GT;
1898 case GTU:
1899 return LEU;
1900 case GEU:
1901 return LTU;
1902 case LTU:
1903 return GEU;
1904 case LEU:
1905 return GTU;
1906 case UNORDERED:
1907 return ORDERED;
1908 case ORDERED:
1909 return UNORDERED;
1910
1911 case UNLT:
1912 case UNLE:
1913 case UNGT:
1914 case UNGE:
1915 case UNEQ:
1916 case LTGT:
1917 return UNKNOWN;
1918
1919 default:
1920 abort ();
1921 }
1922 }
1923
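/* A worked example of the IEEE hazard mentioned above: if X is a NaN,
   both (lt X 0) and (ge X 0) are false, so replacing a jump on LT with
   a jump on reverse_condition (LT) == GE changes behavior.  The table
   above is therefore only safe when the operands are known not to be
   IEEE floating point; reversed_comparison_code handles the general
   case.  */
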
1924 /* Similar, but we're allowed to generate unordered comparisons, which
1925 makes it safe for IEEE floating-point. Of course, we have to recognize
1926 that the target will support them too... */
1927
1928 enum rtx_code
1929 reverse_condition_maybe_unordered (code)
1930 enum rtx_code code;
1931 {
1932 /* Non-IEEE formats don't have unordered conditions. */
1933 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
1934 return reverse_condition (code);
1935
1936 switch (code)
1937 {
1938 case EQ:
1939 return NE;
1940 case NE:
1941 return EQ;
1942 case GT:
1943 return UNLE;
1944 case GE:
1945 return UNLT;
1946 case LT:
1947 return UNGE;
1948 case LE:
1949 return UNGT;
1950 case LTGT:
1951 return UNEQ;
1952 case UNORDERED:
1953 return ORDERED;
1954 case ORDERED:
1955 return UNORDERED;
1956 case UNLT:
1957 return GE;
1958 case UNLE:
1959 return GT;
1960 case UNGT:
1961 return LE;
1962 case UNGE:
1963 return LT;
1964 case UNEQ:
1965 return LTGT;
1966
1967 default:
1968 abort ();
1969 }
1970 }
1971
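/* Sketch of why this is IEEE-safe: the exact complement of (lt X Y) is
   (unge X Y), "unordered or greater or equal", because a NaN operand
   makes LT false and UNGE true.  Hence the table pairs each ordered
   code with its UN* counterpart instead of with the plain opposite.  */
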
1972 /* Similar, but return the code when two operands of a comparison are swapped.
1973 This IS safe for IEEE floating-point. */
1974
1975 enum rtx_code
1976 swap_condition (code)
1977 enum rtx_code code;
1978 {
1979 switch (code)
1980 {
1981 case EQ:
1982 case NE:
1983 case UNORDERED:
1984 case ORDERED:
1985 case UNEQ:
1986 case LTGT:
1987 return code;
1988
1989 case GT:
1990 return LT;
1991 case GE:
1992 return LE;
1993 case LT:
1994 return GT;
1995 case LE:
1996 return GE;
1997 case GTU:
1998 return LTU;
1999 case GEU:
2000 return LEU;
2001 case LTU:
2002 return GTU;
2003 case LEU:
2004 return GEU;
2005 case UNLT:
2006 return UNGT;
2007 case UNLE:
2008 return UNGE;
2009 case UNGT:
2010 return UNLT;
2011 case UNGE:
2012 return UNLE;
2013
2014 default:
2015 abort ();
2016 }
2017 }
2018
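/* Example: swap_condition (GT) == LT because (gt A B) and (lt B A) are
   the same test with the operands exchanged, and codes such as EQ or
   UNORDERED are their own swap.  No ordered/unordered status changes,
   only the operand order, which is why this is IEEE-safe.  */
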
2019 /* Given a comparison CODE, return the corresponding unsigned comparison.
2020 If CODE is an equality comparison or already an unsigned comparison,
2021 CODE is returned. */
2022
2023 enum rtx_code
2024 unsigned_condition (code)
2025 enum rtx_code code;
2026 {
2027 switch (code)
2028 {
2029 case EQ:
2030 case NE:
2031 case GTU:
2032 case GEU:
2033 case LTU:
2034 case LEU:
2035 return code;
2036
2037 case GT:
2038 return GTU;
2039 case GE:
2040 return GEU;
2041 case LT:
2042 return LTU;
2043 case LE:
2044 return LEU;
2045
2046 default:
2047 abort ();
2048 }
2049 }
2050
2051 /* Similarly, return the signed version of a comparison. */
2052
2053 enum rtx_code
2054 signed_condition (code)
2055 enum rtx_code code;
2056 {
2057 switch (code)
2058 {
2059 case EQ:
2060 case NE:
2061 case GT:
2062 case GE:
2063 case LT:
2064 case LE:
2065 return code;
2066
2067 case GTU:
2068 return GT;
2069 case GEU:
2070 return GE;
2071 case LTU:
2072 return LT;
2073 case LEU:
2074 return LE;
2075
2076 default:
2077 abort ();
2078 }
2079 }
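
/* For illustration: unsigned_condition (LT) == LTU and
   signed_condition (LTU) == LT.  Whether the rewritten comparison is
   actually equivalent depends on the operands' known ranges, which is
   the caller's responsibility to establish.  */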
2080 \f
2081 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2082 truth of CODE1 implies the truth of CODE2. */
2083
2084 int
2085 comparison_dominates_p (code1, code2)
2086 enum rtx_code code1, code2;
2087 {
2088 /* UNKNOWN comparison codes can happen as a result of trying to reverse
2089 comparison codes.
2090 They can't match anything, so we have to reject them here. */
2091 if (code1 == UNKNOWN || code2 == UNKNOWN)
2092 return 0;
2093
2094 if (code1 == code2)
2095 return 1;
2096
2097 switch (code1)
2098 {
2099 case UNEQ:
2100 if (code2 == UNLE || code2 == UNGE)
2101 return 1;
2102 break;
2103
2104 case EQ:
2105 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
2106 || code2 == ORDERED)
2107 return 1;
2108 break;
2109
2110 case UNLT:
2111 if (code2 == UNLE || code2 == NE)
2112 return 1;
2113 break;
2114
2115 case LT:
2116 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2117 return 1;
2118 break;
2119
2120 case UNGT:
2121 if (code2 == UNGE || code2 == NE)
2122 return 1;
2123 break;
2124
2125 case GT:
2126 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2127 return 1;
2128 break;
2129
2130 case GE:
2131 case LE:
2132 if (code2 == ORDERED)
2133 return 1;
2134 break;
2135
2136 case LTGT:
2137 if (code2 == NE || code2 == ORDERED)
2138 return 1;
2139 break;
2140
2141 case LTU:
2142 if (code2 == LEU || code2 == NE)
2143 return 1;
2144 break;
2145
2146 case GTU:
2147 if (code2 == GEU || code2 == NE)
2148 return 1;
2149 break;
2150
2151 case UNORDERED:
2152 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
2153 || code2 == UNGE || code2 == UNGT)
2154 return 1;
2155 break;
2156
2157 default:
2158 break;
2159 }
2160
2161 return 0;
2162 }
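
/* For example, comparison_dominates_p (EQ, GE) is 1 because A == B
   implies A >= B, while comparison_dominates_p (LE, LT) is 0 because
   A <= B still allows A == B.  thread_jumps below relies on this to
   decide when the outcome of one conditional branch settles the
   outcome of another.  */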
2163 \f
2164 /* Return 1 if INSN is an unconditional jump and nothing else. */
2165
2166 int
2167 simplejump_p (insn)
2168 rtx insn;
2169 {
2170 return (GET_CODE (insn) == JUMP_INSN
2171 && GET_CODE (PATTERN (insn)) == SET
2172 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2173 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2174 }
2175
2176 /* Return nonzero if INSN is a (possibly) conditional jump
2177 and nothing more.
2178
2179 Use of this function is deprecated, since we need to support combined
2180 branch and compare insns. Use any_condjump_p instead whenever possible. */
2181
2182 int
2183 condjump_p (insn)
2184 rtx insn;
2185 {
2186 register rtx x = PATTERN (insn);
2187
2188 if (GET_CODE (x) != SET
2189 || GET_CODE (SET_DEST (x)) != PC)
2190 return 0;
2191
2192 x = SET_SRC (x);
2193 if (GET_CODE (x) == LABEL_REF)
2194 return 1;
2195 else
2196 return (GET_CODE (x) == IF_THEN_ELSE
2197 && ((GET_CODE (XEXP (x, 2)) == PC
2198 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
2199 || GET_CODE (XEXP (x, 1)) == RETURN))
2200 || (GET_CODE (XEXP (x, 1)) == PC
2201 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
2202 || GET_CODE (XEXP (x, 2)) == RETURN))));
2205 }
2206
2207 /* Return nonzero if INSN is a (possibly) conditional jump inside a
2208 PARALLEL.
2209
2210 Use of this function is deprecated, since we need to support combined
2211 branch and compare insns. Use any_condjump_p instead whenever possible. */
2212
2213 int
2214 condjump_in_parallel_p (insn)
2215 rtx insn;
2216 {
2217 register rtx x = PATTERN (insn);
2218
2219 if (GET_CODE (x) != PARALLEL)
2220 return 0;
2221 else
2222 x = XVECEXP (x, 0, 0);
2223
2224 if (GET_CODE (x) != SET)
2225 return 0;
2226 if (GET_CODE (SET_DEST (x)) != PC)
2227 return 0;
2228 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2229 return 1;
2230 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2231 return 0;
2232 if (XEXP (SET_SRC (x), 2) == pc_rtx
2233 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2234 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2235 return 1;
2236 if (XEXP (SET_SRC (x), 1) == pc_rtx
2237 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2238 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2239 return 1;
2240 return 0;
2241 }
2242
2243 /* Return the SET in INSN that sets the PC, or NULL_RTX if there is none. */
2244
2245 rtx
2246 pc_set (insn)
2247 rtx insn;
2248 {
2249 rtx pat;
2250 if (GET_CODE (insn) != JUMP_INSN)
2251 return NULL_RTX;
2252 pat = PATTERN (insn);
2253
2254 /* The set is allowed to appear either as the insn pattern or
2255 the first set in a PARALLEL. */
2256 if (GET_CODE (pat) == PARALLEL)
2257 pat = XVECEXP (pat, 0, 0);
2258 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
2259 return pat;
2260
2261 return NULL_RTX;
2262 }
2263
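/* For illustration, the two accepted shapes are

	(set (pc) ...)

   and

	(parallel [(set (pc) (if_then_else ...))
	           (clobber (reg:CC 17))])

   where the SET must be element 0 of the PARALLEL; the clobber is just
   an example of a companion effect, and the register number is
   hypothetical.  */
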
2264 /* Return true when insn is an unconditional direct jump,
2265 possibly bundled inside a PARALLEL. */
2266
2267 int
2268 any_uncondjump_p (insn)
2269 rtx insn;
2270 {
2271 rtx x = pc_set (insn);
2272 if (!x)
2273 return 0;
2274 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
2275 return 0;
2276 return 1;
2277 }
2278
2279 /* Return true when insn is a conditional jump. This function works for
2280 instructions containing PC sets in PARALLELs. The instruction may have
2281 various other effects, so before removing the jump you must verify
2282 onlyjump_p.
2283
2284 Note that unlike condjump_p it returns false for unconditional jumps. */
2285
2286 int
2287 any_condjump_p (insn)
2288 rtx insn;
2289 {
2290 rtx x = pc_set (insn);
2291 enum rtx_code a, b;
2292
2293 if (!x)
2294 return 0;
2295 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2296 return 0;
2297
2298 a = GET_CODE (XEXP (SET_SRC (x), 1));
2299 b = GET_CODE (XEXP (SET_SRC (x), 2));
2300
2301 return ((b == PC && (a == LABEL_REF || a == RETURN))
2302 || (a == PC && (b == LABEL_REF || b == RETURN)));
2303 }
2304
2305 /* Return the label of a conditional jump. */
2306
2307 rtx
2308 condjump_label (insn)
2309 rtx insn;
2310 {
2311 rtx x = pc_set (insn);
2312
2313 if (!x)
2314 return NULL_RTX;
2315 x = SET_SRC (x);
2316 if (GET_CODE (x) == LABEL_REF)
2317 return x;
2318 if (GET_CODE (x) != IF_THEN_ELSE)
2319 return NULL_RTX;
2320 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
2321 return XEXP (x, 1);
2322 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
2323 return XEXP (x, 2);
2324 return NULL_RTX;
2325 }
2326
2327 /* Return true if INSN is a (possibly conditional) return insn. */
2328
2329 static int
2330 returnjump_p_1 (loc, data)
2331 rtx *loc;
2332 void *data ATTRIBUTE_UNUSED;
2333 {
2334 rtx x = *loc;
2335 return x && GET_CODE (x) == RETURN;
2336 }
2337
2338 int
2339 returnjump_p (insn)
2340 rtx insn;
2341 {
2342 if (GET_CODE (insn) != JUMP_INSN)
2343 return 0;
2344 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
2345 }
2346
2347 /* Return true if INSN is a jump that only transfers control and
2348 nothing more. */
2349
2350 int
2351 onlyjump_p (insn)
2352 rtx insn;
2353 {
2354 rtx set;
2355
2356 if (GET_CODE (insn) != JUMP_INSN)
2357 return 0;
2358
2359 set = single_set (insn);
2360 if (set == NULL)
2361 return 0;
2362 if (GET_CODE (SET_DEST (set)) != PC)
2363 return 0;
2364 if (side_effects_p (SET_SRC (set)))
2365 return 0;
2366
2367 return 1;
2368 }
2369
2370 #ifdef HAVE_cc0
2371
2372 /* Return 1 if X is an RTX that does nothing but set the condition codes
2373 and CLOBBER or USE registers.
2374 Return -1 if X does explicitly set the condition codes,
2375 but also does other things. */
2376
2377 int
2378 sets_cc0_p (x)
2379 rtx x ATTRIBUTE_UNUSED;
2380 {
2381 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
2382 return 1;
2383 if (GET_CODE (x) == PARALLEL)
2384 {
2385 int i;
2386 int sets_cc0 = 0;
2387 int other_things = 0;
2388 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2389 {
2390 if (GET_CODE (XVECEXP (x, 0, i)) == SET
2391 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
2392 sets_cc0 = 1;
2393 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
2394 other_things = 1;
2395 }
2396 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
2397 }
2398 return 0;
2399 }
2400 #endif
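
/* A sketch of the three answers: sets_cc0_p returns 1 for

	(set (cc0) (compare (reg:SI 0) (const_int 0)))

   returns -1 for a PARALLEL that sets cc0 but also sets a data
   register, and returns 0 for anything that does not set cc0 at all.
   The example pattern is hypothetical.  */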
2401 \f
2402 /* Follow any unconditional jump at LABEL;
2403 return the ultimate label reached by any such chain of jumps.
2404 If LABEL is not followed by a jump, return LABEL.
2405 If the chain loops or we can't find the end, return LABEL,
2406 since that tells the caller to avoid changing the insn.
2407
2408 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2409 a USE or CLOBBER. */
2410
2411 rtx
2412 follow_jumps (label)
2413 rtx label;
2414 {
2415 register rtx insn;
2416 register rtx next;
2417 register rtx value = label;
2418 register int depth;
2419
2420 for (depth = 0;
2421 (depth < 10
2422 && (insn = next_active_insn (value)) != 0
2423 && GET_CODE (insn) == JUMP_INSN
2424 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
2425 && onlyjump_p (insn))
2426 || GET_CODE (PATTERN (insn)) == RETURN)
2427 && (next = NEXT_INSN (insn))
2428 && GET_CODE (next) == BARRIER);
2429 depth++)
2430 {
2431 /* Don't chain through the insn that jumps into a loop
2432 from outside the loop,
2433 since that would create multiple loop entry jumps
2434 and prevent loop optimization. */
2435 rtx tem;
2436 if (!reload_completed)
2437 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
2438 if (GET_CODE (tem) == NOTE
2439 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
2440 /* ??? Optional. Disables some optimizations, but makes
2441 gcov output more accurate with -O. */
2442 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
2443 return value;
2444
2445 /* If we have found a cycle, make the insn jump to itself. */
2446 if (JUMP_LABEL (insn) == label)
2447 return label;
2448
2449 tem = next_active_insn (JUMP_LABEL (insn));
2450 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2451 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2452 break;
2453
2454 value = JUMP_LABEL (insn);
2455 }
2456 if (depth == 10)
2457 return label;
2458 return value;
2459 }
2460
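/* Sketch of the effect: given the (hypothetical) chain

	L1:	jump L2
	...
	L2:	jump L3

   follow_jumps (L1) returns L3.  It gives up and returns the original
   label after 10 hops or when it detects a cycle.  */
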
2461 /* Assuming that field IDX of X is a vector of label_refs,
2462 replace each of them by the ultimate label reached by it.
2463 Return nonzero if a change is made. */
2465
2466 static int
2467 tension_vector_labels (x, idx)
2468 register rtx x;
2469 register int idx;
2470 {
2471 int changed = 0;
2472 register int i;
2473 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
2474 {
2475 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
2476 register rtx nlabel = follow_jumps (olabel);
2477 if (nlabel && nlabel != olabel)
2478 {
2479 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
2480 ++LABEL_NUSES (nlabel);
2481 if (--LABEL_NUSES (olabel) == 0)
2482 delete_insn (olabel);
2483 changed = 1;
2484 }
2485 }
2486 return changed;
2487 }
2488 \f
2489 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2490 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2491 in INSN, then store one of them in JUMP_LABEL (INSN).
2492 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2493 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2494 Also, when there are consecutive labels, canonicalize on the last of them.
2495
2496 Note that two labels separated by a loop-beginning note
2497 must be kept distinct if we have not yet done loop optimization,
2498 because the gap between them is where loop-optimize
2499 will want to move invariant code to. CROSS_JUMP tells us
2500 that loop optimization has been done.
2501
2502 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2503 two labels distinct if they are separated by only USE or CLOBBER insns. */
2504
2505 void
2506 mark_jump_label (x, insn, cross_jump, in_mem)
2507 register rtx x;
2508 rtx insn;
2509 int cross_jump;
2510 int in_mem;
2511 {
2512 register RTX_CODE code = GET_CODE (x);
2513 register int i;
2514 register const char *fmt;
2515
2516 switch (code)
2517 {
2518 case PC:
2519 case CC0:
2520 case REG:
2521 case SUBREG:
2522 case CONST_INT:
2523 case CONST_DOUBLE:
2524 case CLOBBER:
2525 case CALL:
2526 return;
2527
2528 case MEM:
2529 in_mem = 1;
2530 break;
2531
2532 case SYMBOL_REF:
2533 if (!in_mem)
2534 return;
2535
2536 /* If this is a constant-pool reference, see if it is a label. */
2537 if (CONSTANT_POOL_ADDRESS_P (x))
2538 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
2539 break;
2540
2541 case LABEL_REF:
2542 {
2543 rtx label = XEXP (x, 0);
2544 rtx olabel = label;
2545 rtx note;
2546 rtx next;
2547
2548 /* Ignore remaining references to unreachable labels that
2549 have been deleted. */
2550 if (GET_CODE (label) == NOTE
2551 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
2552 break;
2553
2554 if (GET_CODE (label) != CODE_LABEL)
2555 abort ();
2556
2557 /* Ignore references to labels of containing functions. */
2558 if (LABEL_REF_NONLOCAL_P (x))
2559 break;
2560
2561 /* If there are other labels following this one,
2562 replace it with the last of the consecutive labels. */
2563 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
2564 {
2565 if (GET_CODE (next) == CODE_LABEL)
2566 label = next;
2567 else if (cross_jump && GET_CODE (next) == INSN
2568 && (GET_CODE (PATTERN (next)) == USE
2569 || GET_CODE (PATTERN (next)) == CLOBBER))
2570 continue;
2571 else if (GET_CODE (next) != NOTE)
2572 break;
2573 else if (! cross_jump
2574 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
2575 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
2576 /* ??? Optional. Disables some optimizations, but
2577 makes gcov output more accurate with -O. */
2578 || (flag_test_coverage
2579 && NOTE_LINE_NUMBER (next) > 0)))
2580 break;
2581 }
2582
2583 XEXP (x, 0) = label;
2584 if (! insn || ! INSN_DELETED_P (insn))
2585 ++LABEL_NUSES (label);
2586
2587 if (insn)
2588 {
2589 if (GET_CODE (insn) == JUMP_INSN)
2590 JUMP_LABEL (insn) = label;
2591
2592 /* If we've changed OLABEL and we had a REG_LABEL note
2593 for it, update it as well. */
2594 else if (label != olabel
2595 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
2596 XEXP (note, 0) = label;
2597
2598 /* Otherwise, add a REG_LABEL note for LABEL unless there already
2599 is one. */
2600 else if (! find_reg_note (insn, REG_LABEL, label))
2601 {
2602 /* This code used to ignore labels which referred to dispatch
2603 tables to avoid flow.c generating worse code.
2604
2605 However, in the presence of global optimizations like
2606 gcse which call find_basic_blocks without calling
2607 life_analysis, not recording such labels will lead
2608 to compiler aborts because of inconsistencies in the
2609 flow graph. So we go ahead and record the label.
2610
2611 It may also be the case that the optimization argument
2612 is no longer valid because of the more accurate cfg
2613 we build in find_basic_blocks -- it no longer pessimizes
2614 code when it finds a REG_LABEL note. */
2615 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
2616 REG_NOTES (insn));
2617 }
2618 }
2619 return;
2620 }
2621
2622 /* Do walk the labels in a vector, but not the first operand of an
2623 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
2624 case ADDR_VEC:
2625 case ADDR_DIFF_VEC:
2626 if (! INSN_DELETED_P (insn))
2627 {
2628 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
2629
2630 for (i = 0; i < XVECLEN (x, eltnum); i++)
2631 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
2632 cross_jump, in_mem);
2633 }
2634 return;
2635
2636 default:
2637 break;
2638 }
2639
2640 fmt = GET_RTX_FORMAT (code);
2641 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2642 {
2643 if (fmt[i] == 'e')
2644 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
2645 else if (fmt[i] == 'E')
2646 {
2647 register int j;
2648 for (j = 0; j < XVECLEN (x, i); j++)
2649 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
2650 }
2651 }
2652 }
2653
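/* Illustration of the canonicalization above (hypothetical labels):
   given consecutive labels

	L1:
	L2:	insn ...

   a (label_ref L1) is rewritten to (label_ref L2), so every reference
   agrees on the last label of the group.  */
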
2654 /* If all INSN does is set the pc, delete it,
2655 and delete the insn that set the condition codes for it
2656 if that's what the previous thing was. */
2657
2658 void
2659 delete_jump (insn)
2660 rtx insn;
2661 {
2662 register rtx set = single_set (insn);
2663
2664 if (set && GET_CODE (SET_DEST (set)) == PC)
2665 delete_computation (insn);
2666 }
2667
2668 /* Verify INSN is a BARRIER and delete it. */
2669
2670 void
2671 delete_barrier (insn)
2672 rtx insn;
2673 {
2674 if (GET_CODE (insn) != BARRIER)
2675 abort ();
2676
2677 delete_insn (insn);
2678 }
2679
2680 /* Recursively delete prior insns that compute the value (used only by INSN
2681 which the caller is deleting) stored in the register mentioned by NOTE
2682 which is a REG_DEAD note associated with INSN. */
2683
2684 static void
2685 delete_prior_computation (note, insn)
2686 rtx note;
2687 rtx insn;
2688 {
2689 rtx our_prev;
2690 rtx reg = XEXP (note, 0);
2691
2692 for (our_prev = prev_nonnote_insn (insn);
2693 our_prev && (GET_CODE (our_prev) == INSN
2694 || GET_CODE (our_prev) == CALL_INSN);
2695 our_prev = prev_nonnote_insn (our_prev))
2696 {
2697 rtx pat = PATTERN (our_prev);
2698
2699 /* If we reach a CALL which is not calling a const function,
2700 or one whose callee pops the arguments, then give up. */
2701 if (GET_CODE (our_prev) == CALL_INSN
2702 && (! CONST_CALL_P (our_prev)
2703 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
2704 break;
2705
2706 /* If we reach a SEQUENCE, it is too complex to try to
2707 do anything with it, so give up. */
2708 if (GET_CODE (pat) == SEQUENCE)
2709 break;
2710
2711 if (GET_CODE (pat) == USE
2712 && GET_CODE (XEXP (pat, 0)) == INSN)
2713 /* reorg creates USEs that look like this. We leave them
2714 alone because reorg needs them for its own purposes. */
2715 break;
2716
2717 if (reg_set_p (reg, pat))
2718 {
2719 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
2720 break;
2721
2722 if (GET_CODE (pat) == PARALLEL)
2723 {
2724 /* If we find a SET of something else, we can't
2725 delete the insn. */
2726
2727 int i;
2728
2729 for (i = 0; i < XVECLEN (pat, 0); i++)
2730 {
2731 rtx part = XVECEXP (pat, 0, i);
2732
2733 if (GET_CODE (part) == SET
2734 && SET_DEST (part) != reg)
2735 break;
2736 }
2737
2738 if (i == XVECLEN (pat, 0))
2739 delete_computation (our_prev);
2740 }
2741 else if (GET_CODE (pat) == SET
2742 && GET_CODE (SET_DEST (pat)) == REG)
2743 {
2744 int dest_regno = REGNO (SET_DEST (pat));
2745 int dest_endregno
2746 = (dest_regno
2747 + (dest_regno < FIRST_PSEUDO_REGISTER
2748 ? HARD_REGNO_NREGS (dest_regno,
2749 GET_MODE (SET_DEST (pat))) : 1));
2750 int regno = REGNO (reg);
2751 int endregno
2752 = (regno
2753 + (regno < FIRST_PSEUDO_REGISTER
2754 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
2755
2756 if (dest_regno >= regno
2757 && dest_endregno <= endregno)
2758 delete_computation (our_prev);
2759
2760 /* We may have a multi-word hard register, and some, but not
2761 all, of its words may be needed in subsequent
2762 insns. Write REG_UNUSED notes for those parts that were not
2763 needed. */
2764 else if (dest_regno <= regno
2765 && dest_endregno >= endregno)
2766 {
2767 int i;
2768
2769 REG_NOTES (our_prev)
2770 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
2771 REG_NOTES (our_prev));
2772
2773 for (i = dest_regno; i < dest_endregno; i++)
2774 if (! find_regno_note (our_prev, REG_UNUSED, i))
2775 break;
2776
2777 if (i == dest_endregno)
2778 delete_computation (our_prev);
2779 }
2780 }
2781
2782 break;
2783 }
2784
2785 /* If PAT references the register that dies here, it is an
2786 additional use. Hence any prior SET isn't dead. However, this
2787 insn becomes the new place for the REG_DEAD note. */
2788 if (reg_overlap_mentioned_p (reg, pat))
2789 {
2790 XEXP (note, 1) = REG_NOTES (our_prev);
2791 REG_NOTES (our_prev) = note;
2792 break;
2793 }
2794 }
2795 }
2796
2797 /* Delete INSN and recursively delete insns that compute values used only
2798 by INSN. This uses the REG_DEAD notes computed during flow analysis.
2799 If we are running before flow.c, we need do nothing since flow.c will
2800 delete dead code. We also can't know if the registers being used are
2801 dead or not at this point.
2802
2803 Otherwise, look at all our REG_DEAD notes. If a previous insn does
2804 nothing other than set a register that dies in this insn, we can delete
2805 that insn as well.
2806
2807 On machines with CC0, if CC0 is used in this insn, we may be able to
2808 delete the insn that set it. */
2809
2810 static void
2811 delete_computation (insn)
2812 rtx insn;
2813 {
2814 rtx note, next;
2815
2816 #ifdef HAVE_cc0
2817 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2818 {
2819 rtx prev = prev_nonnote_insn (insn);
2820 /* We assume that at this stage
2821 CC's are always set explicitly
2822 and always immediately before the jump that
2823 will use them. So if the previous insn
2824 exists to set the CC's, delete it
2825 (unless it performs auto-increments, etc.). */
2826 if (prev && GET_CODE (prev) == INSN
2827 && sets_cc0_p (PATTERN (prev)))
2828 {
2829 if (sets_cc0_p (PATTERN (prev)) > 0
2830 && ! side_effects_p (PATTERN (prev)))
2831 delete_computation (prev);
2832 else
2833 /* Otherwise, show that cc0 won't be used. */
2834 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
2835 cc0_rtx, REG_NOTES (prev));
2836 }
2837 }
2838 #endif
2839
2840 for (note = REG_NOTES (insn); note; note = next)
2841 {
2842 next = XEXP (note, 1);
2843
2844 if (REG_NOTE_KIND (note) != REG_DEAD
2845 /* Verify that the REG_NOTE is legitimate. */
2846 || GET_CODE (XEXP (note, 0)) != REG)
2847 continue;
2848
2849 delete_prior_computation (note, insn);
2850 }
2851
2852 delete_insn (insn);
2853 }
2854 \f
2855 /* Delete insn INSN from the chain of insns and update label ref counts.
2856 May delete some following insns as a consequence; may even delete
2857 a label elsewhere and insns that follow it.
2858
2859 Returns the first insn after INSN that was not deleted. */
2860
2861 rtx
2862 delete_insn (insn)
2863 register rtx insn;
2864 {
2865 register rtx next = NEXT_INSN (insn);
2866 register rtx prev = PREV_INSN (insn);
2867 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
2868 register int dont_really_delete = 0;
2869 rtx note;
2870
2871 while (next && INSN_DELETED_P (next))
2872 next = NEXT_INSN (next);
2873
2874 /* This insn is already deleted => return first following nondeleted. */
2875 if (INSN_DELETED_P (insn))
2876 return next;
2877
2878 if (was_code_label)
2879 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2880
2881 /* Don't delete user-declared labels. When optimizing, convert them
2882 to special NOTEs instead. When not optimizing, leave them alone. */
2883 if (was_code_label && LABEL_NAME (insn) != 0)
2884 {
2885 if (! optimize)
2886 dont_really_delete = 1;
2887 else if (! dont_really_delete)
2888 {
2889 const char *name = LABEL_NAME (insn);
2890 PUT_CODE (insn, NOTE);
2891 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
2892 NOTE_SOURCE_FILE (insn) = name;
2893 dont_really_delete = 1;
2894 }
2895 }
2896 else
2897 /* Mark this insn as deleted. */
2898 INSN_DELETED_P (insn) = 1;
2899
2900 /* If this is an unconditional jump, delete it from the jump chain. */
2901 if (simplejump_p (insn))
2902 delete_from_jump_chain (insn);
2903
2904 /* If the instruction is followed by a barrier,
2905 delete the barrier too. */
2906
2907 if (next != 0 && GET_CODE (next) == BARRIER)
2908 {
2909 INSN_DELETED_P (next) = 1;
2910 next = NEXT_INSN (next);
2911 }
2912
2913 /* Patch out INSN (and the barrier, if any). */
2914
2915 if (! dont_really_delete)
2916 {
2917 if (prev)
2918 {
2919 NEXT_INSN (prev) = next;
2920 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2921 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
2922 XVECLEN (PATTERN (prev), 0) - 1)) = next;
2923 }
2924
2925 if (next)
2926 {
2927 PREV_INSN (next) = prev;
2928 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2929 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2930 }
2931
2932 if (prev && NEXT_INSN (prev) == 0)
2933 set_last_insn (prev);
2934 }
2935
2936 /* If deleting a jump, decrement the count of the label,
2937 and delete the label if it is now unused. */
2938
2939 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2940 {
2941 rtx lab = JUMP_LABEL (insn), lab_next;
2942
2943 if (--LABEL_NUSES (lab) == 0)
2944 {
2945 /* This can delete NEXT or PREV,
2946 either directly if NEXT is JUMP_LABEL (INSN),
2947 or indirectly through more levels of jumps. */
2948 delete_insn (lab);
2949
2950 /* I feel a little doubtful about this loop,
2951 but I see no clean and sure alternative way
2952 to find the first insn after INSN that is not now deleted.
2953 I hope this works. */
2954 while (next && INSN_DELETED_P (next))
2955 next = NEXT_INSN (next);
2956 return next;
2957 }
2958 else if ((lab_next = next_nonnote_insn (lab)) != NULL
2959 && GET_CODE (lab_next) == JUMP_INSN
2960 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
2961 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
2962 {
2963 /* If we're deleting the tablejump, delete the dispatch table.
2964 We may not be able to kill the label immediately preceding it
2965 just yet, as it might be referenced in code leading up to
2966 the tablejump. */
2967 delete_insn (lab_next);
2968 }
2969 }
2970
2971 /* Likewise if we're deleting a dispatch table. */
2972
2973 if (GET_CODE (insn) == JUMP_INSN
2974 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2975 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2976 {
2977 rtx pat = PATTERN (insn);
2978 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
2979 int len = XVECLEN (pat, diff_vec_p);
2980
2981 for (i = 0; i < len; i++)
2982 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
2983 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
2984 while (next && INSN_DELETED_P (next))
2985 next = NEXT_INSN (next);
2986 return next;
2987 }
2988
2989 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
2990 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2991 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2992 if (REG_NOTE_KIND (note) == REG_LABEL
2993 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
2994 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2995 if (--LABEL_NUSES (XEXP (note, 0)) == 0)
2996 delete_insn (XEXP (note, 0));
2997
2998 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
2999 prev = PREV_INSN (prev);
3000
3001 /* If INSN was a label and a dispatch table follows it,
3002 delete the dispatch table. The tablejump must have gone already.
3003 It isn't useful to fall through into a table. */
3004
3005 if (was_code_label
3006 && NEXT_INSN (insn) != 0
3007 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
3008 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
3009 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
3010 next = delete_insn (NEXT_INSN (insn));
3011
3012 /* If INSN was a label, delete insns following it if now unreachable. */
3013
3014 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
3015 {
3016 register RTX_CODE code;
3017 while (next != 0
3018 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
3019 || code == NOTE || code == BARRIER
3020 || (code == CODE_LABEL && INSN_DELETED_P (next))))
3021 {
3022 if (code == NOTE
3023 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
3024 next = NEXT_INSN (next);
3025 /* Keep going past other deleted labels to delete what follows. */
3026 else if (code == CODE_LABEL && INSN_DELETED_P (next))
3027 next = NEXT_INSN (next);
3028 else
3029 /* Note: if this deletes a jump, it can cause more
3030 deletion of unreachable code, after a different label.
3031 As long as the value from this recursive call is correct,
3032 this invocation functions correctly. */
3033 next = delete_insn (next);
3034 }
3035 }
3036
3037 return next;
3038 }
3039
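/* Usage sketch: since deleting INSN may cascade into labels, barriers,
   and unreachable code after it, callers typically continue scanning
   from the returned insn:

	insn = delete_insn (insn);  */
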
3040 /* Advance from INSN until reaching something not deleted;
3041 then return that. May return INSN itself. */
3042
3043 rtx
3044 next_nondeleted_insn (insn)
3045 rtx insn;
3046 {
3047 while (INSN_DELETED_P (insn))
3048 insn = NEXT_INSN (insn);
3049 return insn;
3050 }
3051 \f
3052 /* Delete a range of insns from FROM to TO, inclusive.
3053 This is for the sake of peephole optimization, so assume
3054 that whatever these insns do will still be done by a new
3055 peephole insn that will replace them. */
3056
3057 void
3058 delete_for_peephole (from, to)
3059 register rtx from, to;
3060 {
3061 register rtx insn = from;
3062
3063 while (1)
3064 {
3065 register rtx next = NEXT_INSN (insn);
3066 register rtx prev = PREV_INSN (insn);
3067
3068 if (GET_CODE (insn) != NOTE)
3069 {
3070 INSN_DELETED_P (insn) = 1;
3071
3072 /* Patch this insn out of the chain. */
3073 /* We don't do this all at once, because we
3074 must preserve all NOTEs. */
3075 if (prev)
3076 NEXT_INSN (prev) = next;
3077
3078 if (next)
3079 PREV_INSN (next) = prev;
3080 }
3081
3082 if (insn == to)
3083 break;
3084 insn = next;
3085 }
3086
3087 /* Note that if TO is an unconditional jump
3088 we *do not* delete the BARRIER that follows,
3089 since the peephole that replaces this sequence
3090 is also an unconditional jump in that case. */
3091 }
3092 \f
3093 /* We have determined that INSN is never reached, and are about to
3094 delete it. Print a warning if the user asked for one.
3095
3096 To try to make this warning more useful, this should only be called
3097 once per basic block not reached, and it only warns when the basic
3098 block contains more than one line from the current function, and
3099 contains at least one operation. CSE and inlining can duplicate insns,
3100 so it's possible to get spurious warnings from this. */
3101
3102 void
3103 never_reached_warning (avoided_insn)
3104 rtx avoided_insn;
3105 {
3106 rtx insn;
3107 rtx a_line_note = NULL;
3108 int two_avoided_lines = 0;
3109 int contains_insn = 0;
3110
3111 if (! warn_notreached)
3112 return;
3113
3114 /* Scan forwards, looking at LINE_NUMBER notes, until
3115 we hit a LABEL or we run out of insns. */
3116
3117 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
3118 {
3119 if (GET_CODE (insn) == CODE_LABEL)
3120 break;
3121 else if (GET_CODE (insn) == NOTE /* A line number note? */
3122 && NOTE_LINE_NUMBER (insn) >= 0)
3123 {
3124 if (a_line_note == NULL)
3125 a_line_note = insn;
3126 else
3127 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
3128 != NOTE_LINE_NUMBER (insn));
3129 }
3130 else if (INSN_P (insn))
3131 contains_insn = 1;
3132 }
3133 if (two_avoided_lines && contains_insn)
3134 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
3135 NOTE_LINE_NUMBER (a_line_note),
3136 "will never be executed");
3137 }
3138 \f
3139 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
3140 NLABEL as a return. Accrue modifications into the change group. */
3141
3142 static void
3143 redirect_exp_1 (loc, olabel, nlabel, insn)
3144 rtx *loc;
3145 rtx olabel, nlabel;
3146 rtx insn;
3147 {
3148 register rtx x = *loc;
3149 register RTX_CODE code = GET_CODE (x);
3150 register int i;
3151 register const char *fmt;
3152
3153 if (code == LABEL_REF)
3154 {
3155 if (XEXP (x, 0) == olabel)
3156 {
3157 rtx n;
3158 if (nlabel)
3159 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3160 else
3161 n = gen_rtx_RETURN (VOIDmode);
3162
3163 validate_change (insn, loc, n, 1);
3164 return;
3165 }
3166 }
3167 else if (code == RETURN && olabel == 0)
3168 {
3169 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3170 if (loc == &PATTERN (insn))
3171 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3172 validate_change (insn, loc, x, 1);
3173 return;
3174 }
3175
3176 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
3177 && GET_CODE (SET_SRC (x)) == LABEL_REF
3178 && XEXP (SET_SRC (x), 0) == olabel)
3179 {
3180 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
3181 return;
3182 }
3183
3184 fmt = GET_RTX_FORMAT (code);
3185 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3186 {
3187 if (fmt[i] == 'e')
3188 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
3189 else if (fmt[i] == 'E')
3190 {
3191 register int j;
3192 for (j = 0; j < XVECLEN (x, i); j++)
3193 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
3194 }
3195 }
3196 }
3197
3198 /* Similar, but apply the change group and report success or failure. */
3199
3200 static int
3201 redirect_exp (olabel, nlabel, insn)
3202 rtx olabel, nlabel;
3203 rtx insn;
3204 {
3205 rtx *loc;
3206
3207 if (GET_CODE (PATTERN (insn)) == PARALLEL)
3208 loc = &XVECEXP (PATTERN (insn), 0, 0);
3209 else
3210 loc = &PATTERN (insn);
3211
3212 redirect_exp_1 (loc, olabel, nlabel, insn);
3213 if (num_validated_changes () == 0)
3214 return 0;
3215
3216 return apply_change_group ();
3217 }
3218
3219 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
3220 the modifications into the change group. Return false if we did
3221 not see how to do that. */
3222
3223 int
3224 redirect_jump_1 (jump, nlabel)
3225 rtx jump, nlabel;
3226 {
3227 int ochanges = num_validated_changes ();
3228 rtx *loc;
3229
3230 if (GET_CODE (PATTERN (jump)) == PARALLEL)
3231 loc = &XVECEXP (PATTERN (jump), 0, 0);
3232 else
3233 loc = &PATTERN (jump);
3234
3235 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
3236 return num_validated_changes () > ochanges;
3237 }
3238
3239 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
3240 jump target label is unused as a result, it and the code following
3241 it may be deleted.
3242
3243 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3244 RETURN insn.
3245
3246 The return value will be 1 if the change was made, 0 if it wasn't
3247 (this can only occur for NLABEL == 0). */
3248
3249 int
3250 redirect_jump (jump, nlabel, delete_unused)
3251 rtx jump, nlabel;
3252 int delete_unused;
3253 {
3254 register rtx olabel = JUMP_LABEL (jump);
3255
3256 if (nlabel == olabel)
3257 return 1;
3258
3259 if (! redirect_exp (olabel, nlabel, jump))
3260 return 0;
3261
3262 /* If this is an unconditional branch, delete it from the jump_chain of
3263 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3264 have UID's in range and JUMP_CHAIN is valid). */
3265 if (jump_chain && (simplejump_p (jump)
3266 || GET_CODE (PATTERN (jump)) == RETURN))
3267 {
3268 int label_index = nlabel ? INSN_UID (nlabel) : 0;
3269
3270 delete_from_jump_chain (jump);
3271 if (label_index < max_jump_chain
3272 && INSN_UID (jump) < max_jump_chain)
3273 {
3274 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
3275 jump_chain[label_index] = jump;
3276 }
3277 }
3278
3279 JUMP_LABEL (jump) = nlabel;
3280 if (nlabel)
3281 ++LABEL_NUSES (nlabel);
3282
3283 /* If we're eliding the jump over exception cleanups at the end of a
3284 function, move the function end note so that -Wreturn-type works. */
3285 if (olabel && nlabel
3286 && NEXT_INSN (olabel)
3287 && GET_CODE (NEXT_INSN (olabel)) == NOTE
3288 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
3289 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
3290
3291 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
3292 delete_insn (olabel);
3293
3294 return 1;
3295 }
3296
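/* Usage sketch: to retarget JUMP at NLABEL and clean up the old target
   if it becomes unused,

	if (redirect_jump (jump, nlabel, 1))
	  ...JUMP now goes to NLABEL...

   Passing 0 for NLABEL requests conversion into a RETURN, which may
   fail; the function then returns 0 and changes nothing.  */
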
3297 /* Invert the jump condition of the jump insn INSN.
3298 Accrue the modifications into the change group. */
3299
3300 static void
3301 invert_exp_1 (insn)
3302 rtx insn;
3303 {
3304 register RTX_CODE code;
3305 rtx x = pc_set (insn);
3306
3307 if (!x)
3308 abort ();
3309 x = SET_SRC (x);
3310
3311 code = GET_CODE (x);
3312
3313 if (code == IF_THEN_ELSE)
3314 {
3315 register rtx comp = XEXP (x, 0);
3316 register rtx tem;
3317 enum rtx_code reversed_code;
3318
3319 /* We can do this in two ways: The preferable way is to reverse the
3320 comparison code, when reversed_comparison_code can find a safe
3321 reversal. Otherwise, swap the THEN-part and ELSE-part
3322 of the IF_THEN_ELSE. If we can't do either, fail. */
3323
3324 reversed_code = reversed_comparison_code (comp, insn);
3325
3326 if (reversed_code != UNKNOWN)
3327 {
3328 validate_change (insn, &XEXP (x, 0),
3329 gen_rtx_fmt_ee (reversed_code,
3330 GET_MODE (comp), XEXP (comp, 0),
3331 XEXP (comp, 1)),
3332 1);
3333 return;
3334 }
3335
3336 tem = XEXP (x, 1);
3337 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
3338 validate_change (insn, &XEXP (x, 2), tem, 1);
3339 }
3340 else
3341 abort ();
3342 }
3343
3344 /* Invert the jump condition of the conditional jump insn INSN.
3345
3346 Return 1 if we can do so, 0 if we cannot find a way to do so that
3347 matches a pattern. */
3348
3349 static int
3350 invert_exp (insn)
3351 rtx insn;
3352 {
3353 invert_exp_1 (insn);
3354 if (num_validated_changes () == 0)
3355 return 0;
3356
3357 return apply_change_group ();
3358 }
3359
3360 /* Invert the condition of the jump JUMP, and make it jump to label
3361 NLABEL instead of where it jumps now. Accrue changes into the
3362 change group. Return false if we didn't see how to perform the
3363 inversion and redirection. */
3364
3365 int
3366 invert_jump_1 (jump, nlabel)
3367 rtx jump, nlabel;
3368 {
3369 int ochanges;
3370
3371 ochanges = num_validated_changes ();
3372 invert_exp_1 (jump);
3373 if (num_validated_changes () == ochanges)
3374 return 0;
3375
3376 return redirect_jump_1 (jump, nlabel);
3377 }
3378
3379 /* Invert the condition of the jump JUMP, and make it jump to label
3380 NLABEL instead of where it jumps now. Return true if successful. */
3381
3382 int
3383 invert_jump (jump, nlabel, delete_unused)
3384 rtx jump, nlabel;
3385 int delete_unused;
3386 {
3387 /* We have to either invert the condition and change the label or
3388 do neither. Either operation could fail. We first try to invert
3389 the jump. If that succeeds, we try changing the label. If that fails,
3390 we invert the jump back to what it was. */
3391
3392 if (! invert_exp (jump))
3393 return 0;
3394
3395 if (redirect_jump (jump, nlabel, delete_unused))
3396 {
3397 /* An inverted jump means that a probability taken becomes a
3398 probability not taken. Subtract the branch probability from the
3399 probability base to convert it back to a taken probability. */
3400
3401 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3402 if (note)
3403 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
3404
3405 return 1;
3406 }
3407
3408 if (! invert_exp (jump))
3409 /* This should just be putting it back the way it was. */
3410 abort ();
3411
3412 return 0;
3413 }
3414
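/* Worked example for the REG_BR_PROB update above, assuming the usual
   REG_BR_PROB_BASE of 10000: a branch taken with probability 3000
   that is successfully inverted ends up with 10000 - 3000 = 7000,
   i.e. the old fall-through probability.  */
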
3415 /* Delete the instruction JUMP from any jump chain it might be on. */
3416
3417 static void
3418 delete_from_jump_chain (jump)
3419 rtx jump;
3420 {
3421 int index;
3422 rtx olabel = JUMP_LABEL (jump);
3423
3424 /* Handle unconditional jumps. */
3425 if (jump_chain && olabel != 0
3426 && INSN_UID (olabel) < max_jump_chain
3427 && simplejump_p (jump))
3428 index = INSN_UID (olabel);
3429 /* Handle return insns. */
3430 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3431 index = 0;
3432 else
3433 return;
3434
3435 if (jump_chain[index] == jump)
3436 jump_chain[index] = jump_chain[INSN_UID (jump)];
3437 else
3438 {
3439 rtx insn;
3440
3441 for (insn = jump_chain[index];
3442 insn != 0;
3443 insn = jump_chain[INSN_UID (insn)])
3444 if (jump_chain[INSN_UID (insn)] == jump)
3445 {
3446 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3447 break;
3448 }
3449 }
3450 }
3451 \f
3452 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3453
3454 If the old jump target label (before the dispatch table) becomes unused,
3455 it and the dispatch table may be deleted. In that case, find the insn
3456 before the jump that references that label, and delete it and its
3457 logical successors too. */
3458
3459 static void
3460 redirect_tablejump (jump, nlabel)
3461 rtx jump, nlabel;
3462 {
3463 register rtx olabel = JUMP_LABEL (jump);
3464 rtx *notep, note, next;
3465
3466 /* Add this jump to the jump_chain of NLABEL. */
3467 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
3468 && INSN_UID (jump) < max_jump_chain)
3469 {
3470 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
3471 jump_chain[INSN_UID (nlabel)] = jump;
3472 }
3473
3474 for (notep = &REG_NOTES (jump), note = *notep; note; note = next)
3475 {
3476 next = XEXP (note, 1);
3477
3478 if (REG_NOTE_KIND (note) != REG_DEAD
3479 /* Verify that the REG_NOTE is legitimate. */
3480 || GET_CODE (XEXP (note, 0)) != REG
3481 || ! reg_mentioned_p (XEXP (note, 0), PATTERN (jump)))
3482 notep = &XEXP (note, 1);
3483 else
3484 {
3485 delete_prior_computation (note, jump);
3486 *notep = next;
3487 }
3488 }
3489
3490 PATTERN (jump) = gen_jump (nlabel);
3491 JUMP_LABEL (jump) = nlabel;
3492 ++LABEL_NUSES (nlabel);
3493 INSN_CODE (jump) = -1;
3494
3495 if (--LABEL_NUSES (olabel) == 0)
3496 {
3497 delete_labelref_insn (jump, olabel, 0);
3498 delete_insn (olabel);
3499 }
3500 }
3501
3502 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3503 If we found one, delete it and then delete this insn if DELETE_THIS is
3504 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3505
3506 static int
3507 delete_labelref_insn (insn, label, delete_this)
3508 rtx insn, label;
3509 int delete_this;
3510 {
3511 int deleted = 0;
3512 rtx link;
3513
3514 if (GET_CODE (insn) != NOTE
3515 && reg_mentioned_p (label, PATTERN (insn)))
3516 {
3517 if (delete_this)
3518 {
3519 delete_insn (insn);
3520 deleted = 1;
3521 }
3522 else
3523 return 1;
3524 }
3525
3526 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
3527 if (delete_labelref_insn (XEXP (link, 0), label, 1))
3528 {
3529 if (delete_this)
3530 {
3531 delete_insn (insn);
3532 deleted = 1;
3533 }
3534 else
3535 return 1;
3536 }
3537
3538 return deleted;
3539 }
3540 \f
3541 /* Like rtx_equal_p except that it considers two REGs as equal
3542 if they renumber to the same value and considers two commutative
3543 operations to be the same if the order of the operands has been
3544 reversed.
3545
3546 ??? Addition is not commutative on the PA due to the weird implicit
3547 space register selection rules for memory addresses. Therefore, we
3548 don't consider a + b == b + a.
3549
3550 We could/should make this test a little tighter. Possibly only
3551 disabling it on the PA via some backend macro or only disabling this
3552 case when the PLUS is inside a MEM. */
3553
3554 int
3555 rtx_renumbered_equal_p (x, y)
3556 rtx x, y;
3557 {
3558 register int i;
3559 register RTX_CODE code = GET_CODE (x);
3560 register const char *fmt;
3561
3562 if (x == y)
3563 return 1;
3564
3565 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3566 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3567 && GET_CODE (SUBREG_REG (y)) == REG)))
3568 {
3569 int reg_x = -1, reg_y = -1;
3570 int word_x = 0, word_y = 0;
3571
3572 if (GET_MODE (x) != GET_MODE (y))
3573 return 0;
3574
3575 /* If we haven't done any renumbering, don't
3576 make any assumptions. */
3577 if (reg_renumber == 0)
3578 return rtx_equal_p (x, y);
3579
3580 if (code == SUBREG)
3581 {
3582 reg_x = REGNO (SUBREG_REG (x));
3583 word_x = SUBREG_WORD (x);
3584
3585 if (reg_renumber[reg_x] >= 0)
3586 {
3587 reg_x = reg_renumber[reg_x] + word_x;
3588 word_x = 0;
3589 }
3590 }
3591
3592 else
3593 {
3594 reg_x = REGNO (x);
3595 if (reg_renumber[reg_x] >= 0)
3596 reg_x = reg_renumber[reg_x];
3597 }
3598
3599 if (GET_CODE (y) == SUBREG)
3600 {
3601 reg_y = REGNO (SUBREG_REG (y));
3602 word_y = SUBREG_WORD (y);
3603
3604 if (reg_renumber[reg_y] >= 0)
3605 {
3606 reg_y = reg_renumber[reg_y];
3607 word_y = 0;
3608 }
3609 }
3610
3611 else
3612 {
3613 reg_y = REGNO (y);
3614 if (reg_renumber[reg_y] >= 0)
3615 reg_y = reg_renumber[reg_y];
3616 }
3617
3618 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
3619 }
3620
3621 /* Now we have disposed of all the cases
3622 in which different rtx codes can match. */
3623 if (code != GET_CODE (y))
3624 return 0;
3625
3626 switch (code)
3627 {
3628 case PC:
3629 case CC0:
3630 case ADDR_VEC:
3631 case ADDR_DIFF_VEC:
3632 return 0;
3633
3634 case CONST_INT:
3635 return INTVAL (x) == INTVAL (y);
3636
3637 case LABEL_REF:
3638 /* We can't assume nonlocal labels have their following insns yet. */
3639 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3640 return XEXP (x, 0) == XEXP (y, 0);
3641
3642 /* Two label-refs are equivalent if they point at labels
3643 in the same position in the instruction stream. */
3644 return (next_real_insn (XEXP (x, 0))
3645 == next_real_insn (XEXP (y, 0)));
3646
3647 case SYMBOL_REF:
3648 return XSTR (x, 0) == XSTR (y, 0);
3649
3650 case CODE_LABEL:
3651 /* If we didn't match EQ equality above, they aren't the same. */
3652 return 0;
3653
3654 default:
3655 break;
3656 }
3657
3658 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3659
3660 if (GET_MODE (x) != GET_MODE (y))
3661 return 0;
3662
3663 /* For commutative operations, the RTXs match if the operands match in
3664 any order. Also handle the simple binary and unary cases without a loop.
3665
3666 ??? Don't consider PLUS a commutative operator; see comments above. */
3667 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3668 && code != PLUS)
3669 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3670 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
3671 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
3672 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
3673 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
3674 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3675 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
3676 else if (GET_RTX_CLASS (code) == '1')
3677 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
3678
3679 /* Compare the elements. If any pair of corresponding elements
3680 fails to match, return 0 for the whole thing. */
3681
3682 fmt = GET_RTX_FORMAT (code);
3683 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3684 {
3685 register int j;
3686 switch (fmt[i])
3687 {
3688 case 'w':
3689 if (XWINT (x, i) != XWINT (y, i))
3690 return 0;
3691 break;
3692
3693 case 'i':
3694 if (XINT (x, i) != XINT (y, i))
3695 return 0;
3696 break;
3697
3698 case 's':
3699 if (strcmp (XSTR (x, i), XSTR (y, i)))
3700 return 0;
3701 break;
3702
3703 case 'e':
3704 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
3705 return 0;
3706 break;
3707
3708 case 'u':
3709 if (XEXP (x, i) != XEXP (y, i))
3710 return 0;
3711 /* fall through. */
3712 case '0':
3713 break;
3714
3715 case 'E':
3716 if (XVECLEN (x, i) != XVECLEN (y, i))
3717 return 0;
3718 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3719 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
3720 return 0;
3721 break;
3722
3723 default:
3724 abort ();
3725 }
3726 }
3727 return 1;
3728 }
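
/* Illustration (hypothetical register numbers): if reg_renumber[100]
   is 3, then (reg:SI 100) and (reg:SI 3) compare equal here even
   though rtx_equal_p would reject them, which is what makes this
   predicate usable for cross-jumping after register allocation.  */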
3729 \f
3730 /* If X is a hard register or equivalent to one or a subregister of one,
3731 return the hard register number. If X is a pseudo register that was not
3732 assigned a hard register, return the pseudo register number. Otherwise,
3733 return -1. Any rtx is valid for X. */
3734
3735 int
3736 true_regnum (x)
3737 rtx x;
3738 {
3739 if (GET_CODE (x) == REG)
3740 {
3741 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3742 return reg_renumber[REGNO (x)];
3743 return REGNO (x);
3744 }
3745 if (GET_CODE (x) == SUBREG)
3746 {
3747 int base = true_regnum (SUBREG_REG (x));
3748 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3749 return SUBREG_WORD (x) + base;
3750 }
3751 return -1;
3752 }
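
/* Examples (hypothetical numbers): for (subreg:SI (reg:DI 2) 1) with
   hard register 2, true_regnum returns 2 + 1 = 3; for a pseudo that
   never received a hard register it returns the pseudo's own number;
   for anything else, -1.  */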
3753 \f
3754 /* Optimize code of the form:
3755
3756 for (x = a[i]; x; ...)
3757 ...
3758 for (x = a[i]; x; ...)
3759 ...
3760 foo:
3761
3762 Loop optimize will change the above code into
3763
3764 if (x = a[i])
3765 for (;;)
3766 { ...; if (! (x = ...)) break; }
3767 if (x = a[i])
3768 for (;;)
3769 { ...; if (! (x = ...)) break; }
3770 foo:
3771
3772 In general, if the first test fails, the program can branch
3773 directly to `foo' and skip the second try which is doomed to fail.
3774 We run this after loop optimization and before flow analysis. */
3775
3776 /* When comparing the insn patterns, we track the fact that different
3777 pseudo-register numbers may have been used in each computation.
3778 The following array stores an equivalence -- same_regs[I] == J means
3779 that pseudo register I was used in the first set of tests in a context
3780 where J was used in the second set. We also count the number of such
3781 pending equivalences. If nonzero, the expressions really aren't the
3782 same. */
3783
3784 static int *same_regs;
3785
3786 static int num_same_regs;
3787
3788 /* Track any registers modified between the target of the first jump and
3789 the second jump. They never compare equal. */
3790
3791 static char *modified_regs;
3792
3793 /* Record if memory was modified. */
3794
3795 static int modified_mem;
3796
3797 /* Called via note_stores on each insn between the target of the first
3798 branch and the second branch. It marks any changed registers. */
3799
3800 static void
3801 mark_modified_reg (dest, x, data)
3802 rtx dest;
3803 rtx x ATTRIBUTE_UNUSED;
3804 void *data ATTRIBUTE_UNUSED;
3805 {
3806 int regno;
3807 unsigned int i;
3808
3809 if (GET_CODE (dest) == SUBREG)
3810 dest = SUBREG_REG (dest);
3811
3812 if (GET_CODE (dest) == MEM)
3813 modified_mem = 1;
3814
3815 if (GET_CODE (dest) != REG)
3816 return;
3817
3818 regno = REGNO (dest);
3819 if (regno >= FIRST_PSEUDO_REGISTER)
3820 modified_regs[regno] = 1;
3821 else
3822 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
3823 modified_regs[regno + i] = 1;
3824 }
3825
3826 /* F is the first insn in the chain of insns. */
3827
3828 void
3829 thread_jumps (f, max_reg, flag_before_loop)
3830 rtx f;
3831 int max_reg;
3832 int flag_before_loop;
3833 {
3834 /* Basic algorithm is to find a conditional branch,
3835 the label it may branch to, and the branch after
3836 that label. If the two branches test the same condition,
3837 walk back from both branch paths until the insn patterns
3838 differ, or code labels are hit. If we make it back to
3839 the target of the first branch, then we know that the first branch
3840 will either always succeed or always fail depending on the relative
3841 senses of the two branches. So adjust the first branch accordingly
3842 in this case. */
3843
3844 rtx label, b1, b2, t1, t2;
3845 enum rtx_code code1, code2;
3846 rtx b1op0, b1op1, b2op0, b2op1;
3847 int changed = 1;
3848 int i;
3849 int *all_reset;
3850 enum rtx_code reversed_code1, reversed_code2;
3851
3852 /* Allocate register tables and quick-reset table. */
3853 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
3854 same_regs = (int *) xmalloc (max_reg * sizeof (int));
3855 all_reset = (int *) xmalloc (max_reg * sizeof (int));
3856 for (i = 0; i < max_reg; i++)
3857 all_reset[i] = -1;

  while (changed)
    {
      changed = 0;

      for (b1 = f; b1; b1 = NEXT_INSN (b1))
        {
          rtx set;
          rtx set2;

          /* Get to a candidate branch insn.  */
          if (GET_CODE (b1) != JUMP_INSN
              || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
            continue;

          memset (modified_regs, 0, max_reg * sizeof (char));
          modified_mem = 0;

          memcpy (same_regs, all_reset, max_reg * sizeof (int));
          num_same_regs = 0;

          label = JUMP_LABEL (b1);

          /* Look for a branch after the target.  Record any registers and
             memory modified between the target and the branch.  Stop when we
             get to a label since we can't know what was changed there.  */
          for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
            {
              if (GET_CODE (b2) == CODE_LABEL)
                break;

              else if (GET_CODE (b2) == JUMP_INSN)
                {
                  /* If this is an unconditional jump and is the only use of
                     its target label, we can follow it.  */
                  if (any_uncondjump_p (b2)
                      && onlyjump_p (b2)
                      && JUMP_LABEL (b2) != 0
                      && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
                    {
                      b2 = JUMP_LABEL (b2);
                      continue;
                    }
                  else
                    break;
                }

              if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
                continue;

              if (GET_CODE (b2) == CALL_INSN)
                {
                  modified_mem = 1;
                  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                    if (call_used_regs[i] && ! fixed_regs[i]
                        && i != STACK_POINTER_REGNUM
                        && i != FRAME_POINTER_REGNUM
                        && i != HARD_FRAME_POINTER_REGNUM
                        && i != ARG_POINTER_REGNUM)
                      modified_regs[i] = 1;
                }

              note_stores (PATTERN (b2), mark_modified_reg, NULL);
            }

          /* Check the next candidate branch insn from the label
             of the first.  */
          if (b2 == 0
              || GET_CODE (b2) != JUMP_INSN
              || b2 == b1
              || !any_condjump_p (b2)
              || !onlyjump_p (b2))
            continue;
          set = pc_set (b1);
          set2 = pc_set (b2);

          /* Get the comparison codes and operands, reversing the
             codes if appropriate.  If we don't have comparison codes,
             we can't do anything.  */
          b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
          b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
          code1 = GET_CODE (XEXP (SET_SRC (set), 0));
          reversed_code1 = code1;
          if (XEXP (SET_SRC (set), 1) == pc_rtx)
            code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
          else
            reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);

          b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
          b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
          code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
          reversed_code2 = code2;
          if (XEXP (SET_SRC (set2), 1) == pc_rtx)
            code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
          else
            reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);

          /* If they test the same things, and knowing whether B1
             branches tells us whether B2 branches, check if we can
             thread the branch.  */
          if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
              && rtx_equal_for_thread_p (b1op1, b2op1, b2)
              && (comparison_dominates_p (code1, code2)
                  || comparison_dominates_p (code1, reversed_code2)))
            {
              t1 = prev_nonnote_insn (b1);
              t2 = prev_nonnote_insn (b2);

              while (t1 != 0 && t2 != 0)
                {
                  if (t2 == label)
                    {
                      /* We have reached the target of the first branch.
                         If there are no pending register equivalents,
                         we know that this branch will either always
                         succeed (if the senses of the two branches are
                         the same) or always fail (if not).  */
                      rtx new_label;

                      if (num_same_regs != 0)
                        break;

                      if (comparison_dominates_p (code1, code2))
                        new_label = JUMP_LABEL (b2);
                      else
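                        /* B1 taken implies B2 not taken; thread to
                           the point just past B2.  */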
                        new_label = get_label_after (b2);

                      if (JUMP_LABEL (b1) != new_label)
                        {
                          rtx prev = PREV_INSN (new_label);

                          if (flag_before_loop
                              && GET_CODE (prev) == NOTE
                              && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
                            {
                              /* Don't thread to the loop label.  If a loop
                                 label is reused, loop optimization will
                                 be disabled for that loop.  */
                              new_label = gen_label_rtx ();
                              emit_label_after (new_label, PREV_INSN (prev));
                            }
                          changed |= redirect_jump (b1, new_label, 1);
                        }
                      break;
                    }

                  /* If either of these is not a normal insn (it might be
                     a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail.  (NOTEs
                     have already been skipped above.)  Similarly, fail
                     if the insns are different.  */
                  if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
                      || recog_memoized (t1) != recog_memoized (t2)
                      || ! rtx_equal_for_thread_p (PATTERN (t1),
                                                   PATTERN (t2), t2))
                    break;

                  t1 = prev_nonnote_insn (t1);
                  t2 = prev_nonnote_insn (t2);
                }
            }
        }
    }

  /* Clean up.  */
  free (modified_regs);
  free (same_regs);
  free (all_reset);
}
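
/* A typical invocation (illustrative only; the exact call site in
   rest_of_compilation varies between versions) might be

   thread_jumps (get_insns (), max_reg_num (), 1);

   with FLAG_BEFORE_LOOP nonzero for a run before loop optimization,
   so that loop-start labels are not reused.  */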
\f
/* This is like RTX_EQUAL_P except that it knows about our handling of
   possibly equivalent registers and knows to consider volatile and
   modified objects as not equal.

   YINSN is the insn containing Y.  */

int
rtx_equal_for_thread_p (x, y, yinsn)
     rtx x, y;
     rtx yinsn;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register const char *fmt;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For floating-point, consider everything unequal.  This is a bit
     pessimistic, but this pass would only rarely do anything for FP
     anyway.  */
  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
             && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
            || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
                && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
            && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
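
  /* For example (illustrative register numbers), (plus:SI (reg 101)
     (const_int 4)) can match (plus:SI (const_int 4) (reg 205)) here,
     provided the pending equivalence same_regs[101] == 205 is
     consistent, because both operand orders are tried.  */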

  /* Handle special-cases first.  */
  switch (code)
    {
    case REG:
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
        return 1;

      /* If either is a user variable or a hard register, there can be
         no equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
          || REGNO (x) < FIRST_PSEUDO_REGISTER
          || REGNO (y) < FIRST_PSEUDO_REGISTER)
        return 0;

      if (same_regs[REGNO (x)] == -1)
        {
          same_regs[REGNO (x)] = REGNO (y);
          num_same_regs++;

          /* If this is the first time we are seeing a register on the `Y'
             side, see if it is the last use.  If not, we can't thread the
             jump, so mark it as not equivalent.  */
          if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
            return 0;

          return 1;
        }
      else
        return (same_regs[REGNO (x)] == (int) REGNO (y));

      break;

    case MEM:
      /* If memory was modified or either operand is volatile, they are
         not equivalent.  Otherwise, compare the addresses.  */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

    case ASM_INPUT:
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' entry if we are setting registers
         recorded as equivalent.  Then process the source.  */
      if (GET_CODE (SET_DEST (x)) == REG
          && GET_CODE (SET_DEST (y)) == REG)
        {
          if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
            {
              same_regs[REGNO (SET_DEST (x))] = -1;
              num_same_regs--;
            }
          else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
            return 0;
        }
      else
        {
          if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
            return 0;
        }

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  if (x == y)
    return 1;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
                                        XVECEXP (y, i, j), yinsn) == 0)
              return 0;
          break;

        case 'e':
          if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
            return 0;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          abort ();
        }
    }
  return 1;
}