jump.c (redirect_exp_1): Rework from old redirect_exp.
/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This is the jump-optimization pass of the compiler.
   It is run two or three times: once before cse, sometimes once after cse,
   and once after reload (before final).

   jump_optimize deletes unreachable code and labels that are not used.
   It also deletes jumps that jump to the following insn,
   and simplifies jumps around unconditional jumps and jumps
   to unconditional jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   Optionally, cross-jumping can be done.  Currently it is done
   only the last time (when after reload and before final).
   In fact, the code for cross-jumping now assumes that register
   allocation has been done, since it uses `rtx_renumbered_equal_p'.

   Jump optimization is done after cse when cse's constant-propagation
   causes jumps to become unconditional or to be deleted.

   Unreachable loops are not detected here, because the labels
   have references and the insns appear reachable from the labels.
   find_basic_blocks in flow.c finds and deletes such loops.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"

/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */
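/* For example (hypothetical uids): if unconditional jumps with uids
   12 and 30 both target the label whose uid is 7, then jump_chain[7]
   points to the uid-12 jump, jump_chain[12] points to the uid-30 jump,
   and jump_chain[30] is zero, ending the chain.  */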

static rtx *jump_chain;

/* Maximum index in jump_chain.  */

static int max_jump_chain;

/* Set nonzero by jump_optimize if control can fall through
   to the end of the function.  */
int can_reach_end;

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C and
   R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;

static int init_label_info              PARAMS ((rtx));
static void delete_barrier_successors   PARAMS ((rtx));
static void mark_all_labels             PARAMS ((rtx, int));
static rtx delete_unreferenced_labels   PARAMS ((rtx));
static void delete_noop_moves           PARAMS ((rtx));
static int calculate_can_reach_end      PARAMS ((rtx, int));
static int duplicate_loop_exit_test     PARAMS ((rtx));
static void find_cross_jump             PARAMS ((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump               PARAMS ((rtx, rtx, rtx));
static int jump_back_p                  PARAMS ((rtx, rtx));
static int tension_vector_labels        PARAMS ((rtx, int));
static void mark_jump_label             PARAMS ((rtx, rtx, int, int));
static void delete_computation          PARAMS ((rtx));
static void redirect_exp_1              PARAMS ((rtx *, rtx, rtx, rtx));
static void invert_exp_1                PARAMS ((rtx, rtx));
static void delete_from_jump_chain      PARAMS ((rtx));
static int delete_labelref_insn         PARAMS ((rtx, rtx, int));
static void mark_modified_reg           PARAMS ((rtx, rtx, void *));
static void redirect_tablejump          PARAMS ((rtx, rtx));
static void jump_optimize_1             PARAMS ((rtx, int, int, int, int, int));
#if ! defined(HAVE_cc0) && ! defined(HAVE_conditional_arithmetic)
static rtx find_insert_position         PARAMS ((rtx, rtx));
#endif
static int returnjump_p_1               PARAMS ((rtx *, void *));
static void delete_prior_computation    PARAMS ((rtx, rtx));
\f
/* Main external entry point into the jump optimizer.  See comments before
   jump_optimize_1 for descriptions of the arguments.  */
void
jump_optimize (f, cross_jump, noop_moves, after_regscan)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
{
  jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
}

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 1, 0);
}

/* Alternate entry into the jump optimizer.  Do only trivial optimizations.  */
void
jump_optimize_minimal (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 0, 1);
}
\f
/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is 1, detect matching code
   before a jump and its destination and unify them.
   If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

   If NOOP_MOVES is nonzero, delete no-op move insns.

   If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
   after regscan, and it is safe to use regno_first_uid and regno_last_uid.

   If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
   and JUMP_LABEL field for jumping insns.

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.

   If MINIMAL is nonzero, then we only perform trivial optimizations:

   * Removal of unreachable code after BARRIERs.
   * Removal of unreferenced CODE_LABELs.
   * Removal of a jump to the next instruction.
   * Removal of a conditional jump followed by an unconditional jump
     to the same target as the conditional jump.
   * Simplify a conditional jump around an unconditional jump.
   * Simplify a jump to a jump.
   * Delete extraneous line number notes.  */

static void
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
                 mark_labels_only, minimal)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
     int mark_labels_only;
     int minimal;
{
  register rtx insn, next;
  int changed;
  int old_max_reg;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;

  cross_jump_death_matters = (cross_jump == 2);
  max_uid = init_label_info (f) + 1;

  /* If we are performing cross jump optimizations, then initialize
     tables mapping UIDs to EH regions to avoid incorrect movement
     of insns from one EH region to another.  */
  if (flag_exceptions && cross_jump)
    init_insn_eh_region (f, max_uid);

  if (! mark_labels_only)
    delete_barrier_successors (f);

  /* Leave some extra room for labels and duplicate exit test insns
     we make.  */
  max_jump_chain = max_uid * 14 / 10;
  jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));

  mark_all_labels (f, cross_jump);

  /* Keep track of labels used from static data;
     they cannot ever be deleted.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  check_exception_handler_labels ();

  /* Keep track of labels used for marking handlers for exception
     regions; they cannot usually be deleted.  */

  for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
     notes and recompute LABEL_NUSES.  */
  if (mark_labels_only)
    goto end;

  if (! minimal)
    exception_optimize ();

  last_insn = delete_unreferenced_labels (f);

  if (noop_moves)
    delete_noop_moves (f);

  /* If we haven't yet gotten to reload and we have just run regscan,
     delete any insn that sets a register that isn't used elsewhere.
     This helps some of the optimizations below by leaving fewer insns
     to be jumped around.  */

  if (optimize && ! reload_completed && after_regscan)
    for (insn = f; insn; insn = next)
      {
        rtx set = single_set (insn);

        next = NEXT_INSN (insn);

        if (set && GET_CODE (SET_DEST (set)) == REG
            && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
            && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
            /* We use regno_last_note_uid so as not to delete the setting
               of a reg that's used in notes.  A subsequent optimization
               might arrange to use that reg for real.  */
            && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
            && ! side_effects_p (SET_SRC (set))
            && ! find_reg_note (insn, REG_RETVAL, 0)
            /* An ADDRESSOF expression can turn into a use of the internal arg
               pointer, so do not delete the initialization of the internal
               arg pointer yet.  If it is truly dead, flow will delete the
               initializing insn.  */
            && SET_DEST (set) != current_function_internal_arg_pointer)
          delete_insn (insn);
      }

  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  changed = 1;
  old_max_reg = max_reg_num ();
  while (changed)
    {
      changed = 0;

      for (insn = f; insn; insn = next)
        {
          rtx reallabelprev;
          rtx temp, temp1, temp2 = NULL_RTX, temp3, temp4, temp5, temp6;
          rtx nlabel;
          int this_is_simplejump, this_is_condjump, reversep = 0;
          int this_is_condjump_in_parallel;

          next = NEXT_INSN (insn);

          /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
             jump.  Try to optimize by duplicating the loop exit test if so.
             This is only safe immediately after regscan, because it uses
             the values of regno_first_uid and regno_last_uid.  */
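          /* Sketch of the idea (hypothetical labels): a loop emitted as
               "goto test; body: ... test: if (cond) goto body;"
             gains a duplicated exit test up front, so execution can fall
             straight into the body instead of jumping to the test first.  */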
          if (after_regscan && GET_CODE (insn) == NOTE
              && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
              && (temp1 = next_nonnote_insn (insn)) != 0
              && simplejump_p (temp1))
            {
              temp = PREV_INSN (insn);
              if (duplicate_loop_exit_test (insn))
                {
                  changed = 1;
                  next = NEXT_INSN (temp);
                  continue;
                }
            }

          if (GET_CODE (insn) != JUMP_INSN)
            continue;

          this_is_simplejump = simplejump_p (insn);
          this_is_condjump = condjump_p (insn);
          this_is_condjump_in_parallel = condjump_in_parallel_p (insn);

          /* Tension the labels in dispatch tables.  */
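          /* E.g. (a hypothetical sketch): a table entry pointing at
             "L1: goto L2" can be redirected to point at L2 directly,
             shortening the chain the dispatched jump has to follow.  */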

          if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
            changed |= tension_vector_labels (PATTERN (insn), 0);
          if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            changed |= tension_vector_labels (PATTERN (insn), 1);

          /* See if this jump goes to another jump and redirect if so.  */
          nlabel = follow_jumps (JUMP_LABEL (insn));
          if (nlabel != JUMP_LABEL (insn))
            changed |= redirect_jump (insn, nlabel);

          if (! optimize || minimal)
            continue;

          /* If a dispatch table always goes to the same place,
             get rid of it and replace the insn that uses it.  */

          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            {
              int i;
              rtx pat = PATTERN (insn);
              int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
              int len = XVECLEN (pat, diff_vec_p);
              rtx dispatch = prev_real_insn (insn);
              rtx set;

              for (i = 0; i < len; i++)
                if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
                    != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
                  break;

              if (i == len
                  && dispatch != 0
                  && GET_CODE (dispatch) == JUMP_INSN
                  && JUMP_LABEL (dispatch) != 0
                  /* Don't mess with a casesi insn.
                     XXX according to the comment before computed_jump_p(),
                     all casesi insns should be a parallel of the jump
                     and a USE of a LABEL_REF.  */
                  && ! ((set = single_set (dispatch)) != NULL
                        && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
                  && next_real_insn (JUMP_LABEL (dispatch)) == insn)
                {
                  redirect_tablejump (dispatch,
                                      XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
                  changed = 1;
                }
            }

          /* If a jump references the end of the function, try to turn
             it into a RETURN insn, possibly a conditional one.  */
          if (JUMP_LABEL (insn) != 0
              && (next_active_insn (JUMP_LABEL (insn)) == 0
                  || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
                      == RETURN))
            changed |= redirect_jump (insn, NULL_RTX);

          reallabelprev = prev_active_insn (JUMP_LABEL (insn));

          /* Detect jump to following insn.  */
          if (reallabelprev == insn && this_is_condjump)
            {
              next = next_real_insn (JUMP_LABEL (insn));
              delete_jump (insn);
              changed = 1;
              continue;
            }

          /* Detect a conditional jump going to the same place
             as an immediately following unconditional jump.  */
          else if (this_is_condjump
                   && (temp = next_active_insn (insn)) != 0
                   && simplejump_p (temp)
                   && (next_active_insn (JUMP_LABEL (insn))
                       == next_active_insn (JUMP_LABEL (temp))))
            {
              /* Don't mess up test coverage analysis.  */
              temp2 = temp;
              if (flag_test_coverage && !reload_completed)
                for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
                  if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
                    break;

              if (temp2 == temp)
                {
                  delete_jump (insn);
                  changed = 1;
                  continue;
                }
            }

          /* Detect a conditional jump jumping over an unconditional jump.  */
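          /* A hypothetical sketch of the insns being matched:
                 INSN:          if (cond) goto L1;
                 REALLABELPREV: goto L2;
                 L1: ...
             which becomes, by inverting INSN and deleting the simplejump,
                 INSN:          if (!cond) goto L2;
                 L1: ...  */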

          else if ((this_is_condjump || this_is_condjump_in_parallel)
                   && ! this_is_simplejump
                   && reallabelprev != 0
                   && GET_CODE (reallabelprev) == JUMP_INSN
                   && prev_active_insn (reallabelprev) == insn
                   && no_labels_between_p (insn, reallabelprev)
                   && simplejump_p (reallabelprev))
            {
              /* When we invert the unconditional jump, we will be
                 decrementing the usage count of its old label.
                 Make sure that we don't delete it now because that
                 might cause the following code to be deleted.  */
              rtx prev_uses = prev_nonnote_insn (reallabelprev);
              rtx prev_label = JUMP_LABEL (insn);

              if (prev_label)
                ++LABEL_NUSES (prev_label);

              if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
                {
                  /* It is very likely that if there are USE insns before
                     this jump, they hold REG_DEAD notes.  These REG_DEAD
                     notes are no longer valid due to this optimization,
                     and will cause later passes that use life analysis
                     (notably delayed-branch scheduling) to think that
                     these registers are dead when they are not.

                     To prevent this trouble, we just remove the USE insns
                     from the insn chain.  */

                  while (prev_uses && GET_CODE (prev_uses) == INSN
                         && GET_CODE (PATTERN (prev_uses)) == USE)
                    {
                      rtx useless = prev_uses;
                      prev_uses = prev_nonnote_insn (prev_uses);
                      delete_insn (useless);
                    }

                  delete_insn (reallabelprev);
                  changed = 1;
                }

              /* We can now safely delete the label if it is unreferenced
                 since the delete_insn above has deleted the BARRIER.  */
              if (prev_label && --LABEL_NUSES (prev_label) == 0)
                delete_insn (prev_label);

              next = NEXT_INSN (insn);
            }

          /* If we have an unconditional jump preceded by a USE, try to put
             the USE before the target and jump there.  This simplifies many
             of the optimizations below since we don't have to worry about
             dealing with these USE insns.  We only do this if the label
             being branched to already has the identical USE or if code
             never falls through to that label.  */

          else if (this_is_simplejump
                   && (temp = prev_nonnote_insn (insn)) != 0
                   && GET_CODE (temp) == INSN
                   && GET_CODE (PATTERN (temp)) == USE
                   && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
                   && (GET_CODE (temp1) == BARRIER
                       || (GET_CODE (temp1) == INSN
                           && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
                   /* Don't do this optimization if we have a loop containing
                      only the USE instruction, and the loop start label has
                      a usage count of 1.  This is because we will redo this
                      optimization every time through the outer loop, and jump
                      opt will never exit.  */
                   && ! ((temp2 = prev_nonnote_insn (temp)) != 0
                         && temp2 == JUMP_LABEL (insn)
                         && LABEL_NUSES (temp2) == 1))
            {
              if (GET_CODE (temp1) == BARRIER)
                {
                  emit_insn_after (PATTERN (temp), temp1);
                  temp1 = NEXT_INSN (temp1);
                }

              delete_insn (temp);
              redirect_jump (insn, get_label_before (temp1));
              reallabelprev = prev_real_insn (temp1);
              changed = 1;
              next = NEXT_INSN (insn);
            }

          /* Simplify   if (...) x = a; else x = b; by converting it
             to         x = b; if (...) x = a;
             if B is sufficiently simple, the test doesn't involve X,
             and nothing in the test modifies B or X.

             If we have small register classes, we also can't do this if X
             is a hard register.

             If the "x = b;" insn has any REG_NOTES, we don't do this because
             of the possibility that we are running after CSE and there is a
             REG_EQUAL note that is only valid if the branch has already been
             taken.  If we move the insn with the REG_EQUAL note, we may
             fold the comparison to always be false in a later CSE pass.
             (We could also delete the REG_NOTES when moving the insn, but it
             seems simpler to not move it.)  An exception is that we can move
             the insn if the only note is a REG_EQUAL or REG_EQUIV whose
             value is the same as "b".

             INSN is the branch over the `else' part.

             We set:

             TEMP to the jump insn preceding "x = a;"
             TEMP1 to X
             TEMP2 to the insn that sets "x = b;"
             TEMP3 to the insn that sets "x = a;"
             TEMP4 to the set of "x = b";  */
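          /* A hypothetical sketch of the insns being matched:
                 TEMP:  if (cond) goto L1;   -- branches over "x = a;"
                 TEMP3: x = a;
                 INSN:  goto L2;             -- branches over "x = b;"
                 L1:
                 TEMP2: x = b;
                 L2: ...  */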

          if (this_is_simplejump
              && (temp3 = prev_active_insn (insn)) != 0
              && GET_CODE (temp3) == INSN
              && (temp4 = single_set (temp3)) != 0
              && GET_CODE (temp1 = SET_DEST (temp4)) == REG
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
              && (temp2 = next_active_insn (insn)) != 0
              && GET_CODE (temp2) == INSN
              && (temp4 = single_set (temp2)) != 0
              && rtx_equal_p (SET_DEST (temp4), temp1)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && (REG_NOTES (temp2) == 0
                  || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
                       || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
                      && XEXP (REG_NOTES (temp2), 1) == 0
                      && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
                                      SET_SRC (temp4))))
              && (temp = prev_active_insn (temp3)) != 0
              && condjump_p (temp) && ! simplejump_p (temp)
              /* TEMP must skip over the "x = a;" insn */
              && prev_real_insn (JUMP_LABEL (temp)) == insn
              && no_labels_between_p (insn, JUMP_LABEL (temp))
              /* There must be no other entries to the "x = b;" insn.  */
              && no_labels_between_p (JUMP_LABEL (temp), temp2)
              /* INSN must either branch to the insn after TEMP2 or the insn
                 after TEMP2 must branch to the same place as INSN.  */
              && (reallabelprev == temp2
                  || ((temp5 = next_active_insn (temp2)) != 0
                      && simplejump_p (temp5)
                      && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
            {
              /* The test expression, X, may be a complicated test with
                 multiple branches.  See if we can find all the uses of
                 the label that TEMP branches to without hitting a CALL_INSN
                 or a jump to somewhere else.  */
              rtx target = JUMP_LABEL (temp);
              int nuses = LABEL_NUSES (target);
              rtx p;
#ifdef HAVE_cc0
              rtx q;
#endif

              /* Set P to the first jump insn that goes around "x = a;".  */
              for (p = temp; nuses && p; p = prev_nonnote_insn (p))
                {
                  if (GET_CODE (p) == JUMP_INSN)
                    {
                      if (condjump_p (p) && ! simplejump_p (p)
                          && JUMP_LABEL (p) == target)
                        {
                          nuses--;
                          if (nuses == 0)
                            break;
                        }
                      else
                        break;
                    }
                  else if (GET_CODE (p) == CALL_INSN)
                    break;
                }

#ifdef HAVE_cc0
              /* We cannot insert anything between a set of cc and its use
                 so if P uses cc0, we must back up to the previous insn.  */
              q = prev_nonnote_insn (p);
              if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
                  && sets_cc0_p (PATTERN (q)))
                p = q;
#endif

              if (p)
                p = PREV_INSN (p);

              /* If we found all the uses and there was no data conflict, we
                 can move the assignment unless we can branch into the middle
                 from somewhere.  */
              if (nuses == 0 && p
                  && no_labels_between_p (p, insn)
                  && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
                  && ! reg_set_between_p (temp1, p, temp3)
                  && (GET_CODE (SET_SRC (temp4)) == CONST_INT
                      || ! modified_between_p (SET_SRC (temp4), p, temp2))
                  /* Verify that registers used by the jump are not clobbered
                     by the instruction being moved.  */
                  && ! regs_set_between_p (PATTERN (temp),
                                           PREV_INSN (temp2),
                                           NEXT_INSN (temp2)))
                {
                  emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
                  delete_insn (temp2);

                  /* Set NEXT to an insn that we know won't go away.  */
                  next = next_active_insn (insn);

                  /* Delete the jump around the set.  Note that we must do
                     this before we redirect the test jumps so that it won't
                     delete the code immediately following the assignment
                     we moved (which might be a jump).  */

                  delete_insn (insn);

                  /* We either have two consecutive labels or a jump to
                     a jump, so adjust all the JUMP_INSNs to branch to where
                     INSN branches to.  */
                  for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
                    if (GET_CODE (p) == JUMP_INSN)
                      redirect_jump (p, target);

                  changed = 1;
                  next = NEXT_INSN (insn);
                  continue;
                }
            }

          /* Simplify   if (...) { x = a; goto l; } x = b; by converting it
             to         x = a; if (...) goto l; x = b;
             if A is sufficiently simple, the test doesn't involve X,
             and nothing in the test modifies A or X.

             If we have small register classes, we also can't do this if X
             is a hard register.

             If the "x = a;" insn has any REG_NOTES, we don't do this because
             of the possibility that we are running after CSE and there is a
             REG_EQUAL note that is only valid if the branch has already been
             taken.  If we move the insn with the REG_EQUAL note, we may
             fold the comparison to always be false in a later CSE pass.
             (We could also delete the REG_NOTES when moving the insn, but it
             seems simpler to not move it.)  An exception is that we can move
             the insn if the only note is a REG_EQUAL or REG_EQUIV whose
             value is the same as "a".

             INSN is the goto.

             We set:

             TEMP to the jump insn preceding "x = a;"
             TEMP1 to X
             TEMP2 to the insn that sets "x = b;"
             TEMP3 to the insn that sets "x = a;"
             TEMP4 to the set of "x = a";  */
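          /* The mirror image of the previous case (hypothetical sketch):
                 TEMP:  if (cond) goto L1;
                 TEMP3: x = a;
                 INSN:  goto l;
                 L1:
                 TEMP2: x = b;
             becomes
                 x = a;
                 TEMP:  if (!cond) goto l;   -- inverted by invert_jump
                 TEMP2: x = b;  */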

          if (this_is_simplejump
              && (temp2 = next_active_insn (insn)) != 0
              && GET_CODE (temp2) == INSN
              && (temp4 = single_set (temp2)) != 0
              && GET_CODE (temp1 = SET_DEST (temp4)) == REG
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
              && (temp3 = prev_active_insn (insn)) != 0
              && GET_CODE (temp3) == INSN
              && (temp4 = single_set (temp3)) != 0
              && rtx_equal_p (SET_DEST (temp4), temp1)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && (REG_NOTES (temp3) == 0
                  || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
                       || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
                      && XEXP (REG_NOTES (temp3), 1) == 0
                      && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
                                      SET_SRC (temp4))))
              && (temp = prev_active_insn (temp3)) != 0
              && condjump_p (temp) && ! simplejump_p (temp)
              /* TEMP must skip over the "x = a;" insn */
              && prev_real_insn (JUMP_LABEL (temp)) == insn
              && no_labels_between_p (temp, insn))
            {
              rtx prev_label = JUMP_LABEL (temp);
              rtx insert_after = prev_nonnote_insn (temp);

#ifdef HAVE_cc0
              /* We cannot insert anything between a set of cc and its use.  */
              if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
                  && sets_cc0_p (PATTERN (insert_after)))
                insert_after = prev_nonnote_insn (insert_after);
#endif
              ++LABEL_NUSES (prev_label);

              if (insert_after
                  && no_labels_between_p (insert_after, temp)
                  && ! reg_referenced_between_p (temp1, insert_after, temp3)
                  && ! reg_referenced_between_p (temp1, temp3,
                                                 NEXT_INSN (temp2))
                  && ! reg_set_between_p (temp1, insert_after, temp)
                  && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
                  /* Verify that registers used by the jump are not clobbered
                     by the instruction being moved.  */
                  && ! regs_set_between_p (PATTERN (temp),
                                           PREV_INSN (temp3),
                                           NEXT_INSN (temp3))
                  && invert_jump (temp, JUMP_LABEL (insn)))
                {
                  emit_insn_after_with_line_notes (PATTERN (temp3),
                                                   insert_after, temp3);
                  delete_insn (temp3);
                  delete_insn (insn);
                  /* Set NEXT to an insn that we know won't go away.  */
                  next = temp2;
                  changed = 1;
                }
              if (prev_label && --LABEL_NUSES (prev_label) == 0)
                delete_insn (prev_label);
              if (changed)
                continue;
            }

#if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)

          /* If we have if (...) x = exp;  and branches are expensive,
             EXP is a single insn, does not have any side effects, cannot
             trap, and is not too costly, convert this to
             t = exp; if (...) x = t;

             Don't do this when we have CC0 because it is unlikely to help
             and we'd need to worry about where to place the new insn and
             the potential for conflicts.  We also can't do this when we have
             notes on the insn for the same reason as above.

             If we have conditional arithmetic, this will make this
             harder to optimize later and isn't needed, so don't do it
             in that case either.

             We set:

             TEMP to the "x = exp;" insn.
             TEMP1 to the single set in the "x = exp;" insn.
             TEMP2 to "x".  */
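          /* A hypothetical sketch, with "x = y * z;" as the guarded insn:
                 if (cond) goto L;  x = y * z;  L: ...
             becomes
                 t = y * z;  if (cond) goto L;  x = t;  L: ...
             so only a cheap move, not the whole computation, remains
             subject to the branch.  */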

          if (! reload_completed
              && this_is_condjump && ! this_is_simplejump
              && BRANCH_COST >= 3
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && REG_NOTES (temp) == 0
              && (reallabelprev == temp
                  || ((temp2 = next_active_insn (temp)) != 0
                      && simplejump_p (temp2)
                      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
              && (temp1 = single_set (temp)) != 0
              && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
              && GET_CODE (SET_SRC (temp1)) != REG
              && GET_CODE (SET_SRC (temp1)) != SUBREG
              && GET_CODE (SET_SRC (temp1)) != CONST_INT
              && ! side_effects_p (SET_SRC (temp1))
              && ! may_trap_p (SET_SRC (temp1))
              && rtx_cost (SET_SRC (temp1), SET) < 10)
            {
              rtx new = gen_reg_rtx (GET_MODE (temp2));

              if ((temp3 = find_insert_position (insn, temp))
                  && validate_change (temp, &SET_DEST (temp1), new, 0))
                {
                  next = emit_insn_after (gen_move_insn (temp2, new), insn);
                  emit_insn_after_with_line_notes (PATTERN (temp),
                                                   PREV_INSN (temp3), temp);
                  delete_insn (temp);
                  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

                  if (after_regscan)
                    {
                      reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
                      old_max_reg = max_reg_num ();
                    }
                }
            }

          /* Similarly, if it takes two insns to compute EXP but they
             have the same destination.  Here TEMP3 will be the second
             insn and TEMP4 the SET from that insn.  */

          if (! reload_completed
              && this_is_condjump && ! this_is_simplejump
              && BRANCH_COST >= 4
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && REG_NOTES (temp) == 0
              && (temp3 = next_nonnote_insn (temp)) != 0
              && GET_CODE (temp3) == INSN
              && REG_NOTES (temp3) == 0
              && (reallabelprev == temp3
                  || ((temp2 = next_active_insn (temp3)) != 0
                      && simplejump_p (temp2)
                      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
              && (temp1 = single_set (temp)) != 0
              && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
              && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
              && ! side_effects_p (SET_SRC (temp1))
              && ! may_trap_p (SET_SRC (temp1))
              && rtx_cost (SET_SRC (temp1), SET) < 10
              && (temp4 = single_set (temp3)) != 0
              && rtx_equal_p (SET_DEST (temp4), temp2)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && rtx_cost (SET_SRC (temp4), SET) < 10)
            {
              rtx new = gen_reg_rtx (GET_MODE (temp2));

              if ((temp5 = find_insert_position (insn, temp))
                  && (temp6 = find_insert_position (insn, temp3))
                  && validate_change (temp, &SET_DEST (temp1), new, 0))
                {
                  /* Use the earliest of temp5 and temp6.  */
                  if (temp5 != insn)
                    temp6 = temp5;
                  next = emit_insn_after (gen_move_insn (temp2, new), insn);
                  emit_insn_after_with_line_notes (PATTERN (temp),
                                                   PREV_INSN (temp6), temp);
                  emit_insn_after_with_line_notes
                    (replace_rtx (PATTERN (temp3), temp2, new),
                     PREV_INSN (temp6), temp3);
                  delete_insn (temp);
                  delete_insn (temp3);
                  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

                  if (after_regscan)
                    {
                      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
                      old_max_reg = max_reg_num ();
                    }
                }
            }

          /* Finally, handle the case where two insns are used to
             compute EXP but a temporary register is used.  Here we must
             ensure that the temporary register is not used anywhere else.  */

          if (! reload_completed
              && after_regscan
              && this_is_condjump && ! this_is_simplejump
              && BRANCH_COST >= 4
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && REG_NOTES (temp) == 0
              && (temp3 = next_nonnote_insn (temp)) != 0
              && GET_CODE (temp3) == INSN
              && REG_NOTES (temp3) == 0
              && (reallabelprev == temp3
                  || ((temp2 = next_active_insn (temp3)) != 0
                      && simplejump_p (temp2)
                      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
              && (temp1 = single_set (temp)) != 0
              && (temp5 = SET_DEST (temp1),
                  (GET_CODE (temp5) == REG
                   || (GET_CODE (temp5) == SUBREG
                       && (temp5 = SUBREG_REG (temp5),
                           GET_CODE (temp5) == REG))))
              && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
              && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
              && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
              && ! side_effects_p (SET_SRC (temp1))
              && ! may_trap_p (SET_SRC (temp1))
              && rtx_cost (SET_SRC (temp1), SET) < 10
              && (temp4 = single_set (temp3)) != 0
              && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
              && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
              && rtx_equal_p (SET_DEST (temp4), temp2)
              && ! side_effects_p (SET_SRC (temp4))
              && ! may_trap_p (SET_SRC (temp4))
              && rtx_cost (SET_SRC (temp4), SET) < 10)
            {
              rtx new = gen_reg_rtx (GET_MODE (temp2));

              if ((temp5 = find_insert_position (insn, temp))
                  && (temp6 = find_insert_position (insn, temp3))
                  && validate_change (temp3, &SET_DEST (temp4), new, 0))
                {
                  /* Use the earliest of temp5 and temp6.  */
                  if (temp5 != insn)
                    temp6 = temp5;
                  next = emit_insn_after (gen_move_insn (temp2, new), insn);
                  emit_insn_after_with_line_notes (PATTERN (temp),
                                                   PREV_INSN (temp6), temp);
                  emit_insn_after_with_line_notes (PATTERN (temp3),
                                                   PREV_INSN (temp6), temp3);
                  delete_insn (temp);
                  delete_insn (temp3);
                  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

                  if (after_regscan)
                    {
                      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
                      old_max_reg = max_reg_num ();
                    }
                }
            }
#endif /* ! HAVE_cc0 && ! HAVE_conditional_arithmetic */

#ifdef HAVE_conditional_arithmetic
          /* ??? This is disabled in genconfig, as this simple-minded
             transformation can incredibly lengthen register lifetimes.

             Consider this example:

                234 (set (pc)
                      (if_then_else (ne (reg:DI 149) (const_int 0 [0x0]))
                        (label_ref 248) (pc)))
                237 (set (reg/i:DI 0 $0) (const_int 1 [0x1]))
                239 (set (pc) (label_ref 2382))
                248 (code_label ("yybackup"))

             This will be transformed to:

                237 (set (reg/i:DI 0 $0)
                      (if_then_else:DI (eq (reg:DI 149) (const_int 0 [0x0]))
                        (const_int 1 [0x1]) (reg/i:DI 0 $0)))
                239 (set (pc)
                      (if_then_else (eq (reg:DI 149) (const_int 0 [0x0]))
                        (label_ref 2382) (pc)))

             which, from this narrow viewpoint, looks fine.  Except that
             between this and 3 other occurrences of the same pattern, $0
             is now live for basically the entire function, and we'll
             get an abort in caller_save.

             Any replacement for this code should recall that a set of
             a register that is not live need not, and indeed should not,
             be conditionalized.  Either that, or delay the transformation
             until after register allocation.  */

          /* See if this is a conditional jump around a small number of
             instructions that we can conditionalize.  Don't do this before
             the initial CSE pass or after reload.

             We reject any insns that have side effects or may trap.
             Strictly speaking, this is not needed since the machine may
             support conditionalizing these too, but we won't deal with that
             now.  Specifically, this means that we can't conditionalize a
             CALL_INSN, which some machines, such as the ARC, can do, but
             this is a very minor optimization.  */
          if (this_is_condjump && ! this_is_simplejump
              && cse_not_expected && ! reload_completed
              && BRANCH_COST > 2
              && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (insn)), 0),
                                           insn))
            {
              rtx ourcond = XEXP (SET_SRC (PATTERN (insn)), 0);
              int num_insns = 0;
              char *storage = (char *) oballoc (0);
              int last_insn = 0, failed = 0;
              rtx changed_jump = 0;

              ourcond = gen_rtx (reverse_condition (GET_CODE (ourcond)),
                                 VOIDmode, XEXP (ourcond, 0),
                                 XEXP (ourcond, 1));

              /* Scan forward BRANCH_COST real insns looking for the JUMP_LABEL
                 of this insn.  We see if we think we can conditionalize the
                 insns we pass.  For now, we only deal with insns that have
                 one SET.  We stop after an insn that modifies anything in
                 OURCOND, if we have too many insns, or if we have an insn
                 with a side effect or that may trap.  Note that we will
                 be modifying any unconditional jumps we encounter to be
                 conditional; this will have the effect of also doing this
                 optimization on the "else" the next time around.  */
              for (temp1 = NEXT_INSN (insn);
                   num_insns <= BRANCH_COST && ! failed && temp1 != 0
                   && GET_CODE (temp1) != CODE_LABEL;
                   temp1 = NEXT_INSN (temp1))
                {
                  /* Ignore everything but an active insn.  */
                  if (GET_RTX_CLASS (GET_CODE (temp1)) != 'i'
                      || GET_CODE (PATTERN (temp1)) == USE
                      || GET_CODE (PATTERN (temp1)) == CLOBBER)
                    continue;

                  /* If this was an unconditional jump, record it since we'll
                     need to remove the BARRIER if we succeed.  We can only
                     have one such jump since there must be a label after
                     the BARRIER and it's either ours, in which case it's the
                     only one, or some other, in which case we'd fail.
                     Likewise if it's a CALL_INSN followed by a BARRIER.  */

                  if (simplejump_p (temp1)
                      || (GET_CODE (temp1) == CALL_INSN
                          && NEXT_INSN (temp1) != 0
                          && GET_CODE (NEXT_INSN (temp1)) == BARRIER))
                    {
                      if (changed_jump == 0)
                        changed_jump = temp1;
                      else
                        changed_jump
                          = gen_rtx_INSN_LIST (VOIDmode, temp1, changed_jump);
                    }

                  /* See if we are allowed another insn and if this insn
                     is one we think we may be able to handle.  */
                  if (++num_insns > BRANCH_COST
                      || last_insn
                      || (((temp2 = single_set (temp1)) == 0
                           || side_effects_p (SET_SRC (temp2))
                           || may_trap_p (SET_SRC (temp2)))
                          && GET_CODE (temp1) != CALL_INSN))
                    failed = 1;
                  else if (temp2 != 0)
                    validate_change (temp1, &SET_SRC (temp2),
                                     gen_rtx_IF_THEN_ELSE
                                     (GET_MODE (SET_DEST (temp2)),
                                      copy_rtx (ourcond),
                                      SET_SRC (temp2), SET_DEST (temp2)),
                                     1);
                  else
                    {
                      /* This is a CALL_INSN that doesn't have a SET.  */
                      rtx *call_loc = &PATTERN (temp1);

                      if (GET_CODE (*call_loc) == PARALLEL)
                        call_loc = &XVECEXP (*call_loc, 0, 0);

                      validate_change (temp1, call_loc,
                                       gen_rtx_IF_THEN_ELSE
                                       (VOIDmode, copy_rtx (ourcond),
                                        *call_loc, const0_rtx),
                                       1);
                    }

                  if (modified_in_p (ourcond, temp1))
                    last_insn = 1;
                }

              /* If we've reached our jump label, haven't failed, and all
                 the changes above are valid, we can delete this jump
                 insn.  Also remove a BARRIER after any jump that used
                 to be unconditional and remove any REG_EQUAL or REG_EQUIV
                 that might have previously been present on insns we
                 made conditional.  */
              if (temp1 == JUMP_LABEL (insn) && ! failed
                  && apply_change_group ())
                {
                  for (temp1 = NEXT_INSN (insn); temp1 != JUMP_LABEL (insn);
                       temp1 = NEXT_INSN (temp1))
                    if (GET_RTX_CLASS (GET_CODE (temp1)) == 'i')
                      for (temp2 = REG_NOTES (temp1); temp2 != 0;
                           temp2 = XEXP (temp2, 1))
                        if (REG_NOTE_KIND (temp2) == REG_EQUAL
                            || REG_NOTE_KIND (temp2) == REG_EQUIV)
                          remove_note (temp1, temp2);

                  if (changed_jump != 0)
                    {
                      while (GET_CODE (changed_jump) == INSN_LIST)
                        {
                          delete_barrier (NEXT_INSN (XEXP (changed_jump, 0)));
                          changed_jump = XEXP (changed_jump, 1);
                        }

                      delete_barrier (NEXT_INSN (changed_jump));
                    }

                  delete_insn (insn);
                  changed = 1;
                  continue;
                }
              else
                {
                  cancel_changes (0);
                  obfree (storage);
                }
            }
#endif
          /* If branches are expensive, convert
                if (foo) bar++;    to    bar += (foo != 0);
             and similarly for "bar--;"

             INSN is the conditional branch around the arithmetic.  We set:

             TEMP is the arithmetic insn.
             TEMP1 is the SET doing the arithmetic.
             TEMP2 is the operand being incremented or decremented.
             TEMP3 to the condition being tested.
             TEMP4 to the earliest insn used to find the condition.  */
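          /* E.g. (a hypothetical sketch), for "if (a < b) x++;" laid out as
                 INSN: if (a >= b) goto L;
                 TEMP: x = x + 1;
                 L: ...
             the reversed condition is computed with a store-flag insn:
                 t = (a < b);
                 x = x + t;  */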

          if ((BRANCH_COST >= 2
#ifdef HAVE_incscc
               || HAVE_incscc
#endif
#ifdef HAVE_decscc
               || HAVE_decscc
#endif
               )
              && ! reload_completed
              && this_is_condjump && ! this_is_simplejump
              && (temp = next_nonnote_insn (insn)) != 0
              && (temp1 = single_set (temp)) != 0
              && (temp2 = SET_DEST (temp1),
                  GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
              && GET_CODE (SET_SRC (temp1)) == PLUS
              && (XEXP (SET_SRC (temp1), 1) == const1_rtx
                  || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
              && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
              && ! side_effects_p (temp2)
              && ! may_trap_p (temp2)
              /* INSN must either branch to the insn after TEMP or the insn
                 after TEMP must branch to the same place as INSN.  */
              && (reallabelprev == temp
                  || ((temp3 = next_active_insn (temp)) != 0
                      && simplejump_p (temp3)
                      && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
              && (temp3 = get_condition (insn, &temp4)) != 0
              /* We must be comparing objects whose modes imply the size.
                 We could handle BLKmode if (1) emit_store_flag could
                 and (2) we could find the size reliably.  */
              && GET_MODE (XEXP (temp3, 0)) != BLKmode
              && can_reverse_comparison_p (temp3, insn))
            {
              rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
              enum rtx_code code = reverse_condition (GET_CODE (temp3));

              start_sequence ();

              /* It must be the case that TEMP2 is not modified in the range
                 [TEMP4, INSN).  The one exception we make is if the insn
                 before INSN sets TEMP2 to something which is also unchanged
                 in that range.  In that case, we can move the initialization
                 into our sequence.  */

              if ((temp5 = prev_active_insn (insn)) != 0
                  && no_labels_between_p (temp5, insn)
                  && GET_CODE (temp5) == INSN
                  && (temp6 = single_set (temp5)) != 0
                  && rtx_equal_p (temp2, SET_DEST (temp6))
                  && (CONSTANT_P (SET_SRC (temp6))
                      || GET_CODE (SET_SRC (temp6)) == REG
                      || GET_CODE (SET_SRC (temp6)) == SUBREG))
                {
                  emit_insn (PATTERN (temp5));
                  init_insn = temp5;
                  init = SET_SRC (temp6);
                }
              if (CONSTANT_P (init)
                  || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
                target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
                                          XEXP (temp3, 0), XEXP (temp3, 1),
                                          VOIDmode,
                                          (code == LTU || code == LEU
                                           || code == GTU || code == GEU), 1);

              /* If we can do the store-flag, do the addition or
                 subtraction.  */

              if (target)
                target = expand_binop (GET_MODE (temp2),
                                       (XEXP (SET_SRC (temp1), 1) == const1_rtx
                                        ? add_optab : sub_optab),
                                       temp2, target, temp2, 0, OPTAB_WIDEN);

              if (target != 0)
                {
                  /* Put the result back in temp2 in case it isn't already.
                     Then replace the jump, possibly a CC0-setting insn in
                     front of the jump, and TEMP, with the sequence we have
                     made.  */

                  if (target != temp2)
                    emit_move_insn (temp2, target);

                  seq = get_insns ();
                  end_sequence ();

                  emit_insns_before (seq, temp4);
                  delete_insn (temp);

                  if (init_insn)
                    delete_insn (init_insn);

                  next = NEXT_INSN (insn);
#ifdef HAVE_cc0
                  delete_insn (prev_nonnote_insn (insn));
#endif
                  delete_insn (insn);

                  if (after_regscan)
                    {
                      reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
                      old_max_reg = max_reg_num ();
                    }

                  changed = 1;
                  continue;
                }
              else
                end_sequence ();
            }

          /* Try to use a conditional move (if the target has them), or a
             store-flag insn.  If the target has conditional arithmetic as
             well as conditional move, the above code will have done something.
             Note that we prefer the above code since it is more general: the
             code below can make changes that require work to undo.

             The general case here is:

             1) x = a; if (...) x = b; and
             2) if (...) x = b;

             If the jump would be faster, the machine should not have defined
             the movcc or scc insns!  These cases are often made by the
             previous optimization.

             The second case is treated as  x = x; if (...) x = b;.

             INSN here is the jump around the store.  We set:

             TEMP to the "x op= b;" insn.
             TEMP1 to X.
             TEMP2 to B.
             TEMP3 to A (X in the second case).
             TEMP4 to the condition being tested.
             TEMP5 to the earliest insn used to find the condition.
             TEMP6 to the SET of TEMP.  */
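          /* E.g. case 1 on a conditional-move target (hypothetical sketch):
                 x = a; if (y < z) goto L; x = b; L: ...
             can become the branch-free
                 x = a; x = ((y < z) ? x : b);  */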

          if (/* We can't do this after reload has completed.  */
              ! reload_completed
#ifdef HAVE_conditional_arithmetic
              /* Defer this until after CSE so the above code gets the
                 first crack at it.  */
              && cse_not_expected
#endif
              && this_is_condjump && ! this_is_simplejump
              /* Set TEMP to the "x = b;" insn.  */
              && (temp = next_nonnote_insn (insn)) != 0
              && GET_CODE (temp) == INSN
              && (temp6 = single_set (temp)) != NULL_RTX
              && GET_CODE (temp1 = SET_DEST (temp6)) == REG
              && (! SMALL_REGISTER_CLASSES
                  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
              && ! side_effects_p (temp2 = SET_SRC (temp6))
              && ! may_trap_p (temp2)
              /* Allow either form, but prefer the former if both apply.
                 There is no point in using the old value of TEMP1 if
                 it is a register, since cse will alias them.  It can
                 lose if the old value were a hard register since CSE
                 won't replace hard registers.  Avoid using TEMP3 if
                 small register classes and it is a hard register.  */
              && (((temp3 = reg_set_last (temp1, insn)) != 0
                   && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
                         && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
                  /* Make the latter case look like  x = x; if (...) x = b;  */
                  || (temp3 = temp1, 1))
              /* INSN must either branch to the insn after TEMP or the insn
                 after TEMP must branch to the same place as INSN.  */
              && (reallabelprev == temp
                  || ((temp4 = next_active_insn (temp)) != 0
                      && simplejump_p (temp4)
                      && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
              && (temp4 = get_condition (insn, &temp5)) != 0
              /* We must be comparing objects whose modes imply the size.
                 We could handle BLKmode if (1) emit_store_flag could
                 and (2) we could find the size reliably.  */
              && GET_MODE (XEXP (temp4, 0)) != BLKmode
              /* Even if branches are cheap, the store_flag optimization
                 can win when the operation to be performed can be
                 expressed directly.  */
#ifdef HAVE_cc0
              /* If the previous insn sets CC0 and something else, we can't
                 do this since we are going to delete that insn.  */

              && ! ((temp6 = prev_nonnote_insn (insn)) != 0
                    && GET_CODE (temp6) == INSN
                    && (sets_cc0_p (PATTERN (temp6)) == -1
                        || (sets_cc0_p (PATTERN (temp6)) == 1
                            && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
#endif
              )
            {
#ifdef HAVE_conditional_move
              /* First try a conditional move.  */
              {
                enum rtx_code code = GET_CODE (temp4);
                rtx var = temp1;
                rtx cond0, cond1, aval, bval;
                rtx target, new_insn;

                /* Copy the compared variables into cond0 and cond1, so that
                   any side effects performed in or after the old comparison
                   will not affect our compare, which will come later.  */
                /* ??? Is it possible to just use the comparison in the jump
                   insn?  After all, we're going to delete it.  We'd have
                   to modify emit_conditional_move to take a comparison rtx
                   instead or write a new function.  */

                /* We want the target to be able to simplify comparisons with
                   zero (and maybe other constants as well), so don't create
                   pseudos for them.  There's no need to either.  */
                if (GET_CODE (XEXP (temp4, 0)) == CONST_INT
                    || GET_CODE (XEXP (temp4, 0)) == CONST_DOUBLE)
                  cond0 = XEXP (temp4, 0);
                else
                  cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));

                if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
                    || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
                  cond1 = XEXP (temp4, 1);
                else
                  cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));

                /* Careful about copying these values -- an IOR or what may
                   need to do other things, like clobber flags.  */
                /* ??? Assume for the moment that AVAL is ok.  */
                aval = temp3;

                start_sequence ();

                /* We're dealing with a single_set insn with no side effects
                   on SET_SRC.  We do need to be reasonably certain that if
                   we need to force BVAL into a register that we won't
                   clobber the flags -- general_operand should suffice.  */
                if (general_operand (temp2, GET_MODE (var)))
                  bval = temp2;
                else
                  {
                    bval = gen_reg_rtx (GET_MODE (var));
                    new_insn = copy_rtx (temp);
                    temp6 = single_set (new_insn);
                    SET_DEST (temp6) = bval;
                    emit_insn (PATTERN (new_insn));
                  }

                target = emit_conditional_move (var, code,
                                                cond0, cond1, VOIDmode,
                                                aval, bval, GET_MODE (var),
                                                (code == LTU || code == GEU
                                                 || code == LEU || code == GTU));

                if (target)
                  {
                    rtx seq1, seq2, last;
                    int copy_ok;

                    /* Save the conditional move sequence but don't emit it
                       yet.  On some machines, like the alpha, it is possible
                       that temp5 == insn, so next generate the sequence that
                       saves the compared values and then emit both
                       sequences ensuring seq1 occurs before seq2.  */
                    seq2 = get_insns ();
                    end_sequence ();

                    /* "Now that we can't fail..."  Famous last words.
                       Generate the copy insns that preserve the compared
                       values.  */
                    start_sequence ();
                    emit_move_insn (cond0, XEXP (temp4, 0));
                    if (cond1 != XEXP (temp4, 1))
                      emit_move_insn (cond1, XEXP (temp4, 1));
                    seq1 = get_insns ();
                    end_sequence ();

                    /* Validate the sequence -- this may be some weird
                       bit-extract-and-test instruction for which there
                       exists no complementary bit-extract insn.  */
                    copy_ok = 1;
                    for (last = seq1; last; last = NEXT_INSN (last))
                      if (recog_memoized (last) < 0)
                        {
                          copy_ok = 0;
                          break;
                        }

                    if (copy_ok)
                      {
                        emit_insns_before (seq1, temp5);

                        /* Insert conditional move after insn, to be sure
                           that the jump and a possible compare won't be
                           separated.  */
                        last = emit_insns_after (seq2, insn);

                        /* ??? We can also delete the insn that sets X to A.
                           Flow will do it too though.  */
                        delete_insn (temp);
                        next = NEXT_INSN (insn);
                        delete_jump (insn);

                        if (after_regscan)
                          {
                            reg_scan_update (seq1, NEXT_INSN (last),
                                             old_max_reg);
                            old_max_reg = max_reg_num ();
                          }

                        changed = 1;
                        continue;
                      }
                  }
                else
                  end_sequence ();
              }
#endif

              /* That didn't work, try a store-flag insn.

                 We further divide the cases into:

                 1) x = a; if (...) x = b; and either A or B is zero,
                 2) if (...) x = 0; and jumps are expensive,
                 3) x = a; if (...) x = b; and A and B are constants where all
                    the set bits in A are also set in B and jumps are expensive,
                 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
                    more expensive, and
                 5) if (...) x = b; if jumps are even more expensive.  */
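              /* E.g. case 1 with A == 0 and a reversible condition
                 (a hypothetical sketch):
                     x = 0; if (y == z) goto L; x = b; L: ...
                 can become the branch-free
                     t = (y != z);  -- store-flag, normalized to -1/0
                     x = t & b;  */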

              if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
                  /* We will be passing this as operand into expand_and.  No
                     good if it's not valid as an operand.  */
                  && general_operand (temp2, GET_MODE (temp2))
                  && ((GET_CODE (temp3) == CONST_INT)
                      /* Make the latter case look like
                         x = x; if (...) x = 0;  */
                      || (temp3 = temp1,
                          ((BRANCH_COST >= 2
                            && temp2 == const0_rtx)
                           || BRANCH_COST >= 3)))
                  /* If B is zero, OK; if A is zero, can only do (1) if we
                     can reverse the condition.  See if (3) applies possibly
                     by reversing the condition.  Prefer reversing to (4) when
                     branches are very expensive.  */
                  && (((BRANCH_COST >= 2
                        || STORE_FLAG_VALUE == -1
                        || (STORE_FLAG_VALUE == 1
                            /* Check that the mask is a power of two,
                               so that it can probably be generated
                               with a shift.  */
                            && GET_CODE (temp3) == CONST_INT
                            && exact_log2 (INTVAL (temp3)) >= 0))
                       && (reversep = 0, temp2 == const0_rtx))
                      || ((BRANCH_COST >= 2
                           || STORE_FLAG_VALUE == -1
                           || (STORE_FLAG_VALUE == 1
                               && GET_CODE (temp2) == CONST_INT
                               && exact_log2 (INTVAL (temp2)) >= 0))
                          && temp3 == const0_rtx
                          && (reversep = can_reverse_comparison_p (temp4, insn)))
                      || (BRANCH_COST >= 2
                          && GET_CODE (temp2) == CONST_INT
                          && GET_CODE (temp3) == CONST_INT
                          && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
                              || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
                                  && (reversep = can_reverse_comparison_p (temp4,
                                                                           insn)))))
                      || BRANCH_COST >= 3))
                {
                  enum rtx_code code = GET_CODE (temp4);
                  rtx uval, cval, var = temp1;
                  int normalizep;
                  rtx target;

                  /* If necessary, reverse the condition.  */
                  if (reversep)
                    code = reverse_condition (code), uval = temp2, cval = temp3;
                  else
                    uval = temp3, cval = temp2;

                  /* If CVAL is non-zero, normalize to -1.  Otherwise, if UVAL
                     is the constant 1, it is best to just compute the result
                     directly.  If UVAL is constant and STORE_FLAG_VALUE
                     includes all of its bits, it is best to compute the flag
                     value unnormalized and `and' it with UVAL.  Otherwise,
                     normalize to -1 and `and' with UVAL.  */
                  normalizep = (cval != const0_rtx ? -1
                                : (uval == const1_rtx ? 1
                                   : (GET_CODE (uval) == CONST_INT
                                      && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
                                   ? 0 : -1));

                  /* We will be putting the store-flag insn immediately in
                     front of the comparison that was originally being done,
                     so we know all the variables in TEMP4 will be valid.
                     However, this might be in front of the assignment of
                     A to VAR.  If it is, it would clobber the store-flag
                     we will be emitting.

                     Therefore, emit into a temporary which will be copied to
                     VAR immediately after TEMP.  */

                  start_sequence ();
                  target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
                                            XEXP (temp4, 0), XEXP (temp4, 1),
                                            VOIDmode,
                                            (code == LTU || code == LEU
                                             || code == GEU || code == GTU),
                                            normalizep);
                  if (target)
                    {
                      rtx seq;
                      rtx before = insn;

                      seq = get_insns ();
                      end_sequence ();

                      /* Put the store-flag insns in front of the first insn
                         used to compute the condition to ensure that we
                         use the same values of them as the current
                         comparison.  However, the remainder of the insns we
                         generate will be placed directly in front of the
                         jump insn, in case any of the pseudos we use
                         are modified earlier.  */

                      emit_insns_before (seq, temp5);

                      start_sequence ();

                      /* Both CVAL and UVAL are non-zero.  */
                      if (cval != const0_rtx && uval != const0_rtx)
                        {
                          rtx tem1, tem2;

                          tem1 = expand_and (uval, target, NULL_RTX);
                          if (GET_CODE (cval) == CONST_INT
                              && GET_CODE (uval) == CONST_INT
                              && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
                            tem2 = cval;
                          else
                            {
                              tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
                                                  target, NULL_RTX, 0);
                              tem2 = expand_and (cval, tem2,
                                                 (GET_CODE (tem2) == REG
                                                  ? tem2 : 0));
                            }

                          /* If we usually make new pseudos, do so here.  This
                             turns out to help machines that have conditional
                             move insns.  */
                          /* ??? Conditional moves have already been handled.
                             This may be obsolete.  */

                          if (flag_expensive_optimizations)
                            target = 0;

                          target = expand_binop (GET_MODE (var), ior_optab,
                                                 tem1, tem2, target,
                                                 1, OPTAB_WIDEN);
                        }
                      else if (normalizep != 1)
                        {
                          /* We know that either CVAL or UVAL is zero.  If
                             UVAL is zero, negate TARGET and `and' with CVAL.
                             Otherwise, `and' with UVAL.  */
                          if (uval == const0_rtx)
                            {
                              target = expand_unop (GET_MODE (var), one_cmpl_optab,
                                                    target, NULL_RTX, 0);
                              uval = cval;
                            }

                          target = expand_and (uval, target,
                                               (GET_CODE (target) == REG
                                                && ! preserve_subexpressions_p ()
                                                ? target : NULL_RTX));
                        }

                      emit_move_insn (var, target);
                      seq = get_insns ();
                      end_sequence ();
#ifdef HAVE_cc0
                      /* If INSN uses CC0, we must not separate it from the
                         insn that sets cc0.  */
                      if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
                        before = prev_nonnote_insn (before);
#endif
                      emit_insns_before (seq, before);

                      delete_insn (temp);
                      next = NEXT_INSN (insn);
                      delete_jump (insn);

                      if (after_regscan)
                        {
                          reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
                          old_max_reg = max_reg_num ();
                        }

                      changed = 1;
                      continue;
                    }
                  else
                    end_sequence ();
                }
            }
1634
1635
1636 /* Simplify if (...) x = 1; else {...} if (x) ...
1637 We recognize this case scanning backwards as well.
1638
1639 TEMP is the assignment to x;
1640 TEMP1 is the label at the head of the second if. */
1641 	  /* ??? This should call get_condition to find the values being
1642 compared, instead of looking for a COMPARE insn when HAVE_cc0
1643 is not defined. This would allow it to work on the m88k. */
1644 	  /* ??? This optimization is only safe before cse is run if HAVE_cc0
1645 is not defined and the condition is tested by a separate compare
1646 insn. This is because the code below assumes that the result
1647 of the compare dies in the following branch.
1648
1649 Not only that, but there might be other insns between the
1650 compare and branch whose results are live. Those insns need
1651 to be executed.
1652
1653 A way to fix this is to move the insns at JUMP_LABEL (insn)
1654 to before INSN. If we are running before flow, they will
1655 be deleted if they aren't needed. But this doesn't work
1656 well after flow.
1657
1658 This is really a special-case of jump threading, anyway. The
1659 right thing to do is to replace this and jump threading with
1660 much simpler code in cse.
1661
1662 This code has been turned off in the non-cc0 case in the
1663 meantime. */
1664
1665 #ifdef HAVE_cc0
1666 else if (this_is_simplejump
1667 /* Safe to skip USE and CLOBBER insns here
1668 since they will not be deleted. */
1669 && (temp = prev_active_insn (insn))
1670 && no_labels_between_p (temp, insn)
1671 && GET_CODE (temp) == INSN
1672 && GET_CODE (PATTERN (temp)) == SET
1673 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1674 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1675 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1676 /* If we find that the next value tested is `x'
1677 (TEMP1 is the insn where this happens), win. */
1678 && GET_CODE (temp1) == INSN
1679 && GET_CODE (PATTERN (temp1)) == SET
1680 #ifdef HAVE_cc0
1681 /* Does temp1 `tst' the value of x? */
1682 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1683 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1684 && (temp1 = next_nonnote_insn (temp1))
1685 #else
1686 /* Does temp1 compare the value of x against zero? */
1687 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1688 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1689 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1690 == SET_DEST (PATTERN (temp)))
1691 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1692 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1693 #endif
1694 && condjump_p (temp1))
1695 {
1696 /* Get the if_then_else from the condjump. */
1697 rtx choice = SET_SRC (PATTERN (temp1));
1698 if (GET_CODE (choice) == IF_THEN_ELSE)
1699 {
1700 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1701 rtx val = SET_SRC (PATTERN (temp));
1702 rtx cond
1703 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1704 val, const0_rtx);
1705 rtx ultimate;
1706
1707 if (cond == const_true_rtx)
1708 ultimate = XEXP (choice, 1);
1709 else if (cond == const0_rtx)
1710 ultimate = XEXP (choice, 2);
1711 else
1712 ultimate = 0;
1713
1714 if (ultimate == pc_rtx)
1715 ultimate = get_label_after (temp1);
1716 else if (ultimate && GET_CODE (ultimate) != RETURN)
1717 ultimate = XEXP (ultimate, 0);
1718
1719 	      if (ultimate && JUMP_LABEL (insn) != ultimate)
1720 changed |= redirect_jump (insn, ultimate);
1721 }
1722 }
1723 #endif
1724
1725 #if 0
1726 /* @@ This needs a bit of work before it will be right.
1727
1728 Any type of comparison can be accepted for the first and
1729 second compare. When rewriting the first jump, we must
1730 	 compute what conditions can reach label3, and use the
1731 appropriate code. We can not simply reverse/swap the code
1732 of the first jump. In some cases, the second jump must be
1733 rewritten also.
1734
1735 For example,
1736 < == converts to > ==
1737 < != converts to == >
1738 etc.
1739
1740 If the code is written to only accept an '==' test for the second
1741 compare, then all that needs to be done is to swap the condition
1742 of the first branch.
1743
1744 	 It is questionable whether we want this optimization anyway,
1745 since if the user wrote code like this because he/she knew that
1746 the jump to label1 is taken most of the time, then rewriting
1747 this gives slower code. */
1748 /* @@ This should call get_condition to find the values being
1749 compared, instead of looking for a COMPARE insn when HAVE_cc0
1750 is not defined. This would allow it to work on the m88k. */
1751 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1752 is not defined and the condition is tested by a separate compare
1753 insn. This is because the code below assumes that the result
1754 of the compare dies in the following branch. */
1755
1756 /* Simplify test a ~= b
1757 condjump label1;
1758 test a == b
1759 condjump label2;
1760 jump label3;
1761 label1:
1762
1763 rewriting as
1764 test a ~~= b
1765 condjump label3
1766 test a == b
1767 condjump label2
1768 label1:
1769
1770 where ~= is an inequality, e.g. >, and ~~= is the swapped
1771 inequality, e.g. <.
1772
1773 We recognize this case scanning backwards.
1774
1775 TEMP is the conditional jump to `label2';
1776 TEMP1 is the test for `a == b';
1777 TEMP2 is the conditional jump to `label1';
1778 TEMP3 is the test for `a ~= b'. */
1779 else if (this_is_simplejump
1780 && (temp = prev_active_insn (insn))
1781 && no_labels_between_p (temp, insn)
1782 && condjump_p (temp)
1783 && (temp1 = prev_active_insn (temp))
1784 && no_labels_between_p (temp1, temp)
1785 && GET_CODE (temp1) == INSN
1786 && GET_CODE (PATTERN (temp1)) == SET
1787 #ifdef HAVE_cc0
1788 && sets_cc0_p (PATTERN (temp1)) == 1
1789 #else
1790 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1791 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1792 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1793 #endif
1794 && (temp2 = prev_active_insn (temp1))
1795 && no_labels_between_p (temp2, temp1)
1796 && condjump_p (temp2)
1797 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1798 && (temp3 = prev_active_insn (temp2))
1799 && no_labels_between_p (temp3, temp2)
1800 && GET_CODE (PATTERN (temp3)) == SET
1801 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1802 SET_DEST (PATTERN (temp1)))
1803 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1804 SET_SRC (PATTERN (temp3)))
1805 && ! inequality_comparisons_p (PATTERN (temp))
1806 && inequality_comparisons_p (PATTERN (temp2)))
1807 {
1808 rtx fallthrough_label = JUMP_LABEL (temp2);
1809
1810 ++LABEL_NUSES (fallthrough_label);
1811 if (swap_jump (temp2, JUMP_LABEL (insn)))
1812 {
1813 delete_insn (insn);
1814 changed = 1;
1815 }
1816
1817 if (--LABEL_NUSES (fallthrough_label) == 0)
1818 delete_insn (fallthrough_label);
1819 }
1820 #endif
1821 /* Simplify if (...) {... x = 1;} if (x) ...
1822
1823 We recognize this case backwards.
1824
1825 TEMP is the test of `x';
1826 TEMP1 is the assignment to `x' at the end of the
1827 previous statement. */
1828 /* @@ This should call get_condition to find the values being
1829 compared, instead of looking for a COMPARE insn when HAVE_cc0
1830 is not defined. This would allow it to work on the m88k. */
1831 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1832 is not defined and the condition is tested by a separate compare
1833 insn. This is because the code below assumes that the result
1834 of the compare dies in the following branch. */
1835
1836 /* ??? This has to be turned off. The problem is that the
1837 unconditional jump might indirectly end up branching to the
1838 label between TEMP1 and TEMP. We can't detect this, in general,
1839 since it may become a jump to there after further optimizations.
1840 If that jump is done, it will be deleted, so we will retry
1841 this optimization in the next pass, thus an infinite loop.
1842
1843 The present code prevents this by putting the jump after the
1844 label, but this is not logically correct. */
1845 #if 0
1846 else if (this_is_condjump
1847 /* Safe to skip USE and CLOBBER insns here
1848 since they will not be deleted. */
1849 && (temp = prev_active_insn (insn))
1850 && no_labels_between_p (temp, insn)
1851 && GET_CODE (temp) == INSN
1852 && GET_CODE (PATTERN (temp)) == SET
1853 #ifdef HAVE_cc0
1854 && sets_cc0_p (PATTERN (temp)) == 1
1855 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1856 #else
1857 	     /* Temp must be a compare insn; we can not accept a
1858 		register-to-register move here, since it may not be simply
1859 		a tst insn.  */
1860 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1861 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1862 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1863 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1864 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1865 #endif
1866 /* May skip USE or CLOBBER insns here
1867 for checking for opportunity, since we
1868 take care of them later. */
1869 && (temp1 = prev_active_insn (temp))
1870 && GET_CODE (temp1) == INSN
1871 && GET_CODE (PATTERN (temp1)) == SET
1872 #ifdef HAVE_cc0
1873 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1874 #else
1875 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1876 == SET_DEST (PATTERN (temp1)))
1877 #endif
1878 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1879 /* If this isn't true, cse will do the job. */
1880 && ! no_labels_between_p (temp1, temp))
1881 {
1882 /* Get the if_then_else from the condjump. */
1883 rtx choice = SET_SRC (PATTERN (insn));
1884 if (GET_CODE (choice) == IF_THEN_ELSE
1885 && (GET_CODE (XEXP (choice, 0)) == EQ
1886 || GET_CODE (XEXP (choice, 0)) == NE))
1887 {
1888 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1889 rtx last_insn;
1890 rtx ultimate;
1891 rtx p;
1892
1893 /* Get the place that condjump will jump to
1894 if it is reached from here. */
1895 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1896 == want_nonzero)
1897 ultimate = XEXP (choice, 1);
1898 else
1899 ultimate = XEXP (choice, 2);
1900 /* Get it as a CODE_LABEL. */
1901 if (ultimate == pc_rtx)
1902 ultimate = get_label_after (insn);
1903 else
1904 /* Get the label out of the LABEL_REF. */
1905 ultimate = XEXP (ultimate, 0);
1906
1907 /* Insert the jump immediately before TEMP, specifically
1908 after the label that is between TEMP1 and TEMP. */
1909 last_insn = PREV_INSN (temp);
1910
1911 /* If we would be branching to the next insn, the jump
1912 		 would immediately be deleted and then re-inserted in
1913 a subsequent pass over the code. So don't do anything
1914 in that case. */
1915 if (next_active_insn (last_insn)
1916 != next_active_insn (ultimate))
1917 {
1918 emit_barrier_after (last_insn);
1919 p = emit_jump_insn_after (gen_jump (ultimate),
1920 last_insn);
1921 JUMP_LABEL (p) = ultimate;
1922 ++LABEL_NUSES (ultimate);
1923 if (INSN_UID (ultimate) < max_jump_chain
1924 && INSN_CODE (p) < max_jump_chain)
1925 {
1926 jump_chain[INSN_UID (p)]
1927 = jump_chain[INSN_UID (ultimate)];
1928 jump_chain[INSN_UID (ultimate)] = p;
1929 }
1930 changed = 1;
1931 continue;
1932 }
1933 }
1934 }
1935 #endif
1936 #ifdef HAVE_trap
1937 /* Detect a conditional jump jumping over an unconditional trap. */
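	  /* For example (illustrative sketch):

		jump_if (x == 0) -> L;  trap_if true;  L:

	     becomes a single `trap_if (x != 0)', and both the branch
	     and the unconditional trap are deleted.  */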
1938 else if (HAVE_trap
1939 && this_is_condjump && ! this_is_simplejump
1940 && reallabelprev != 0
1941 && GET_CODE (reallabelprev) == INSN
1942 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1943 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1944 && prev_active_insn (reallabelprev) == insn
1945 && no_labels_between_p (insn, reallabelprev)
1946 && (temp2 = get_condition (insn, &temp4))
1947 && can_reverse_comparison_p (temp2, insn))
1948 {
1949 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1950 XEXP (temp2, 0), XEXP (temp2, 1),
1951 TRAP_CODE (PATTERN (reallabelprev)));
1952
1953 if (new)
1954 {
1955 emit_insn_before (new, temp4);
1956 delete_insn (reallabelprev);
1957 delete_jump (insn);
1958 changed = 1;
1959 continue;
1960 }
1961 }
1962 /* Detect a jump jumping to an unconditional trap. */
1963 else if (HAVE_trap && this_is_condjump
1964 && (temp = next_active_insn (JUMP_LABEL (insn)))
1965 && GET_CODE (temp) == INSN
1966 && GET_CODE (PATTERN (temp)) == TRAP_IF
1967 && (this_is_simplejump
1968 || (temp2 = get_condition (insn, &temp4))))
1969 {
1970 rtx tc = TRAP_CONDITION (PATTERN (temp));
1971
1972 if (tc == const_true_rtx
1973 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1974 {
1975 rtx new;
1976 /* Replace an unconditional jump to a trap with a trap. */
1977 if (this_is_simplejump)
1978 {
1979 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1980 delete_jump (insn);
1981 changed = 1;
1982 continue;
1983 }
1984 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1985 XEXP (temp2, 1),
1986 TRAP_CODE (PATTERN (temp)));
1987 if (new)
1988 {
1989 emit_insn_before (new, temp4);
1990 delete_jump (insn);
1991 changed = 1;
1992 continue;
1993 }
1994 }
1995 /* If the trap condition and jump condition are mutually
1996 exclusive, redirect the jump to the following insn. */
1997 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1998 && ! this_is_simplejump
1999 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
2000 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
2001 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
2002 && redirect_jump (insn, get_label_after (temp)))
2003 {
2004 changed = 1;
2005 continue;
2006 }
2007 }
2008 #endif
2009 else
2010 {
2011 /* Now that the jump has been tensioned,
2012 try cross jumping: check for identical code
2013 before the jump and before its target label. */
2014
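	  /* Illustrative sketch (labels and insns are made up):

		insn A; insn B; goto L2;   ...   insn A; insn B; L2:

	     The copy of A and B before the jump is deleted and the
	     jump is redirected to a label placed in front of the
	     surviving copy, so the common code is emitted only once.  */
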
2015 /* First, cross jumping of conditional jumps: */
2016
2017 if (cross_jump && condjump_p (insn))
2018 {
2019 rtx newjpos, newlpos;
2020 rtx x = prev_real_insn (JUMP_LABEL (insn));
2021
2022 /* A conditional jump may be crossjumped
2023 only if the place it jumps to follows
2024 an opposing jump that comes back here. */
2025
2026 if (x != 0 && ! jump_back_p (x, insn))
2027 /* We have no opposing jump;
2028 cannot cross jump this insn. */
2029 x = 0;
2030
2031 newjpos = 0;
2032 	      /* X is nonzero if it is ok to cross jump
2033 		 to code before X.  If so, see if it matches.  */
2034 if (x != 0)
2035 find_cross_jump (insn, x, 2,
2036 &newjpos, &newlpos);
2037
2038 if (newjpos != 0)
2039 {
2040 do_cross_jump (insn, newjpos, newlpos);
2041 /* Make the old conditional jump
2042 into an unconditional one. */
2043 SET_SRC (PATTERN (insn))
2044 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
2045 INSN_CODE (insn) = -1;
2046 emit_barrier_after (insn);
2047 /* Add to jump_chain unless this is a new label
2048 whose UID is too large. */
2049 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
2050 {
2051 jump_chain[INSN_UID (insn)]
2052 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2053 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2054 }
2055 changed = 1;
2056 next = insn;
2057 }
2058 }
2059
2060 /* Cross jumping of unconditional jumps:
2061 a few differences. */
2062
2063 if (cross_jump && simplejump_p (insn))
2064 {
2065 rtx newjpos, newlpos;
2066 rtx target;
2067
2068 newjpos = 0;
2069
2070 	      /* See if there is matching code just before
2071 		 JUMP_LABEL (insn) that we can share.  */
2072 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2073 &newjpos, &newlpos);
2074
2075 	      /* If we cannot cross jump to code before the label,
2076 see if we can cross jump to another jump to
2077 the same label. */
2078 /* Try each other jump to this label. */
2079 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2080 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2081 target != 0 && newjpos == 0;
2082 target = jump_chain[INSN_UID (target)])
2083 if (target != insn
2084 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2085 /* Ignore TARGET if it's deleted. */
2086 && ! INSN_DELETED_P (target))
2087 find_cross_jump (insn, target, 2,
2088 &newjpos, &newlpos);
2089
2090 if (newjpos != 0)
2091 {
2092 do_cross_jump (insn, newjpos, newlpos);
2093 changed = 1;
2094 next = insn;
2095 }
2096 }
2097
2098 /* This code was dead in the previous jump.c! */
2099 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2100 {
2101 /* Return insns all "jump to the same place"
2102 so we can cross-jump between any two of them. */
2103
2104 rtx newjpos, newlpos, target;
2105
2106 newjpos = 0;
2107
2108 	      /* A RETURN has no label before it, so we cannot match
2109 		 code before a label; instead see if we can cross
2110 		 jump with another RETURN insn directly.  */
2111 	      /* Try each other return insn in the chain.  */
2112 for (target = jump_chain[0];
2113 target != 0 && newjpos == 0;
2114 target = jump_chain[INSN_UID (target)])
2115 if (target != insn
2116 && ! INSN_DELETED_P (target)
2117 && GET_CODE (PATTERN (target)) == RETURN)
2118 find_cross_jump (insn, target, 2,
2119 &newjpos, &newlpos);
2120
2121 if (newjpos != 0)
2122 {
2123 do_cross_jump (insn, newjpos, newlpos);
2124 changed = 1;
2125 next = insn;
2126 }
2127 }
2128 }
2129 }
2130
2131 first = 0;
2132 }
2133
2134 /* Delete extraneous line number notes.
2135 Note that two consecutive notes for different lines are not really
2136 extraneous. There should be some indication where that line belonged,
2137 even if it became empty. */
2138
2139 {
2140 rtx last_note = 0;
2141
2142 for (insn = f; insn; insn = NEXT_INSN (insn))
2143 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2144 {
2145 	/* Delete this note if it is identical to the previous note.  */
2146 if (last_note
2147 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2148 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2149 {
2150 delete_insn (insn);
2151 continue;
2152 }
2153
2154 last_note = insn;
2155 }
2156 }
2157
2158   /* CAN_REACH_END is persistent for each function: once set, this pass
2159      never clears it, even on later runs.  This matters especially when
2160      we delete the NOTE_FUNCTION_END note.  CAN_REACH_END is reset by
2161      the front-end before compiling each function.  */
2162 if (! minimal && calculate_can_reach_end (last_insn, optimize != 0))
2163 can_reach_end = 1;
2164
2165 end:
2166 /* Clean up. */
2167 free (jump_chain);
2168 jump_chain = 0;
2169 }
2170 \f
2171 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2172 notes whose labels don't occur in the insn any more. Returns the
2173 largest INSN_UID found. */
2174 static int
2175 init_label_info (f)
2176 rtx f;
2177 {
2178 int largest_uid = 0;
2179 rtx insn;
2180
2181 for (insn = f; insn; insn = NEXT_INSN (insn))
2182 {
2183 if (GET_CODE (insn) == CODE_LABEL)
2184 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2185 else if (GET_CODE (insn) == JUMP_INSN)
2186 JUMP_LABEL (insn) = 0;
2187 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2188 {
2189 rtx note, next;
2190
2191 for (note = REG_NOTES (insn); note; note = next)
2192 {
2193 next = XEXP (note, 1);
2194 if (REG_NOTE_KIND (note) == REG_LABEL
2195 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2196 remove_note (insn, note);
2197 }
2198 }
2199 if (INSN_UID (insn) > largest_uid)
2200 largest_uid = INSN_UID (insn);
2201 }
2202
2203 return largest_uid;
2204 }
2205
2206 /* Delete insns following barriers, up to next label.
2207
2208 Also delete no-op jumps created by gcse. */
2209
2210 static void
2211 delete_barrier_successors (f)
2212 rtx f;
2213 {
2214 rtx insn;
2215
2216 for (insn = f; insn;)
2217 {
2218 if (GET_CODE (insn) == BARRIER)
2219 {
2220 insn = NEXT_INSN (insn);
2221
2222 never_reached_warning (insn);
2223
2224 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2225 {
2226 if (GET_CODE (insn) == NOTE
2227 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2228 insn = NEXT_INSN (insn);
2229 else
2230 insn = delete_insn (insn);
2231 }
2232 	  /* INSN is now the code_label, or zero at the end of the chain.  */
2233 }
2234
2235 /* Also remove (set (pc) (pc)) insns which can be created by
2236 gcse. We eliminate such insns now to avoid having them
2237 cause problems later. */
2238 else if (GET_CODE (insn) == JUMP_INSN
2239 && GET_CODE (PATTERN (insn)) == SET
2240 && SET_SRC (PATTERN (insn)) == pc_rtx
2241 && SET_DEST (PATTERN (insn)) == pc_rtx)
2242 insn = delete_insn (insn);
2243
2244 else
2245 insn = NEXT_INSN (insn);
2246 }
2247 }
2248
2249 /* Mark the label each jump jumps to.
2250 Combine consecutive labels, and count uses of labels.
2251
2252 For each label, make a chain (using `jump_chain')
2253 of all the *unconditional* jumps that jump to it;
2254 also make a chain of all returns.
2255
2256 CROSS_JUMP indicates whether we are doing cross jumping
2257 and if we are whether we will be paying attention to
2258 death notes or not. */
2259
2260 static void
2261 mark_all_labels (f, cross_jump)
2262 rtx f;
2263 int cross_jump;
2264 {
2265 rtx insn;
2266
2267 for (insn = f; insn; insn = NEXT_INSN (insn))
2268 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2269 {
2270 if (GET_CODE (insn) == CALL_INSN
2271 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2272 {
2273 mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
2274 mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
2275 mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);
2276 continue;
2277 }
2278
2279 mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
2280 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2281 {
2282 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2283 {
2284 jump_chain[INSN_UID (insn)]
2285 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2286 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2287 }
2288 if (GET_CODE (PATTERN (insn)) == RETURN)
2289 {
2290 jump_chain[INSN_UID (insn)] = jump_chain[0];
2291 jump_chain[0] = insn;
2292 }
2293 }
2294 }
2295 }
2296
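/* Illustrative sketch of the chain threading above (UIDs are made up):
   if unconditional jumps with UIDs 7 and then 9 both target the label
   with UID 4, we end up with jump_chain[4] == insn 9, jump_chain[9]
   == insn 7 and jump_chain[7] == 0; following the chain from a label's
   UID therefore visits every unconditional jump to it.  RETURN insns
   are threaded the same way through jump_chain[0].  */
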
2297 /* Delete all labels that are no longer referenced.
2298 Also find and return the last insn. */
2299
2300 static rtx
2301 delete_unreferenced_labels (f)
2302 rtx f;
2303 {
2304 rtx final = NULL_RTX;
2305 rtx insn;
2306
2307 for (insn = f; insn; )
2308 {
2309 if (GET_CODE (insn) == CODE_LABEL
2310 && LABEL_NUSES (insn) == 0
2311 && LABEL_ALTERNATE_NAME (insn) == NULL)
2312 insn = delete_insn (insn);
2313 else
2314 {
2315 final = insn;
2316 insn = NEXT_INSN (insn);
2317 }
2318 }
2319
2320 return final;
2321 }
2322
2323 /* Delete various simple forms of moves which have no necessary
2324 side effect. */
2325
2326 static void
2327 delete_noop_moves (f)
2328 rtx f;
2329 {
2330 rtx insn, next;
2331
2332 for (insn = f; insn; )
2333 {
2334 next = NEXT_INSN (insn);
2335
2336 if (GET_CODE (insn) == INSN)
2337 {
2338 register rtx body = PATTERN (insn);
2339
2340 /* Detect and delete no-op move instructions
2341 resulting from not allocating a parameter in a register. */
2342
2343 if (GET_CODE (body) == SET
2344 && (SET_DEST (body) == SET_SRC (body)
2345 || (GET_CODE (SET_DEST (body)) == MEM
2346 && GET_CODE (SET_SRC (body)) == MEM
2347 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2348 && ! (GET_CODE (SET_DEST (body)) == MEM
2349 && MEM_VOLATILE_P (SET_DEST (body)))
2350 && ! (GET_CODE (SET_SRC (body)) == MEM
2351 && MEM_VOLATILE_P (SET_SRC (body))))
2352 delete_computation (insn);
2353
2354 	  /* Detect and delete no-op move instructions
2355 resulting from smart or fortuitous register allocation. */
2356
2357 else if (GET_CODE (body) == SET)
2358 {
2359 int sreg = true_regnum (SET_SRC (body));
2360 int dreg = true_regnum (SET_DEST (body));
2361
2362 if (sreg == dreg && sreg >= 0)
2363 delete_insn (insn);
2364 else if (sreg >= 0 && dreg >= 0)
2365 {
2366 rtx trial;
2367 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2368 sreg, NULL_PTR, dreg,
2369 GET_MODE (SET_SRC (body)));
2370
2371 if (tem != 0
2372 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2373 {
2374 /* DREG may have been the target of a REG_DEAD note in
2375 the insn which makes INSN redundant. If so, reorg
2376 would still think it is dead. So search for such a
2377 note and delete it if we find it. */
2378 if (! find_regno_note (insn, REG_UNUSED, dreg))
2379 for (trial = prev_nonnote_insn (insn);
2380 trial && GET_CODE (trial) != CODE_LABEL;
2381 trial = prev_nonnote_insn (trial))
2382 if (find_regno_note (trial, REG_DEAD, dreg))
2383 {
2384 remove_death (dreg, trial);
2385 break;
2386 }
2387
2388 /* Deleting insn could lose a death-note for SREG. */
2389 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2390 {
2391 /* Change this into a USE so that we won't emit
2392 code for it, but still can keep the note. */
2393 PATTERN (insn)
2394 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2395 INSN_CODE (insn) = -1;
2396 /* Remove all reg notes but the REG_DEAD one. */
2397 REG_NOTES (insn) = trial;
2398 XEXP (trial, 1) = NULL_RTX;
2399 }
2400 else
2401 delete_insn (insn);
2402 }
2403 }
2404 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2405 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2406 NULL_PTR, 0,
2407 GET_MODE (SET_DEST (body))))
2408 {
2409 /* This handles the case where we have two consecutive
2410 assignments of the same constant to pseudos that didn't
2411 get a hard reg. Each SET from the constant will be
2412 converted into a SET of the spill register and an
2413 output reload will be made following it. This produces
2414 two loads of the same constant into the same spill
2415 register. */
2416
2417 rtx in_insn = insn;
2418
2419 /* Look back for a death note for the first reg.
2420 If there is one, it is no longer accurate. */
2421 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2422 {
2423 if ((GET_CODE (in_insn) == INSN
2424 || GET_CODE (in_insn) == JUMP_INSN)
2425 && find_regno_note (in_insn, REG_DEAD, dreg))
2426 {
2427 remove_death (dreg, in_insn);
2428 break;
2429 }
2430 in_insn = PREV_INSN (in_insn);
2431 }
2432
2433 /* Delete the second load of the value. */
2434 delete_insn (insn);
2435 }
2436 }
2437 else if (GET_CODE (body) == PARALLEL)
2438 {
2439 /* If each part is a set between two identical registers or
2440 a USE or CLOBBER, delete the insn. */
2441 int i, sreg, dreg;
2442 rtx tem;
2443
2444 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2445 {
2446 tem = XVECEXP (body, 0, i);
2447 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2448 continue;
2449
2450 if (GET_CODE (tem) != SET
2451 || (sreg = true_regnum (SET_SRC (tem))) < 0
2452 || (dreg = true_regnum (SET_DEST (tem))) < 0
2453 || dreg != sreg)
2454 break;
2455 }
2456
2457 if (i < 0)
2458 delete_insn (insn);
2459 }
2460 /* Also delete insns to store bit fields if they are no-ops. */
2461 /* Not worth the hair to detect this in the big-endian case. */
2462 else if (! BYTES_BIG_ENDIAN
2463 && GET_CODE (body) == SET
2464 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2465 && XEXP (SET_DEST (body), 2) == const0_rtx
2466 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2467 && ! (GET_CODE (SET_SRC (body)) == MEM
2468 && MEM_VOLATILE_P (SET_SRC (body))))
2469 delete_insn (insn);
2470 }
2471 insn = next;
2472 }
2473 }
2474
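/* Illustrative examples of the no-op forms deleted above (register
   numbers are made up):

	(set (reg 65) (reg 65))		always a no-op;
	(set (reg 66) (reg 67))		a no-op once both pseudos have
					been allocated to the same hard
					register.

   A volatile MEM on either side is never treated as a no-op.  */
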
2475 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2476 If so indicate that this function can drop off the end by returning
2477 1, else return 0.
2478
2479    LAST is the last insn in the function; we scan backwards from it.
2481
2482 DELETE_FINAL_NOTE indicates whether we should delete the note
2483 if we find it. */
2484
2485 static int
2486 calculate_can_reach_end (last, delete_final_note)
2487 rtx last;
2488 int delete_final_note;
2489 {
2490 rtx insn = last;
2491 int n_labels = 1;
2492
2493 while (insn != NULL_RTX)
2494 {
2495 int ok = 0;
2496
2497 /* One label can follow the end-note: the return label. */
2498 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2499 ok = 1;
2500 /* Ordinary insns can follow it if returning a structure. */
2501 else if (GET_CODE (insn) == INSN)
2502 ok = 1;
2503 /* If machine uses explicit RETURN insns, no epilogue,
2504 then one of them follows the note. */
2505 else if (GET_CODE (insn) == JUMP_INSN
2506 && GET_CODE (PATTERN (insn)) == RETURN)
2507 ok = 1;
2508 /* A barrier can follow the return insn. */
2509 else if (GET_CODE (insn) == BARRIER)
2510 ok = 1;
2511 /* Other kinds of notes can follow also. */
2512 else if (GET_CODE (insn) == NOTE
2513 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2514 ok = 1;
2515
2516 if (ok != 1)
2517 break;
2518
2519 insn = PREV_INSN (insn);
2520 }
2521
2522 /* See if we backed up to the appropriate type of note. */
2523 if (insn != NULL_RTX
2524 && GET_CODE (insn) == NOTE
2525 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
2526 {
2527 if (delete_final_note)
2528 delete_insn (insn);
2529 return 1;
2530 }
2531
2532 return 0;
2533 }
2534
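/* An illustrative tail that satisfies the backwards scan above, listed
   from the last insn to the first: a barrier, a RETURN jump, the
   return label, and then the NOTE_INSN_FUNCTION_END note itself.  */
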
2535 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2536 jump. Assume that this unconditional jump is to the exit test code. If
2537 the code is sufficiently simple, make a copy of it before INSN,
2538 followed by a jump to the exit of the loop. Then delete the unconditional
2539 jump after INSN.
2540
2541 Return 1 if we made the change, else 0.
2542
2543 This is only safe immediately after a regscan pass because it uses the
2544 values of regno_first_uid and regno_last_uid. */
2545
2546 static int
2547 duplicate_loop_exit_test (loop_start)
2548 rtx loop_start;
2549 {
2550 rtx insn, set, reg, p, link;
2551 rtx copy = 0, first_copy = 0;
2552 int num_insns = 0;
2553 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2554 rtx lastexit;
2555 int max_reg = max_reg_num ();
2556 rtx *reg_map = 0;
2557
2558 /* Scan the exit code. We do not perform this optimization if any insn:
2559
2560 is a CALL_INSN
2561 is a CODE_LABEL
2562 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2563 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2564 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2565 is not valid.
2566
2567 We also do not do this if we find an insn with ASM_OPERANDS. While
2568 this restriction should not be necessary, copying an insn with
2569 ASM_OPERANDS can confuse asm_noperands in some cases.
2570
2571 Also, don't do this if the exit code is more than 20 insns. */
2572
2573 for (insn = exitcode;
2574 insn
2575 && ! (GET_CODE (insn) == NOTE
2576 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2577 insn = NEXT_INSN (insn))
2578 {
2579 switch (GET_CODE (insn))
2580 {
2581 case CODE_LABEL:
2582 case CALL_INSN:
2583 return 0;
2584 case NOTE:
2585 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2586 a jump immediately after the loop start that branches outside
2587 the loop but within an outer loop, near the exit test.
2588 If we copied this exit test and created a phony
2589 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2590 	   before the exit test look like they could be safely moved
2591 	   out of the loop even though they may never actually be executed.
2592 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2593
2594 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2595 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2596 return 0;
2597
2598 if (optimize < 2
2599 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2600 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2601 /* If we were to duplicate this code, we would not move
2602 the BLOCK notes, and so debugging the moved code would
2603 be difficult. Thus, we only move the code with -O2 or
2604 higher. */
2605 return 0;
2606
2607 break;
2608 case JUMP_INSN:
2609 case INSN:
2610 /* The code below would grossly mishandle REG_WAS_0 notes,
2611 so get rid of them here. */
2612 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2613 remove_note (insn, p);
2614 if (++num_insns > 20
2615 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2616 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2617 return 0;
2618 break;
2619 default:
2620 break;
2621 }
2622 }
2623
2624 /* Unless INSN is zero, we can do the optimization. */
2625 if (insn == 0)
2626 return 0;
2627
2628 lastexit = insn;
2629
2630 /* See if any insn sets a register only used in the loop exit code and
2631 not a user variable. If so, replace it with a new register. */
2632 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2633 if (GET_CODE (insn) == INSN
2634 && (set = single_set (insn)) != 0
2635 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2636 || (GET_CODE (reg) == SUBREG
2637 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2638 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2639 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
2640 {
2641 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2642 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2643 break;
2644
2645 if (p != lastexit)
2646 {
2647 /* We can do the replacement. Allocate reg_map if this is the
2648 first replacement we found. */
2649 if (reg_map == 0)
2650 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
2651
2652 REG_LOOP_TEST_P (reg) = 1;
2653
2654 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2655 }
2656 }
2657
2658 /* Now copy each insn. */
2659 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2660 {
2661 switch (GET_CODE (insn))
2662 {
2663 case BARRIER:
2664 copy = emit_barrier_before (loop_start);
2665 break;
2666 case NOTE:
2667 /* Only copy line-number notes. */
2668 if (NOTE_LINE_NUMBER (insn) >= 0)
2669 {
2670 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2671 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2672 }
2673 break;
2674
2675 case INSN:
2676 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
2677 if (reg_map)
2678 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2679
2680 mark_jump_label (PATTERN (copy), copy, 0, 0);
2681
2682 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2683 make them. */
2684 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2685 if (REG_NOTE_KIND (link) != REG_LABEL)
2686 REG_NOTES (copy)
2687 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
2688 XEXP (link, 0),
2689 REG_NOTES (copy)));
2690 if (reg_map && REG_NOTES (copy))
2691 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2692 break;
2693
2694 case JUMP_INSN:
2695 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)), loop_start);
2696 if (reg_map)
2697 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2698 mark_jump_label (PATTERN (copy), copy, 0, 0);
2699 if (REG_NOTES (insn))
2700 {
2701 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
2702 if (reg_map)
2703 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2704 }
2705
2706 /* If this is a simple jump, add it to the jump chain. */
2707
2708 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2709 && simplejump_p (copy))
2710 {
2711 jump_chain[INSN_UID (copy)]
2712 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2713 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2714 }
2715 break;
2716
2717 default:
2718 abort ();
2719 }
2720
2721 /* Record the first insn we copied. We need it so that we can
2722 scan the copied insns for new pseudo registers. */
2723 if (! first_copy)
2724 first_copy = copy;
2725 }
2726
2727 /* Now clean up by emitting a jump to the end label and deleting the jump
2728 at the start of the loop. */
2729 if (! copy || GET_CODE (copy) != BARRIER)
2730 {
2731 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2732 loop_start);
2733
2734 /* Record the first insn we copied. We need it so that we can
2735 scan the copied insns for new pseudo registers. This may not
2736 be strictly necessary since we should have copied at least one
2737 insn above. But I am going to be safe. */
2738 if (! first_copy)
2739 first_copy = copy;
2740
2741 mark_jump_label (PATTERN (copy), copy, 0, 0);
2742 if (INSN_UID (copy) < max_jump_chain
2743 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2744 {
2745 jump_chain[INSN_UID (copy)]
2746 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2747 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2748 }
2749 emit_barrier_before (loop_start);
2750 }
2751
2752 /* Now scan from the first insn we copied to the last insn we copied
2753 (copy) for new pseudo registers. Do this after the code to jump to
2754 the end label since that might create a new pseudo too. */
2755 reg_scan_update (first_copy, copy, max_reg);
2756
2757 /* Mark the exit code as the virtual top of the converted loop. */
2758 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2759
2760 delete_insn (next_nonnote_insn (loop_start));
2761
2762 /* Clean up. */
2763 if (reg_map)
2764 free (reg_map);
2765
2766 return 1;
2767 }
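
/* Illustrative sketch (labels are made up): for a loop of the shape

	NOTE_INSN_LOOP_BEG; goto TEST; TOP: body; TEST: if (cond) goto TOP;

   the exit test is copied in front of the loop, branching past the
   loop when it would not be entered, and the initial `goto TEST' is
   deleted, so each iteration falls from the body straight into the
   test at the bottom.  */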
2768 \f
2769 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2770 loop-end notes between START and END out before START. Assume that
2771 END is not such a note. START may be such a note. Returns the value
2772 of the new starting insn, which may be different if the original start
2773 was such a note. */
2774
2775 rtx
2776 squeeze_notes (start, end)
2777 rtx start, end;
2778 {
2779 rtx insn;
2780 rtx next;
2781
2782 for (insn = start; insn != end; insn = next)
2783 {
2784 next = NEXT_INSN (insn);
2785 if (GET_CODE (insn) == NOTE
2786 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2787 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2788 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2789 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2790 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2791 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2792 {
2793 if (insn == start)
2794 start = next;
2795 else
2796 {
2797 rtx prev = PREV_INSN (insn);
2798 PREV_INSN (insn) = PREV_INSN (start);
2799 NEXT_INSN (insn) = start;
2800 NEXT_INSN (PREV_INSN (insn)) = insn;
2801 PREV_INSN (NEXT_INSN (insn)) = insn;
2802 NEXT_INSN (prev) = next;
2803 PREV_INSN (next) = prev;
2804 }
2805 }
2806 }
2807
2808 return start;
2809 }
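
/* Illustrative sketch: if the range START..END contains insn A,
   NOTE_INSN_BLOCK_END, insn B, the note is relinked in front of START,
   leaving A and B adjacent.  The returned start differs from START
   only when START itself was one of the notes moved.  */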
2810 \f
2811 /* Compare the instructions before insn E1 with those before E2
2812 to find an opportunity for cross jumping.
2813 (This means detecting identical sequences of insns followed by
2814 jumps to the same place, or followed by a label and a jump
2815 to that label, and replacing one with a jump to the other.)
2816
2817 Assume E1 is a jump that jumps to label E2
2818 (that is not always true but it might as well be).
2819 Find the longest possible equivalent sequences
2820 and store the first insns of those sequences into *F1 and *F2.
2821 Store zero there if no equivalent preceding instructions are found.
2822
2823 We give up if we find a label in stream 1.
2824 Actually we could transfer that label into stream 2. */
2825
2826 static void
2827 find_cross_jump (e1, e2, minimum, f1, f2)
2828 rtx e1, e2;
2829 int minimum;
2830 rtx *f1, *f2;
2831 {
2832 register rtx i1 = e1, i2 = e2;
2833 register rtx p1, p2;
2834 int lose = 0;
2835
2836 rtx last1 = 0, last2 = 0;
2837 rtx afterlast1 = 0, afterlast2 = 0;
2838
2839 *f1 = 0;
2840 *f2 = 0;
2841
2842 while (1)
2843 {
2844 i1 = prev_nonnote_insn (i1);
2845
2846 i2 = PREV_INSN (i2);
2847 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2848 i2 = PREV_INSN (i2);
2849
2850 if (i1 == 0)
2851 break;
2852
2853 /* Don't allow the range of insns preceding E1 or E2
2854 to include the other (E2 or E1). */
2855 if (i2 == e1 || i1 == e2)
2856 break;
2857
2858 /* If we will get to this code by jumping, those jumps will be
2859 tensioned to go directly to the new label (before I2),
2860 so this cross-jumping won't cost extra. So reduce the minimum. */
2861 if (GET_CODE (i1) == CODE_LABEL)
2862 {
2863 --minimum;
2864 break;
2865 }
2866
2867 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2868 break;
2869
2870 /* Avoid moving insns across EH regions if either of the insns
2871 can throw. */
2872 if (flag_exceptions
2873 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
2874 && !in_same_eh_region (i1, i2))
2875 break;
2876
2877 p1 = PATTERN (i1);
2878 p2 = PATTERN (i2);
2879
2880 /* If this is a CALL_INSN, compare register usage information.
2881 If we don't check this on stack register machines, the two
2882 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2883 numbers of stack registers in the same basic block.
2884 If we don't check this on machines with delay slots, a delay slot may
2885 be filled that clobbers a parameter expected by the subroutine.
2886
2887 ??? We take the simple route for now and assume that if they're
2888 equal, they were constructed identically. */
2889
2890 if (GET_CODE (i1) == CALL_INSN
2891 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2892 CALL_INSN_FUNCTION_USAGE (i2)))
2893 lose = 1;
2894
2895 #ifdef STACK_REGS
2896 /* If cross_jump_death_matters is not 0, the insn's mode
2897 indicates whether or not the insn contains any stack-like
2898 regs. */
2899
2900 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
2901 {
2902 /* If register stack conversion has already been done, then
2903 death notes must also be compared before it is certain that
2904 the two instruction streams match. */
2905
2906 rtx note;
2907 HARD_REG_SET i1_regset, i2_regset;
2908
2909 CLEAR_HARD_REG_SET (i1_regset);
2910 CLEAR_HARD_REG_SET (i2_regset);
2911
2912 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2913 if (REG_NOTE_KIND (note) == REG_DEAD
2914 && STACK_REG_P (XEXP (note, 0)))
2915 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2916
2917 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2918 if (REG_NOTE_KIND (note) == REG_DEAD
2919 && STACK_REG_P (XEXP (note, 0)))
2920 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2921
2922 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2923
2924 lose = 1;
2925
2926 done:
2927 ;
2928 }
2929 #endif
2930
2931 /* Don't allow old-style asm or volatile extended asms to be accepted
2932 for cross jumping purposes. It is conceptually correct to allow
2933 them, since cross-jumping preserves the dynamic instruction order
2934 even though it is changing the static instruction order. However,
2935 if an asm is being used to emit an assembler pseudo-op, such as
2936 the MIPS `.set reorder' pseudo-op, then the static instruction order
2937 matters and it must be preserved. */
2938 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
2939 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
2940 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
2941 lose = 1;
2942
2943 if (lose || GET_CODE (p1) != GET_CODE (p2)
2944 || ! rtx_renumbered_equal_p (p1, p2))
2945 {
2946 /* The following code helps take care of G++ cleanups. */
2947 rtx equiv1;
2948 rtx equiv2;
2949
2950 if (!lose && GET_CODE (p1) == GET_CODE (p2)
2951 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2952 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2953 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2954 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
2955 /* If the equivalences are not to a constant, they may
2956 reference pseudos that no longer exist, so we can't
2957 use them. */
2958 && CONSTANT_P (XEXP (equiv1, 0))
2959 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2960 {
2961 rtx s1 = single_set (i1);
2962 rtx s2 = single_set (i2);
2963 if (s1 != 0 && s2 != 0
2964 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
2965 {
2966 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
2967 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
2968 if (! rtx_renumbered_equal_p (p1, p2))
2969 cancel_changes (0);
2970 else if (apply_change_group ())
2971 goto win;
2972 }
2973 }
2974
2975 /* Insns fail to match; cross jumping is limited to the following
2976 insns. */
2977
2978 #ifdef HAVE_cc0
2979 /* Don't allow the insn after a compare to be shared by
2980 cross-jumping unless the compare is also shared.
2981 Here, if either of these non-matching insns is a compare,
2982 exclude the following insn from possible cross-jumping. */
2983 if (sets_cc0_p (p1) || sets_cc0_p (p2))
2984 last1 = afterlast1, last2 = afterlast2, ++minimum;
2985 #endif
2986
2987 /* If cross-jumping here will feed a jump-around-jump
2988 optimization, this jump won't cost extra, so reduce
2989 the minimum. */
2990 if (GET_CODE (i1) == JUMP_INSN
2991 && JUMP_LABEL (i1)
2992 && prev_real_insn (JUMP_LABEL (i1)) == e1)
2993 --minimum;
2994 break;
2995 }
2996
2997 win:
2998 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
2999 {
3000 /* Ok, this insn is potentially includable in a cross-jump here. */
3001 afterlast1 = last1, afterlast2 = last2;
3002 last1 = i1, last2 = i2, --minimum;
3003 }
3004 }
3005
3006 if (minimum <= 0 && last1 != 0 && last1 != e1)
3007 *f1 = last1, *f2 = last2;
3008 }
3009
3010 static void
3011 do_cross_jump (insn, newjpos, newlpos)
3012 rtx insn, newjpos, newlpos;
3013 {
3014 /* Find an existing label at this point
3015 or make a new one if there is none. */
3016 register rtx label = get_label_before (newlpos);
3017
3018 /* Make the same jump insn jump to the new point. */
3019 if (GET_CODE (PATTERN (insn)) == RETURN)
3020 {
3021 /* Remove from jump chain of returns. */
3022 delete_from_jump_chain (insn);
3023 /* Change the insn. */
3024 PATTERN (insn) = gen_jump (label);
3025 INSN_CODE (insn) = -1;
3026 JUMP_LABEL (insn) = label;
3027 LABEL_NUSES (label)++;
3028       /* Add it to the new label's jump chain.  */
3029 if (INSN_UID (label) < max_jump_chain
3030 && INSN_UID (insn) < max_jump_chain)
3031 {
3032 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
3033 jump_chain[INSN_UID (label)] = insn;
3034 }
3035 }
3036 else
3037 redirect_jump (insn, label);
3038
3039 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3040 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3041 the NEWJPOS stream. */
3042
3043 while (newjpos != insn)
3044 {
3045 rtx lnote;
3046
3047 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
3048 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
3049 || REG_NOTE_KIND (lnote) == REG_EQUIV)
3050 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
3051 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
3052 remove_note (newlpos, lnote);
3053
3054 delete_insn (newjpos);
3055 newjpos = next_real_insn (newjpos);
3056 newlpos = next_real_insn (newlpos);
3057 }
3058 }
3059 \f
3060 /* Return the label before INSN, or put a new label there. */
3061
3062 rtx
3063 get_label_before (insn)
3064 rtx insn;
3065 {
3066 rtx label;
3067
3068 /* Find an existing label at this point
3069 or make a new one if there is none. */
3070 label = prev_nonnote_insn (insn);
3071
3072 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3073 {
3074 rtx prev = PREV_INSN (insn);
3075
3076 label = gen_label_rtx ();
3077 emit_label_after (label, prev);
3078 LABEL_NUSES (label) = 0;
3079 }
3080 return label;
3081 }
3082
3083 /* Return the label after INSN, or put a new label there. */
3084
3085 rtx
3086 get_label_after (insn)
3087 rtx insn;
3088 {
3089 rtx label;
3090
3091 /* Find an existing label at this point
3092 or make a new one if there is none. */
3093 label = next_nonnote_insn (insn);
3094
3095 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3096 {
3097 label = gen_label_rtx ();
3098 emit_label_after (label, insn);
3099 LABEL_NUSES (label) = 0;
3100 }
3101 return label;
3102 }
3103 \f
3104 /* Return 1 if INSN is a jump that jumps to right after TARGET
3105 only on the condition that TARGET itself would drop through.
3106 Assumes that TARGET is a conditional jump. */
3107
3108 static int
3109 jump_back_p (insn, target)
3110 rtx insn, target;
3111 {
3112 rtx cinsn, ctarget;
3113 enum rtx_code codei, codet;
3114
3115 if (simplejump_p (insn) || ! condjump_p (insn)
3116 || simplejump_p (target)
3117 || target != prev_real_insn (JUMP_LABEL (insn)))
3118 return 0;
3119
3120 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3121 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3122
3123 codei = GET_CODE (cinsn);
3124 codet = GET_CODE (ctarget);
3125
3126 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3127 {
3128 if (! can_reverse_comparison_p (cinsn, insn))
3129 return 0;
3130 codei = reverse_condition (codei);
3131 }
3132
3133 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3134 {
3135 if (! can_reverse_comparison_p (ctarget, target))
3136 return 0;
3137 codet = reverse_condition (codet);
3138 }
3139
3140 return (codei == codet
3141 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3142 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3143 }
3144 \f
3145 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3146 return non-zero if it is safe to reverse this comparison. It is if our
3147 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3148 this is known to be an integer comparison. */
3149
3150 int
3151 can_reverse_comparison_p (comparison, insn)
3152 rtx comparison;
3153 rtx insn;
3154 {
3155 rtx arg0;
3156
3157 /* If this is not actually a comparison, we can't reverse it. */
3158 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3159 return 0;
3160
3161 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3162 /* If this is an NE comparison, it is safe to reverse it to an EQ
3163 comparison and vice versa, even for floating point. If no operands
3164 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3165 always false and NE is always true, so the reversal is also valid. */
3166 || flag_fast_math
3167 || GET_CODE (comparison) == NE
3168 || GET_CODE (comparison) == EQ)
3169 return 1;
3170
3171 arg0 = XEXP (comparison, 0);
3172
3173 /* Make sure ARG0 is one of the actual objects being compared. If we
3174 can't do this, we can't be sure the comparison can be reversed.
3175
3176 Handle cc0 and a MODE_CC register. */
3177 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3178 #ifdef HAVE_cc0
3179 || arg0 == cc0_rtx
3180 #endif
3181 )
3182 {
3183 rtx prev = prev_nonnote_insn (insn);
3184 rtx set;
3185
3186       /* First see if the condition code mode alone is enough to say we can
3187 	 reverse the condition.  If not, then search backwards for a set of
3188 	 ARG0.  We do not need to check for an insn clobbering it since valid
3189 	 code will contain a set with no intervening clobber.  But
3190 	 stop when we reach a label.  */
3191 #ifdef REVERSIBLE_CC_MODE
3192 if (GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC
3193 && REVERSIBLE_CC_MODE (GET_MODE (arg0)))
3194 return 1;
3195 #endif
3196
3197 for (prev = prev_nonnote_insn (insn);
3198 prev != 0 && GET_CODE (prev) != CODE_LABEL;
3199 prev = prev_nonnote_insn (prev))
3200 if ((set = single_set (prev)) != 0
3201 && rtx_equal_p (SET_DEST (set), arg0))
3202 {
3203 arg0 = SET_SRC (set);
3204
3205 if (GET_CODE (arg0) == COMPARE)
3206 arg0 = XEXP (arg0, 0);
3207 break;
3208 }
3209 }
3210
3211 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3212 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3213 return (GET_CODE (arg0) == CONST_INT
3214 || (GET_MODE (arg0) != VOIDmode
3215 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3216 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3217 }
3218
3219 /* Given an rtx-code for a comparison, return the code for the negated
3220 comparison. If no such code exists, return UNKNOWN.
3221
3222 WATCH OUT! reverse_condition is not safe to use on a jump that might
3223 be acting on the results of an IEEE floating point comparison, because
3224 of the special treatment of non-signaling nans in comparisons.
3225 Use can_reverse_comparison_p to be sure. */
3226
3227 enum rtx_code
3228 reverse_condition (code)
3229 enum rtx_code code;
3230 {
3231 switch (code)
3232 {
3233 case EQ:
3234 return NE;
3235 case NE:
3236 return EQ;
3237 case GT:
3238 return LE;
3239 case GE:
3240 return LT;
3241 case LT:
3242 return GE;
3243 case LE:
3244 return GT;
3245 case GTU:
3246 return LEU;
3247 case GEU:
3248 return LTU;
3249 case LTU:
3250 return GEU;
3251 case LEU:
3252 return GTU;
3253 case UNORDERED:
3254 return ORDERED;
3255 case ORDERED:
3256 return UNORDERED;
3257
3258 case UNLT:
3259 case UNLE:
3260 case UNGT:
3261 case UNGE:
3262 case UNEQ:
3263 case LTGT:
3264 return UNKNOWN;
3265
3266 default:
3267 abort ();
3268 }
3269 }
3270
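/* Concrete IEEE example of the warning above: if either operand is a
   NaN, both (a < b) and (a >= b) are false, so rewriting `jump unless
   a < b' as `jump if a >= b' changes where control goes.  Only EQ/NE,
   or comparisons known to be on integers, are safe to reverse.  */
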
3271 /* Similar, but we're allowed to generate unordered comparisons, which
3272 makes it safe for IEEE floating-point. Of course, we have to recognize
3273 that the target will support them too... */
3274
3275 enum rtx_code
3276 reverse_condition_maybe_unordered (code)
3277 enum rtx_code code;
3278 {
3279 /* Non-IEEE formats don't have unordered conditions. */
3280 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
3281 return reverse_condition (code);
3282
3283 switch (code)
3284 {
3285 case EQ:
3286 return NE;
3287 case NE:
3288 return EQ;
3289 case GT:
3290 return UNLE;
3291 case GE:
3292 return UNLT;
3293 case LT:
3294 return UNGE;
3295 case LE:
3296 return UNGT;
3297 case LTGT:
3298 return UNEQ;
3299 case GTU:
3300 return LEU;
3301 case GEU:
3302 return LTU;
3303 case LTU:
3304 return GEU;
3305 case LEU:
3306 return GTU;
3307 case UNORDERED:
3308 return ORDERED;
3309 case ORDERED:
3310 return UNORDERED;
3311 case UNLT:
3312 return GE;
3313 case UNLE:
3314 return GT;
3315 case UNGT:
3316 return LE;
3317 case UNGE:
3318 return LT;
3319 case UNEQ:
3320 return LTGT;
3321
3322 default:
3323 abort ();
3324 }
3325 }
3326
3327 /* Similar, but return the code when two operands of a comparison are swapped.
3328 This IS safe for IEEE floating-point. */
3329
3330 enum rtx_code
3331 swap_condition (code)
3332 enum rtx_code code;
3333 {
3334 switch (code)
3335 {
3336 case EQ:
3337 case NE:
3338 case UNORDERED:
3339 case ORDERED:
3340 case UNEQ:
3341 case LTGT:
3342 return code;
3343
3344 case GT:
3345 return LT;
3346 case GE:
3347 return LE;
3348 case LT:
3349 return GT;
3350 case LE:
3351 return GE;
3352 case GTU:
3353 return LTU;
3354 case GEU:
3355 return LEU;
3356 case LTU:
3357 return GTU;
3358 case LEU:
3359 return GEU;
3360 case UNLT:
3361 return UNGT;
3362 case UNLE:
3363 return UNGE;
3364 case UNGT:
3365 return UNLT;
3366 case UNGE:
3367 return UNLE;
3368
3369 default:
3370 abort ();
3371 }
3372 }
3373
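/* Contrast with reverse_condition: swapping the operands preserves the
   truth value, so (a < b) and (b > a) are the same test even when one
   operand is an IEEE NaN (both are false), whereas reversing the code
   is not safe in that case.  Canonicalizations such as rewriting
   (const < reg) as (reg > const) therefore use swap_condition.  */
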
3374 /* Given a comparison CODE, return the corresponding unsigned comparison.
3375 If CODE is an equality comparison or already an unsigned comparison,
3376 CODE is returned. */
3377
3378 enum rtx_code
3379 unsigned_condition (code)
3380 enum rtx_code code;
3381 {
3382 switch (code)
3383 {
3384 case EQ:
3385 case NE:
3386 case GTU:
3387 case GEU:
3388 case LTU:
3389 case LEU:
3390 return code;
3391
3392 case GT:
3393 return GTU;
3394 case GE:
3395 return GEU;
3396 case LT:
3397 return LTU;
3398 case LE:
3399 return LEU;
3400
3401 default:
3402 abort ();
3403 }
3404 }
3405
3406 /* Similarly, return the signed version of a comparison. */
3407
3408 enum rtx_code
3409 signed_condition (code)
3410 enum rtx_code code;
3411 {
3412 switch (code)
3413 {
3414 case EQ:
3415 case NE:
3416 case GT:
3417 case GE:
3418 case LT:
3419 case LE:
3420 return code;
3421
3422 case GTU:
3423 return GT;
3424 case GEU:
3425 return GE;
3426 case LTU:
3427 return LT;
3428 case LEU:
3429 return LE;
3430
3431 default:
3432 abort ();
3433 }
3434 }
3435 \f
3436 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3437 truth of CODE1 implies the truth of CODE2. */
3438
3439 int
3440 comparison_dominates_p (code1, code2)
3441 enum rtx_code code1, code2;
3442 {
3443 if (code1 == code2)
3444 return 1;
3445
3446 switch (code1)
3447 {
3448 case EQ:
3449 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
3450 || code2 == ORDERED)
3451 return 1;
3452 break;
3453
3454 case LT:
3455 if (code2 == LE || code2 == NE || code2 == ORDERED)
3456 return 1;
3457 break;
3458
3459 case GT:
3460 if (code2 == GE || code2 == NE || code2 == ORDERED)
3461 return 1;
3462 break;
3463
3464 case GE:
3465 case LE:
3466 if (code2 == ORDERED)
3467 return 1;
3468 break;
3469
3470 case LTGT:
3471 if (code2 == NE || code2 == ORDERED)
3472 return 1;
3473 break;
3474
3475 case LTU:
3476 if (code2 == LEU || code2 == NE)
3477 return 1;
3478 break;
3479
3480 case GTU:
3481 if (code2 == GEU || code2 == NE)
3482 return 1;
3483 break;
3484
3485 case UNORDERED:
3486 if (code2 == NE)
3487 return 1;
3488 break;
3489
3490 default:
3491 break;
3492 }
3493
3494 return 0;
3495 }
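
/* Illustrative instances of the relation: EQ dominates LE, LEU, GE,
   GEU and ORDERED, so a branch on any of those is known taken whenever
   an EQ on the same operands was; LT dominates LE and NE but not LEU,
   since the signed and unsigned orderings differ.  */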
3496 \f
3497 /* Return 1 if INSN is an unconditional jump and nothing else. */
3498
3499 int
3500 simplejump_p (insn)
3501 rtx insn;
3502 {
3503 return (GET_CODE (insn) == JUMP_INSN
3504 && GET_CODE (PATTERN (insn)) == SET
3505 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3506 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3507 }
3508
3509 /* Return nonzero if INSN is a (possibly) conditional jump
3510 and nothing more. */
3511
3512 int
3513 condjump_p (insn)
3514 rtx insn;
3515 {
3516 register rtx x = PATTERN (insn);
3517
3518 if (GET_CODE (x) != SET
3519 || GET_CODE (SET_DEST (x)) != PC)
3520 return 0;
3521
3522 x = SET_SRC (x);
3523 if (GET_CODE (x) == LABEL_REF)
3524 return 1;
3525   else
3526     return (GET_CODE (x) == IF_THEN_ELSE
3527 	    && ((GET_CODE (XEXP (x, 2)) == PC
3528 		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
3529 		     || GET_CODE (XEXP (x, 1)) == RETURN))
3530 		|| (GET_CODE (XEXP (x, 1)) == PC
3531 		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
3532 			|| GET_CODE (XEXP (x, 2)) == RETURN))));
3534 }
3535
3536 /* Return nonzero if INSN is a (possibly) conditional jump inside a
3537 PARALLEL. */
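/* For example, a machine whose branches also clobber a scratch register
   might emit

     (parallel [(set (pc) (if_then_else ...))
		(clobber (reg 1))])

   The branch SET must be element 0 of the PARALLEL; the clobbered
   register here is purely illustrative.  */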
3538
3539 int
3540 condjump_in_parallel_p (insn)
3541 rtx insn;
3542 {
3543 register rtx x = PATTERN (insn);
3544
3545 if (GET_CODE (x) != PARALLEL)
3546 return 0;
3547 else
3548 x = XVECEXP (x, 0, 0);
3549
3550 if (GET_CODE (x) != SET)
3551 return 0;
3552 if (GET_CODE (SET_DEST (x)) != PC)
3553 return 0;
3554 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3555 return 1;
3556 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3557 return 0;
3558 if (XEXP (SET_SRC (x), 2) == pc_rtx
3559 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3560 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3561 return 1;
3562 if (XEXP (SET_SRC (x), 1) == pc_rtx
3563 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3564 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3565 return 1;
3566 return 0;
3567 }
3568
3569 /* Return the label of a conditional jump. */
3570
3571 rtx
3572 condjump_label (insn)
3573 rtx insn;
3574 {
3575 register rtx x = PATTERN (insn);
3576
3577 if (GET_CODE (x) == PARALLEL)
3578 x = XVECEXP (x, 0, 0);
3579 if (GET_CODE (x) != SET)
3580 return NULL_RTX;
3581 if (GET_CODE (SET_DEST (x)) != PC)
3582 return NULL_RTX;
3583 x = SET_SRC (x);
3584 if (GET_CODE (x) == LABEL_REF)
3585 return x;
3586 if (GET_CODE (x) != IF_THEN_ELSE)
3587 return NULL_RTX;
3588 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
3589 return XEXP (x, 1);
3590 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
3591 return XEXP (x, 2);
3592 return NULL_RTX;
3593 }
3594
3595 /* Return true if INSN is a (possibly conditional) return insn. */
3596
3597 static int
3598 returnjump_p_1 (loc, data)
3599 rtx *loc;
3600 void *data ATTRIBUTE_UNUSED;
3601 {
3602 rtx x = *loc;
3603 return x && GET_CODE (x) == RETURN;
3604 }
3605
3606 int
3607 returnjump_p (insn)
3608 rtx insn;
3609 {
3610 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
3611 }
3612
3613 /* Return true if INSN is a jump that only transfers control and
3614 nothing more. */
3615
3616 int
3617 onlyjump_p (insn)
3618 rtx insn;
3619 {
3620 rtx set;
3621
3622 if (GET_CODE (insn) != JUMP_INSN)
3623 return 0;
3624
3625 set = single_set (insn);
3626 if (set == NULL)
3627 return 0;
3628 if (GET_CODE (SET_DEST (set)) != PC)
3629 return 0;
3630 if (side_effects_p (SET_SRC (set)))
3631 return 0;
3632
3633 return 1;
3634 }
3635
3636 #ifdef HAVE_cc0
3637
3638 /* Return 1 if X is an RTX that does nothing but set the condition codes
3639 and CLOBBER or USE registers.
3640 Return -1 if X does explicitly set the condition codes,
3641 but also does other things. */
3642
3643 int
3644 sets_cc0_p (x)
     rtx x;
3646 {
3647 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3648 return 1;
3649 if (GET_CODE (x) == PARALLEL)
3650 {
3651 int i;
3652 int sets_cc0 = 0;
3653 int other_things = 0;
3654 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3655 {
3656 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3657 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3658 sets_cc0 = 1;
3659 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3660 other_things = 1;
3661 }
3662 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3663 }
3664 return 0;
3665 }
3666 #endif
3667 \f
3668 /* Follow any unconditional jump at LABEL;
3669 return the ultimate label reached by any such chain of jumps.
3670 If LABEL is not followed by a jump, return LABEL.
3671 If the chain loops or we can't find end, return LABEL,
3672 since that tells caller to avoid changing the insn.
3673
3674 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3675 a USE or CLOBBER. */
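/* For example, if label L1 is immediately followed by `jump L2' and L2
   by `jump L3', follow_jumps (L1) returns L3.  At most ten jumps are
   followed; a longer chain, or a cycle, makes us give up and return
   LABEL itself.  */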
3676
3677 rtx
3678 follow_jumps (label)
3679 rtx label;
3680 {
3681 register rtx insn;
3682 register rtx next;
3683 register rtx value = label;
3684 register int depth;
3685
3686 for (depth = 0;
3687 (depth < 10
3688 && (insn = next_active_insn (value)) != 0
3689 && GET_CODE (insn) == JUMP_INSN
3690 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3691 || GET_CODE (PATTERN (insn)) == RETURN)
3692 && (next = NEXT_INSN (insn))
3693 && GET_CODE (next) == BARRIER);
3694 depth++)
3695 {
3696 /* Don't chain through the insn that jumps into a loop
3697 from outside the loop,
3698 since that would create multiple loop entry jumps
3699 and prevent loop optimization. */
3700 rtx tem;
3701 if (!reload_completed)
3702 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3703 if (GET_CODE (tem) == NOTE
3704 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
3705 /* ??? Optional. Disables some optimizations, but makes
3706 gcov output more accurate with -O. */
3707 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
3708 return value;
3709
      /* If we have found a cycle, return LABEL itself
	 so the caller will leave the jump unchanged.  */
3711 if (JUMP_LABEL (insn) == label)
3712 return label;
3713
3714 tem = next_active_insn (JUMP_LABEL (insn));
3715 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3716 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3717 break;
3718
3719 value = JUMP_LABEL (insn);
3720 }
3721 if (depth == 10)
3722 return label;
3723 return value;
3724 }
3725
3726 /* Assuming that field IDX of X is a vector of label_refs,
3727 replace each of them by the ultimate label reached by it.
3728 Return nonzero if a change is made.
   (follow_jumps itself declines to chain across a NOTE_INSN_LOOP_BEG
   until reload has completed.)  */
3730
3731 static int
3732 tension_vector_labels (x, idx)
3733 register rtx x;
3734 register int idx;
3735 {
3736 int changed = 0;
3737 register int i;
3738 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3739 {
3740 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3741 register rtx nlabel = follow_jumps (olabel);
3742 if (nlabel && nlabel != olabel)
3743 {
3744 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3745 ++LABEL_NUSES (nlabel);
3746 if (--LABEL_NUSES (olabel) == 0)
3747 delete_insn (olabel);
3748 changed = 1;
3749 }
3750 }
3751 return changed;
3752 }
3753 \f
3754 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3755 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3756 in INSN, then store one of them in JUMP_LABEL (INSN).
3757 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3758 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3759 Also, when there are consecutive labels, canonicalize on the last of them.
3760
3761 Note that two labels separated by a loop-beginning note
3762 must be kept distinct if we have not yet done loop-optimization,
3763 because the gap between them is where loop-optimize
   will want to move invariant code to.  A nonzero CROSS_JUMP tells us
   that loop optimization has already been done.
3766
3767 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3768 two labels distinct if they are separated by only USE or CLOBBER insns. */
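/* For example, given the consecutive labels

     L1: L2: (insn ...)

   a LABEL_REF for L1 is rewritten to refer to L2, the last label of the
   group, and it is L2's use count that gets incremented.  */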
3769
3770 static void
3771 mark_jump_label (x, insn, cross_jump, in_mem)
3772 register rtx x;
3773 rtx insn;
3774 int cross_jump;
3775 int in_mem;
3776 {
3777 register RTX_CODE code = GET_CODE (x);
3778 register int i;
3779 register const char *fmt;
3780
3781 switch (code)
3782 {
3783 case PC:
3784 case CC0:
3785 case REG:
3786 case SUBREG:
3787 case CONST_INT:
3788 case CONST_DOUBLE:
3789 case CLOBBER:
3790 case CALL:
3791 return;
3792
3793 case MEM:
3794 in_mem = 1;
3795 break;
3796
3797 case SYMBOL_REF:
3798 if (!in_mem)
3799 return;
3800
3801 /* If this is a constant-pool reference, see if it is a label. */
3802 if (CONSTANT_POOL_ADDRESS_P (x))
3803 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
3804 break;
3805
3806 case LABEL_REF:
3807 {
3808 rtx label = XEXP (x, 0);
3809 rtx olabel = label;
3810 rtx note;
3811 rtx next;
3812
3813 if (GET_CODE (label) != CODE_LABEL)
3814 abort ();
3815
3816 /* Ignore references to labels of containing functions. */
3817 if (LABEL_REF_NONLOCAL_P (x))
3818 break;
3819
3820 /* If there are other labels following this one,
3821 replace it with the last of the consecutive labels. */
3822 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3823 {
3824 if (GET_CODE (next) == CODE_LABEL)
3825 label = next;
3826 else if (cross_jump && GET_CODE (next) == INSN
3827 && (GET_CODE (PATTERN (next)) == USE
3828 || GET_CODE (PATTERN (next)) == CLOBBER))
3829 continue;
3830 else if (GET_CODE (next) != NOTE)
3831 break;
3832 else if (! cross_jump
3833 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3834 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
3835 /* ??? Optional. Disables some optimizations, but
3836 makes gcov output more accurate with -O. */
3837 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
3838 break;
3839 }
3840
3841 XEXP (x, 0) = label;
3842 if (! insn || ! INSN_DELETED_P (insn))
3843 ++LABEL_NUSES (label);
3844
3845 if (insn)
3846 {
3847 if (GET_CODE (insn) == JUMP_INSN)
3848 JUMP_LABEL (insn) = label;
3849
3850 /* If we've changed OLABEL and we had a REG_LABEL note
3851 for it, update it as well. */
3852 else if (label != olabel
3853 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3854 XEXP (note, 0) = label;
3855
3856 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3857 is one. */
3858 else if (! find_reg_note (insn, REG_LABEL, label))
3859 {
	      /* This code used to ignore labels which referred to dispatch
		 tables to avoid flow.c generating worse code.

		 However, in the presence of global optimizations like
3864 gcse which call find_basic_blocks without calling
3865 life_analysis, not recording such labels will lead
3866 to compiler aborts because of inconsistencies in the
3867 flow graph. So we go ahead and record the label.
3868
3869 It may also be the case that the optimization argument
3870 is no longer valid because of the more accurate cfg
3871 we build in find_basic_blocks -- it no longer pessimizes
3872 code when it finds a REG_LABEL note. */
3873 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
3874 REG_NOTES (insn));
3875 }
3876 }
3877 return;
3878 }
3879
3880 /* Do walk the labels in a vector, but not the first operand of an
3881 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3882 case ADDR_VEC:
3883 case ADDR_DIFF_VEC:
3884 if (! INSN_DELETED_P (insn))
3885 {
3886 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
3887
3888 for (i = 0; i < XVECLEN (x, eltnum); i++)
3889 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
3890 cross_jump, in_mem);
3891 }
3892 return;
3893
3894 default:
3895 break;
3896 }
3897
3898 fmt = GET_RTX_FORMAT (code);
3899 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3900 {
3901 if (fmt[i] == 'e')
3902 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
3903 else if (fmt[i] == 'E')
3904 {
3905 register int j;
3906 for (j = 0; j < XVECLEN (x, i); j++)
3907 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
3908 }
3909 }
3910 }
3911
3912 /* If all INSN does is set the pc, delete it,
3913 and delete the insn that set the condition codes for it
   if setting them was all the previous insn did.  */
3915
3916 void
3917 delete_jump (insn)
3918 rtx insn;
3919 {
3920 register rtx set = single_set (insn);
3921
3922 if (set && GET_CODE (SET_DEST (set)) == PC)
3923 delete_computation (insn);
3924 }
3925
3926 /* Verify INSN is a BARRIER and delete it. */
3927
3928 void
3929 delete_barrier (insn)
3930 rtx insn;
3931 {
3932 if (GET_CODE (insn) != BARRIER)
3933 abort ();
3934
3935 delete_insn (insn);
3936 }
3937
3938 /* Recursively delete prior insns that compute the value (used only by INSN
3939 which the caller is deleting) stored in the register mentioned by NOTE
3940 which is a REG_DEAD note associated with INSN. */
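/* For example, if INSN carries the note (REG_DEAD (reg 5)) and the
   previous nonnote insn is simply

     (set (reg 5) (plus (reg 6) (const_int 4)))

   with no other effects, then that insn computed a value used only by
   INSN and can be deleted along with it.  The register numbers here are
   purely illustrative.  */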
3941
3942 static void
3943 delete_prior_computation (note, insn)
3944 rtx note;
3945 rtx insn;
3946 {
3947 rtx our_prev;
3948 rtx reg = XEXP (note, 0);
3949
3950 for (our_prev = prev_nonnote_insn (insn);
3951 our_prev && (GET_CODE (our_prev) == INSN
3952 || GET_CODE (our_prev) == CALL_INSN);
3953 our_prev = prev_nonnote_insn (our_prev))
3954 {
3955 rtx pat = PATTERN (our_prev);
3956
3957 /* If we reach a CALL which is not calling a const function
3958 or the callee pops the arguments, then give up. */
3959 if (GET_CODE (our_prev) == CALL_INSN
3960 && (! CONST_CALL_P (our_prev)
3961 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
3962 break;
3963
3964 /* If we reach a SEQUENCE, it is too complex to try to
3965 do anything with it, so give up. */
3966 if (GET_CODE (pat) == SEQUENCE)
3967 break;
3968
3969 if (GET_CODE (pat) == USE
3970 && GET_CODE (XEXP (pat, 0)) == INSN)
3971 /* reorg creates USEs that look like this. We leave them
3972 alone because reorg needs them for its own purposes. */
3973 break;
3974
3975 if (reg_set_p (reg, pat))
3976 {
3977 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
3978 break;
3979
3980 if (GET_CODE (pat) == PARALLEL)
3981 {
3982 /* If we find a SET of something else, we can't
3983 delete the insn. */
3984
3985 int i;
3986
3987 for (i = 0; i < XVECLEN (pat, 0); i++)
3988 {
3989 rtx part = XVECEXP (pat, 0, i);
3990
3991 if (GET_CODE (part) == SET
3992 && SET_DEST (part) != reg)
3993 break;
3994 }
3995
3996 if (i == XVECLEN (pat, 0))
3997 delete_computation (our_prev);
3998 }
3999 else if (GET_CODE (pat) == SET
4000 && GET_CODE (SET_DEST (pat)) == REG)
4001 {
4002 int dest_regno = REGNO (SET_DEST (pat));
4003 int dest_endregno
4004 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
4005 ? HARD_REGNO_NREGS (dest_regno,
4006 GET_MODE (SET_DEST (pat))) : 1);
4007 int regno = REGNO (reg);
4008 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
4009 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
4010
4011 if (dest_regno >= regno
4012 && dest_endregno <= endregno)
4013 delete_computation (our_prev);
4014
4015 /* We may have a multi-word hard register and some, but not
4016 all, of the words of the register are needed in subsequent
	      insns.  Write REG_UNUSED notes for those parts that are not
	      needed.  */
4019 else if (dest_regno <= regno
4020 && dest_endregno >= endregno)
4021 {
4022 int i;
4023
4024 REG_NOTES (our_prev)
4025 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (our_prev));
4026
4027 for (i = dest_regno; i < dest_endregno; i++)
4028 if (! find_regno_note (our_prev, REG_UNUSED, i))
4029 break;
4030
4031 if (i == dest_endregno)
4032 delete_computation (our_prev);
4033 }
4034 }
4035
4036 break;
4037 }
4038
4039 /* If PAT references the register that dies here, it is an
4040 additional use. Hence any prior SET isn't dead. However, this
4041 insn becomes the new place for the REG_DEAD note. */
4042 if (reg_overlap_mentioned_p (reg, pat))
4043 {
4044 XEXP (note, 1) = REG_NOTES (our_prev);
4045 REG_NOTES (our_prev) = note;
4046 break;
4047 }
4048 }
4049 }
4050
4051 /* Delete INSN and recursively delete insns that compute values used only
4052 by INSN. This uses the REG_DEAD notes computed during flow analysis.
4053 If we are running before flow.c, we need do nothing since flow.c will
4054 delete dead code. We also can't know if the registers being used are
4055 dead or not at this point.
4056
4057 Otherwise, look at all our REG_DEAD notes. If a previous insn does
4058 nothing other than set a register that dies in this insn, we can delete
4059 that insn as well.
4060
4061 On machines with CC0, if CC0 is used in this insn, we may be able to
4062 delete the insn that set it. */
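/* For example, on a cc0 machine, deleting a conditional jump normally
   deletes the comparison insn immediately before it as well, since that
   insn existed only to set cc0 for the jump.  */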
4063
4064 static void
4065 delete_computation (insn)
4066 rtx insn;
4067 {
4068 rtx note, next;
4069 rtx set;
4070
4071 #ifdef HAVE_cc0
4072 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
4073 {
4074 rtx prev = prev_nonnote_insn (insn);
4075 /* We assume that at this stage
4076 CC's are always set explicitly
4077 and always immediately before the jump that
4078 will use them. So if the previous insn
4079 exists to set the CC's, delete it
4080 (unless it performs auto-increments, etc.). */
4081 if (prev && GET_CODE (prev) == INSN
4082 && sets_cc0_p (PATTERN (prev)))
4083 {
4084 if (sets_cc0_p (PATTERN (prev)) > 0
4085 && ! side_effects_p (PATTERN (prev)))
4086 delete_computation (prev);
4087 else
4088 /* Otherwise, show that cc0 won't be used. */
4089 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
4090 cc0_rtx, REG_NOTES (prev));
4091 }
4092 }
4093 #endif
4094
4095 #ifdef INSN_SCHEDULING
4096 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
4097 reload has completed. The schedulers need to be fixed. Until
4098 they are, we must not rely on the death notes here. */
4099 if (reload_completed && flag_schedule_insns_after_reload)
4100 {
4101 delete_insn (insn);
4102 return;
4103 }
4104 #endif
4105
4106 /* The REG_DEAD note may have been omitted for a register
4107 which is both set and used by the insn. */
4108 set = single_set (insn);
4109 if (set && GET_CODE (SET_DEST (set)) == REG)
4110 {
4111 int dest_regno = REGNO (SET_DEST (set));
4112 int dest_endregno
4113 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
4114 ? HARD_REGNO_NREGS (dest_regno,
4115 GET_MODE (SET_DEST (set))) : 1);
4116 int i;
4117
4118 for (i = dest_regno; i < dest_endregno; i++)
4119 {
4120 if (! refers_to_regno_p (i, i + 1, SET_SRC (set), NULL_PTR)
4121 || find_regno_note (insn, REG_DEAD, i))
4122 continue;
4123
4124 note = gen_rtx_EXPR_LIST (REG_DEAD, (i < FIRST_PSEUDO_REGISTER
4125 ? gen_rtx_REG (reg_raw_mode[i], i)
4126 : SET_DEST (set)), NULL_RTX);
4127 delete_prior_computation (note, insn);
4128 }
4129 }
4130
4131 for (note = REG_NOTES (insn); note; note = next)
4132 {
4133 next = XEXP (note, 1);
4134
4135 if (REG_NOTE_KIND (note) != REG_DEAD
4136 /* Verify that the REG_NOTE is legitimate. */
4137 || GET_CODE (XEXP (note, 0)) != REG)
4138 continue;
4139
4140 delete_prior_computation (note, insn);
4141 }
4142
4143 delete_insn (insn);
4144 }
4145 \f
4146 /* Delete insn INSN from the chain of insns and update label ref counts.
4147 May delete some following insns as a consequence; may even delete
4148 a label elsewhere and insns that follow it.
4149
4150 Returns the first insn after INSN that was not deleted. */
4151
4152 rtx
4153 delete_insn (insn)
4154 register rtx insn;
4155 {
4156 register rtx next = NEXT_INSN (insn);
4157 register rtx prev = PREV_INSN (insn);
4158 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
4159 register int dont_really_delete = 0;
4160
4161 while (next && INSN_DELETED_P (next))
4162 next = NEXT_INSN (next);
4163
4164 /* This insn is already deleted => return first following nondeleted. */
4165 if (INSN_DELETED_P (insn))
4166 return next;
4167
4168 if (was_code_label)
4169 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
4170
4171 /* Don't delete user-declared labels. When optimizing, convert them
4172 to special NOTEs instead. When not optimizing, leave them alone. */
4173 if (was_code_label && LABEL_NAME (insn) != 0)
4174 {
4175 if (! optimize)
4176 dont_really_delete = 1;
4177 else if (! dont_really_delete)
4178 {
4179 PUT_CODE (insn, NOTE);
4180 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
4181 NOTE_SOURCE_FILE (insn) = 0;
4182 dont_really_delete = 1;
4183 }
4184 }
4185 else
4186 /* Mark this insn as deleted. */
4187 INSN_DELETED_P (insn) = 1;
4188
4189 /* If this is an unconditional jump, delete it from the jump chain. */
4190 if (simplejump_p (insn))
4191 delete_from_jump_chain (insn);
4192
4193 /* If instruction is followed by a barrier,
4194 delete the barrier too. */
4195
4196 if (next != 0 && GET_CODE (next) == BARRIER)
4197 {
4198 INSN_DELETED_P (next) = 1;
4199 next = NEXT_INSN (next);
4200 }
4201
  /* Patch out INSN (and the barrier, if any).  */
4203
4204 if (! dont_really_delete)
4205 {
4206 if (prev)
4207 {
4208 NEXT_INSN (prev) = next;
4209 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
4210 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
4211 XVECLEN (PATTERN (prev), 0) - 1)) = next;
4212 }
4213
4214 if (next)
4215 {
4216 PREV_INSN (next) = prev;
4217 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
4218 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4219 }
4220
4221 if (prev && NEXT_INSN (prev) == 0)
4222 set_last_insn (prev);
4223 }
4224
4225 /* If deleting a jump, decrement the count of the label,
4226 and delete the label if it is now unused. */
4227
4228 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
4229 {
4230 rtx lab = JUMP_LABEL (insn), lab_next;
4231
4232 if (--LABEL_NUSES (lab) == 0)
4233 {
4234 /* This can delete NEXT or PREV,
4235 either directly if NEXT is JUMP_LABEL (INSN),
4236 or indirectly through more levels of jumps. */
4237 delete_insn (lab);
4238
4239 /* I feel a little doubtful about this loop,
4240 but I see no clean and sure alternative way
4241 to find the first insn after INSN that is not now deleted.
4242 I hope this works. */
4243 while (next && INSN_DELETED_P (next))
4244 next = NEXT_INSN (next);
4245 return next;
4246 }
4247 else if ((lab_next = next_nonnote_insn (lab)) != NULL
4248 && GET_CODE (lab_next) == JUMP_INSN
4249 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
4250 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
4251 {
4252 /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
4254 just yet, as it might be referenced in code leading up to
4255 the tablejump. */
4256 delete_insn (lab_next);
4257 }
4258 }
4259
4260 /* Likewise if we're deleting a dispatch table. */
4261
4262 if (GET_CODE (insn) == JUMP_INSN
4263 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4264 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4265 {
4266 rtx pat = PATTERN (insn);
4267 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4268 int len = XVECLEN (pat, diff_vec_p);
4269
4270 for (i = 0; i < len; i++)
4271 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
4272 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
4273 while (next && INSN_DELETED_P (next))
4274 next = NEXT_INSN (next);
4275 return next;
4276 }
4277
4278 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
4279 prev = PREV_INSN (prev);
4280
4281 /* If INSN was a label and a dispatch table follows it,
4282 delete the dispatch table. The tablejump must have gone already.
4283 It isn't useful to fall through into a table. */
4284
4285 if (was_code_label
4286 && NEXT_INSN (insn) != 0
4287 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4288 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
4289 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
4290 next = delete_insn (NEXT_INSN (insn));
4291
4292 /* If INSN was a label, delete insns following it if now unreachable. */
4293
4294 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
4295 {
4296 register RTX_CODE code;
4297 while (next != 0
4298 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
4299 || code == NOTE || code == BARRIER
4300 || (code == CODE_LABEL && INSN_DELETED_P (next))))
4301 {
4302 if (code == NOTE
4303 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
4304 next = NEXT_INSN (next);
4305 /* Keep going past other deleted labels to delete what follows. */
4306 else if (code == CODE_LABEL && INSN_DELETED_P (next))
4307 next = NEXT_INSN (next);
4308 else
4309 /* Note: if this deletes a jump, it can cause more
4310 deletion of unreachable code, after a different label.
4311 As long as the value from this recursive call is correct,
4312 this invocation functions correctly. */
4313 next = delete_insn (next);
4314 }
4315 }
4316
4317 return next;
4318 }
4319
4320 /* Advance from INSN till reaching something not deleted
4321 then return that. May return INSN itself. */
4322
4323 rtx
4324 next_nondeleted_insn (insn)
4325 rtx insn;
4326 {
4327 while (INSN_DELETED_P (insn))
4328 insn = NEXT_INSN (insn);
4329 return insn;
4330 }
4331 \f
4332 /* Delete a range of insns from FROM to TO, inclusive.
4333 This is for the sake of peephole optimization, so assume
4334 that whatever these insns do will still be done by a new
4335 peephole insn that will replace them. */
4336
4337 void
4338 delete_for_peephole (from, to)
4339 register rtx from, to;
4340 {
4341 register rtx insn = from;
4342
4343 while (1)
4344 {
4345 register rtx next = NEXT_INSN (insn);
4346 register rtx prev = PREV_INSN (insn);
4347
4348 if (GET_CODE (insn) != NOTE)
4349 {
4350 INSN_DELETED_P (insn) = 1;
4351
4352 /* Patch this insn out of the chain. */
4353 /* We don't do this all at once, because we
4354 must preserve all NOTEs. */
4355 if (prev)
4356 NEXT_INSN (prev) = next;
4357
4358 if (next)
4359 PREV_INSN (next) = prev;
4360 }
4361
4362 if (insn == to)
4363 break;
4364 insn = next;
4365 }
4366
4367 /* Note that if TO is an unconditional jump
4368 we *do not* delete the BARRIER that follows,
4369 since the peephole that replaces this sequence
4370 is also an unconditional jump in that case. */
4371 }
4372 \f
4373 /* We have determined that INSN is never reached, and are about to
4374 delete it. Print a warning if the user asked for one.
4375
4376 To try to make this warning more useful, this should only be called
4377 once per basic block not reached, and it only warns when the basic
4378 block contains more than one line from the current function, and
4379 contains at least one operation. CSE and inlining can duplicate insns,
4380 so it's possible to get spurious warnings from this. */
4381
4382 void
4383 never_reached_warning (avoided_insn)
4384 rtx avoided_insn;
4385 {
4386 rtx insn;
4387 rtx a_line_note = NULL;
4388 int two_avoided_lines = 0;
4389 int contains_insn = 0;
4390
4391 if (! warn_notreached)
4392 return;
4393
4394 /* Scan forwards, looking at LINE_NUMBER notes, until
4395 we hit a LABEL or we run out of insns. */
4396
4397 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
4398 {
4399 if (GET_CODE (insn) == CODE_LABEL)
4400 break;
4401 else if (GET_CODE (insn) == NOTE /* A line number note? */
4402 && NOTE_LINE_NUMBER (insn) >= 0)
4403 {
4404 if (a_line_note == NULL)
4405 a_line_note = insn;
4406 else
4407 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
4408 != NOTE_LINE_NUMBER (insn));
4409 }
4410 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4411 contains_insn = 1;
4412 }
4413 if (two_avoided_lines && contains_insn)
4414 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
4415 NOTE_LINE_NUMBER (a_line_note),
4416 "will never be executed");
4417 }
4418 \f
4419 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
4420 NLABEL as a return. Accrue modifications into the change group. */
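/* For example, with OLABEL = L1 and NLABEL = 0, the pattern

     (set (pc) (if_then_else (ne (reg 0) (const_int 0))
			     (label_ref L1) (pc)))

   has its LABEL_REF replaced by (return).  The replacement is only
   queued with validate_change; the caller decides whether to apply the
   change group.  */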
4421
4422 static void
4423 redirect_exp_1 (loc, olabel, nlabel, insn)
4424 rtx *loc;
4425 rtx olabel, nlabel;
4426 rtx insn;
4427 {
4428 register rtx x = *loc;
4429 register RTX_CODE code = GET_CODE (x);
4430 register int i;
4431 register const char *fmt;
4432
4433 if (code == LABEL_REF)
4434 {
4435 if (XEXP (x, 0) == olabel)
4436 {
4437 rtx n;
4438 if (nlabel)
4439 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
4440 else
4441 n = gen_rtx_RETURN (VOIDmode);
4442
4443 validate_change (insn, loc, n, 1);
4444 return;
4445 }
4446 }
4447 else if (code == RETURN && olabel == 0)
4448 {
4449 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
4450 if (loc == &PATTERN (insn))
4451 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
4452 validate_change (insn, loc, x, 1);
4453 return;
4454 }
4455
4456 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
4457 && GET_CODE (SET_SRC (x)) == LABEL_REF
4458 && XEXP (SET_SRC (x), 0) == olabel)
4459 {
4460 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
4461 return;
4462 }
4463
4464 fmt = GET_RTX_FORMAT (code);
4465 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4466 {
4467 if (fmt[i] == 'e')
4468 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
4469 else if (fmt[i] == 'E')
4470 {
4471 register int j;
4472 for (j = 0; j < XVECLEN (x, i); j++)
4473 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
4474 }
4475 }
4476 }
4477
4478 /* Similar, but apply the change group and report success or failure. */
4479
4480 int
4481 redirect_exp (loc, olabel, nlabel, insn)
4482 rtx *loc;
4483 rtx olabel, nlabel;
4484 rtx insn;
4485 {
4486 redirect_exp_1 (loc, olabel, nlabel, insn);
4487 if (num_validated_changes () == 0)
4488 return 0;
4489
4490 return apply_change_group ();
4491 }
4492
4493 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
4494 the modifications into the change group. Return false if we did
4495 not see how to do that. */
4496
4497 int
4498 redirect_jump_1 (jump, nlabel)
4499 rtx jump, nlabel;
4500 {
4501 int ochanges = num_validated_changes ();
4502 redirect_exp_1 (&PATTERN (jump), JUMP_LABEL (jump), nlabel, jump);
4503 return num_validated_changes () > ochanges;
4504 }
4505
4506 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
4507 jump target label is unused as a result, it and the code following
4508 it may be deleted.
4509
4510 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4511 RETURN insn.
4512
4513 The return value will be 1 if the change was made, 0 if it wasn't
4514 (this can only occur for NLABEL == 0). */
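/* For example, redirect_jump (jump, 0) rewrites JUMP as a (possibly
   conditional) return.  That is the one case that can fail, typically
   because the target machine has no matching (conditional) return
   pattern.  */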
4515
4516 int
4517 redirect_jump (jump, nlabel)
4518 rtx jump, nlabel;
4519 {
4520 register rtx olabel = JUMP_LABEL (jump);
4521
4522 if (nlabel == olabel)
4523 return 1;
4524
4525 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
4526 return 0;
4527
4528 /* If this is an unconditional branch, delete it from the jump_chain of
4529 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4530 have UID's in range and JUMP_CHAIN is valid). */
4531 if (jump_chain && (simplejump_p (jump)
4532 || GET_CODE (PATTERN (jump)) == RETURN))
4533 {
4534 int label_index = nlabel ? INSN_UID (nlabel) : 0;
4535
4536 delete_from_jump_chain (jump);
4537 if (label_index < max_jump_chain
4538 && INSN_UID (jump) < max_jump_chain)
4539 {
4540 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
4541 jump_chain[label_index] = jump;
4542 }
4543 }
4544
4545 JUMP_LABEL (jump) = nlabel;
4546 if (nlabel)
4547 ++LABEL_NUSES (nlabel);
4548
4549 /* If we're eliding the jump over exception cleanups at the end of a
4550 function, move the function end note so that -Wreturn-type works. */
4551 if (olabel && NEXT_INSN (olabel)
4552 && GET_CODE (NEXT_INSN (olabel)) == NOTE
4553 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
4554 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
4555
4556 if (olabel && --LABEL_NUSES (olabel) == 0)
4557 delete_insn (olabel);
4558
4559 return 1;
4560 }
4561
4562 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4563 Accrue the modifications into the change group. */
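/* For example, (if_then_else (eq ...) (label_ref L) (pc)) becomes
   (if_then_else (ne ...) (label_ref L) (pc)) when the comparison can be
   reversed; otherwise the THEN and ELSE arms are swapped instead.  */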
4564
4565 static void
4566 invert_exp_1 (x, insn)
4567 rtx x;
4568 rtx insn;
4569 {
4570 register RTX_CODE code;
4571 register int i;
4572 register const char *fmt;
4573
4574 code = GET_CODE (x);
4575
4576 if (code == IF_THEN_ELSE)
4577 {
4578 register rtx comp = XEXP (x, 0);
4579 register rtx tem;
4580
      /* We can do this in two ways:  The preferable way, which can only
	 be done if the comparison is safely reversible (an IEEE
	 floating-point comparison, for instance, may not be), is to
	 reverse the comparison code.  Otherwise, swap the THEN-part and
	 ELSE-part of the IF_THEN_ELSE.  If we can't do either, fail.  */
4585
4586 if (can_reverse_comparison_p (comp, insn))
4587 {
4588 validate_change (insn, &XEXP (x, 0),
4589 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
4590 GET_MODE (comp), XEXP (comp, 0),
4591 XEXP (comp, 1)),
4592 1);
4593 return;
4594 }
4595
4596 tem = XEXP (x, 1);
4597 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
4598 validate_change (insn, &XEXP (x, 2), tem, 1);
4599 return;
4600 }
4601
4602 fmt = GET_RTX_FORMAT (code);
4603 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4604 {
4605 if (fmt[i] == 'e')
4606 invert_exp_1 (XEXP (x, i), insn);
4607 else if (fmt[i] == 'E')
4608 {
4609 register int j;
4610 for (j = 0; j < XVECLEN (x, i); j++)
4611 invert_exp_1 (XVECEXP (x, i, j), insn);
4612 }
4613 }
4614 }
4615
4616 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4617
4618 Return 1 if we can do so, 0 if we cannot find a way to do so that
4619 matches a pattern. */
4620
4621 int
4622 invert_exp (x, insn)
4623 rtx x;
4624 rtx insn;
4625 {
4626 invert_exp_1 (x, insn);
4627 if (num_validated_changes () == 0)
4628 return 0;
4629
4630 return apply_change_group ();
4631 }
4632
4633 /* Invert the condition of the jump JUMP, and make it jump to label
4634 NLABEL instead of where it jumps now. Accrue changes into the
4635 change group. Return false if we didn't see how to perform the
4636 inversion and redirection. */
4637
4638 int
4639 invert_jump_1 (jump, nlabel)
4640 rtx jump, nlabel;
4641 {
4642 int ochanges;
4643
4644 ochanges = num_validated_changes ();
4645 invert_exp_1 (PATTERN (jump), jump);
4646 if (num_validated_changes () == ochanges)
4647 return 0;
4648
4649 return redirect_jump_1 (jump, nlabel);
4650 }
4651
4652 /* Invert the condition of the jump JUMP, and make it jump to label
4653 NLABEL instead of where it jumps now. Return true if successful. */
4654
4655 int
4656 invert_jump (jump, nlabel)
4657 rtx jump, nlabel;
4658 {
4659 /* We have to either invert the condition and change the label or
4660 do neither. Either operation could fail. We first try to invert
4661 the jump. If that succeeds, we try changing the label. If that fails,
4662 we invert the jump back to what it was. */
4663
4664 if (! invert_exp (PATTERN (jump), jump))
4665 return 0;
4666
4667 if (redirect_jump (jump, nlabel))
4668 {
4669 /* An inverted jump means that a probability taken becomes a
4670 probability not taken. Subtract the branch probability from the
4671 probability base to convert it back to a taken probability. */
4672
4673 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
4674 if (note)
4675 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
4676
4677 return 1;
4678 }
4679
4680 if (! invert_exp (PATTERN (jump), jump))
4681 /* This should just be putting it back the way it was. */
4682 abort ();
4683
4684 return 0;
4685 }
4686
4687 /* Delete the instruction JUMP from any jump chain it might be on. */
4688
4689 static void
4690 delete_from_jump_chain (jump)
4691 rtx jump;
4692 {
4693 int index;
4694 rtx olabel = JUMP_LABEL (jump);
4695
4696 /* Handle unconditional jumps. */
4697 if (jump_chain && olabel != 0
4698 && INSN_UID (olabel) < max_jump_chain
4699 && simplejump_p (jump))
4700 index = INSN_UID (olabel);
4701 /* Handle return insns. */
4702 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4703 index = 0;
  else
    return;
4705
4706 if (jump_chain[index] == jump)
4707 jump_chain[index] = jump_chain[INSN_UID (jump)];
4708 else
4709 {
4710 rtx insn;
4711
4712 for (insn = jump_chain[index];
4713 insn != 0;
4714 insn = jump_chain[INSN_UID (insn)])
4715 if (jump_chain[INSN_UID (insn)] == jump)
4716 {
4717 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4718 break;
4719 }
4720 }
4721 }
4722 \f
4723 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4724
4725 If the old jump target label (before the dispatch table) becomes unused,
4726 it and the dispatch table may be deleted. In that case, find the insn
4727 before the jump references that label and delete it and logical successors
4728 too. */
4729
4730 static void
4731 redirect_tablejump (jump, nlabel)
4732 rtx jump, nlabel;
4733 {
4734 register rtx olabel = JUMP_LABEL (jump);
4735
4736 /* Add this jump to the jump_chain of NLABEL. */
4737 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4738 && INSN_UID (jump) < max_jump_chain)
4739 {
4740 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4741 jump_chain[INSN_UID (nlabel)] = jump;
4742 }
4743
4744 PATTERN (jump) = gen_jump (nlabel);
4745 JUMP_LABEL (jump) = nlabel;
4746 ++LABEL_NUSES (nlabel);
4747 INSN_CODE (jump) = -1;
4748
4749 if (--LABEL_NUSES (olabel) == 0)
4750 {
4751 delete_labelref_insn (jump, olabel, 0);
4752 delete_insn (olabel);
4753 }
4754 }
4755
4756 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4757 If we found one, delete it and then delete this insn if DELETE_THIS is
4758 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4759
4760 static int
4761 delete_labelref_insn (insn, label, delete_this)
4762 rtx insn, label;
4763 int delete_this;
4764 {
4765 int deleted = 0;
4766 rtx link;
4767
4768 if (GET_CODE (insn) != NOTE
4769 && reg_mentioned_p (label, PATTERN (insn)))
4770 {
4771 if (delete_this)
4772 {
4773 delete_insn (insn);
4774 deleted = 1;
4775 }
4776 else
4777 return 1;
4778 }
4779
4780 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4781 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4782 {
4783 if (delete_this)
4784 {
4785 delete_insn (insn);
4786 deleted = 1;
4787 }
4788 else
4789 return 1;
4790 }
4791
4792 return deleted;
4793 }
4794 \f
4795 /* Like rtx_equal_p except that it considers two REGs as equal
4796 if they renumber to the same value and considers two commutative
4797 operations to be the same if the order of the operands has been
4798 reversed.
4799
4800 ??? Addition is not commutative on the PA due to the weird implicit
4801 space register selection rules for memory addresses. Therefore, we
4802 don't consider a + b == b + a.
4803
4804 We could/should make this test a little tighter. Possibly only
4805 disabling it on the PA via some backend macro or only disabling this
4806 case when the PLUS is inside a MEM. */
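/* For example, pseudos (reg 70) and (reg 71) compare equal here if
   reg_renumber assigned both to the same hard register, even though
   plain rtx_equal_p would call them different.  */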
4807
4808 int
4809 rtx_renumbered_equal_p (x, y)
4810 rtx x, y;
4811 {
4812 register int i;
4813 register RTX_CODE code = GET_CODE (x);
4814 register const char *fmt;
4815
4816 if (x == y)
4817 return 1;
4818
4819 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4820 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4821 && GET_CODE (SUBREG_REG (y)) == REG)))
4822 {
4823 int reg_x = -1, reg_y = -1;
4824 int word_x = 0, word_y = 0;
4825
4826 if (GET_MODE (x) != GET_MODE (y))
4827 return 0;
4828
4829 /* If we haven't done any renumbering, don't
4830 make any assumptions. */
4831 if (reg_renumber == 0)
4832 return rtx_equal_p (x, y);
4833
4834 if (code == SUBREG)
4835 {
4836 reg_x = REGNO (SUBREG_REG (x));
4837 word_x = SUBREG_WORD (x);
4838
4839 if (reg_renumber[reg_x] >= 0)
4840 {
4841 reg_x = reg_renumber[reg_x] + word_x;
4842 word_x = 0;
4843 }
4844 }
4845
4846 else
4847 {
4848 reg_x = REGNO (x);
4849 if (reg_renumber[reg_x] >= 0)
4850 reg_x = reg_renumber[reg_x];
4851 }
4852
4853 if (GET_CODE (y) == SUBREG)
4854 {
4855 reg_y = REGNO (SUBREG_REG (y));
4856 word_y = SUBREG_WORD (y);
4857
4858 if (reg_renumber[reg_y] >= 0)
4859 {
4860 reg_y = reg_renumber[reg_y];
4861 word_y = 0;
4862 }
4863 }
4864
4865 else
4866 {
4867 reg_y = REGNO (y);
4868 if (reg_renumber[reg_y] >= 0)
4869 reg_y = reg_renumber[reg_y];
4870 }
4871
4872 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4873 }
4874
4875 /* Now we have disposed of all the cases
4876 in which different rtx codes can match. */
4877 if (code != GET_CODE (y))
4878 return 0;
4879
4880 switch (code)
4881 {
4882 case PC:
4883 case CC0:
4884 case ADDR_VEC:
4885 case ADDR_DIFF_VEC:
4886 return 0;
4887
4888 case CONST_INT:
4889 return INTVAL (x) == INTVAL (y);
4890
4891 case LABEL_REF:
4892 /* We can't assume nonlocal labels have their following insns yet. */
4893 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4894 return XEXP (x, 0) == XEXP (y, 0);
4895
4896 /* Two label-refs are equivalent if they point at labels
4897 in the same position in the instruction stream. */
4898 return (next_real_insn (XEXP (x, 0))
4899 == next_real_insn (XEXP (y, 0)));
4900
4901 case SYMBOL_REF:
4902 return XSTR (x, 0) == XSTR (y, 0);
4903
4904 case CODE_LABEL:
4905 /* If we didn't match EQ equality above, they aren't the same. */
4906 return 0;
4907
4908 default:
4909 break;
4910 }
4911
4912 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4913
4914 if (GET_MODE (x) != GET_MODE (y))
4915 return 0;
4916
  /* For commutative operations, the RTXs match if the operands match in any
     order.  Also handle the simple binary and unary cases without a loop.
4919
4920 ??? Don't consider PLUS a commutative operator; see comments above. */
4921 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4922 && code != PLUS)
4923 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4924 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4925 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4926 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4927 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4928 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4929 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4930 else if (GET_RTX_CLASS (code) == '1')
4931 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4932
  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */
4935
4936 fmt = GET_RTX_FORMAT (code);
4937 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4938 {
4939 register int j;
4940 switch (fmt[i])
4941 {
4942 case 'w':
4943 if (XWINT (x, i) != XWINT (y, i))
4944 return 0;
4945 break;
4946
4947 case 'i':
4948 if (XINT (x, i) != XINT (y, i))
4949 return 0;
4950 break;
4951
4952 case 's':
4953 if (strcmp (XSTR (x, i), XSTR (y, i)))
4954 return 0;
4955 break;
4956
4957 case 'e':
4958 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4959 return 0;
4960 break;
4961
4962 case 'u':
4963 if (XEXP (x, i) != XEXP (y, i))
4964 return 0;
4965 /* fall through. */
4966 case '0':
4967 break;
4968
4969 case 'E':
4970 if (XVECLEN (x, i) != XVECLEN (y, i))
4971 return 0;
4972 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4973 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4974 return 0;
4975 break;
4976
4977 default:
4978 abort ();
4979 }
4980 }
4981 return 1;
4982 }
4983 \f
4984 /* If X is a hard register or equivalent to one or a subregister of one,
4985 return the hard register number. If X is a pseudo register that was not
4986 assigned a hard register, return the pseudo register number. Otherwise,
4987 return -1. Any rtx is valid for X. */
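/* For example, if pseudo 70 was assigned hard register 8, then for
   (reg 70) this returns 8, and for (subreg (reg 70) 1) it returns 9:
   the renumbered base register plus the subreg word.  The numbers are
   purely illustrative.  */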
4988
4989 int
4990 true_regnum (x)
4991 rtx x;
4992 {
4993 if (GET_CODE (x) == REG)
4994 {
4995 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4996 return reg_renumber[REGNO (x)];
4997 return REGNO (x);
4998 }
4999 if (GET_CODE (x) == SUBREG)
5000 {
5001 int base = true_regnum (SUBREG_REG (x));
5002 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
5003 return SUBREG_WORD (x) + base;
5004 }
5005 return -1;
5006 }
5007 \f
5008 /* Optimize code of the form:
5009
5010 for (x = a[i]; x; ...)
5011 ...
5012 for (x = a[i]; x; ...)
5013 ...
5014 foo:
5015
5016 Loop optimize will change the above code into
5017
5018 if (x = a[i])
5019 for (;;)
5020 { ...; if (! (x = ...)) break; }
5021 if (x = a[i])
5022 for (;;)
5023 { ...; if (! (x = ...)) break; }
5024 foo:
5025
5026 In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try, which is doomed to fail.
5028 We run this after loop optimization and before flow analysis. */
5029
5030 /* When comparing the insn patterns, we track the fact that different
5031 pseudo-register numbers may have been used in each computation.
5032 The following array stores an equivalence -- same_regs[I] == J means
5033 that pseudo register I was used in the first set of tests in a context
5034 where J was used in the second set. We also count the number of such
5035 pending equivalences. If nonzero, the expressions really aren't the
5036 same. */
5037
5038 static int *same_regs;
5039
5040 static int num_same_regs;
5041
5042 /* Track any registers modified between the target of the first jump and
5043 the second jump. They never compare equal. */
5044
5045 static char *modified_regs;
5046
5047 /* Record if memory was modified. */
5048
5049 static int modified_mem;
5050
5051 /* Called via note_stores on each insn between the target of the first
5052 branch and the second branch. It marks any changed registers. */
5053
5054 static void
5055 mark_modified_reg (dest, x, data)
5056 rtx dest;
5057 rtx x ATTRIBUTE_UNUSED;
5058 void *data ATTRIBUTE_UNUSED;
5059 {
5060 int regno;
5061 unsigned int i;
5062
5063 if (GET_CODE (dest) == SUBREG)
5064 dest = SUBREG_REG (dest);
5065
5066 if (GET_CODE (dest) == MEM)
5067 modified_mem = 1;
5068
5069 if (GET_CODE (dest) != REG)
5070 return;
5071
5072 regno = REGNO (dest);
5073 if (regno >= FIRST_PSEUDO_REGISTER)
5074 modified_regs[regno] = 1;
5075 else
5076 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
5077 modified_regs[regno + i] = 1;
5078 }
5079
5080 /* F is the first insn in the chain of insns. */
5081
5082 void
5083 thread_jumps (f, max_reg, flag_before_loop)
5084 rtx f;
5085 int max_reg;
5086 int flag_before_loop;
5087 {
  /* The basic algorithm is to find a conditional branch,
5089 the label it may branch to, and the branch after
5090 that label. If the two branches test the same condition,
5091 walk back from both branch paths until the insn patterns
5092 differ, or code labels are hit. If we make it back to
5093 the target of the first branch, then we know that the first branch
5094 will either always succeed or always fail depending on the relative
5095 senses of the two branches. So adjust the first branch accordingly
5096 in this case. */
5097
5098 rtx label, b1, b2, t1, t2;
5099 enum rtx_code code1, code2;
5100 rtx b1op0, b1op1, b2op0, b2op1;
5101 int changed = 1;
5102 int i;
5103 int *all_reset;
5104
5105 /* Allocate register tables and quick-reset table. */
5106 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
5107 same_regs = (int *) xmalloc (max_reg * sizeof (int));
5108 all_reset = (int *) xmalloc (max_reg * sizeof (int));
5109 for (i = 0; i < max_reg; i++)
5110 all_reset[i] = -1;
5111
5112 while (changed)
5113 {
5114 changed = 0;
5115
5116 for (b1 = f; b1; b1 = NEXT_INSN (b1))
5117 {
5118 /* Get to a candidate branch insn. */
5119 if (GET_CODE (b1) != JUMP_INSN
5120 || ! condjump_p (b1) || simplejump_p (b1)
5121 || JUMP_LABEL (b1) == 0)
5122 continue;
5123
5124 bzero (modified_regs, max_reg * sizeof (char));
5125 modified_mem = 0;
5126
5127 bcopy ((char *) all_reset, (char *) same_regs,
5128 max_reg * sizeof (int));
5129 num_same_regs = 0;
5130
5131 label = JUMP_LABEL (b1);
5132
5133 /* Look for a branch after the target. Record any registers and
5134 memory modified between the target and the branch. Stop when we
5135 get to a label since we can't know what was changed there. */
5136 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
5137 {
5138 if (GET_CODE (b2) == CODE_LABEL)
5139 break;
5140
5141 else if (GET_CODE (b2) == JUMP_INSN)
5142 {
5143 /* If this is an unconditional jump and is the only use of
5144 its target label, we can follow it. */
5145 if (simplejump_p (b2)
5146 && JUMP_LABEL (b2) != 0
5147 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
5148 {
5149 b2 = JUMP_LABEL (b2);
5150 continue;
5151 }
5152 else
5153 break;
5154 }
5155
5156 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
5157 continue;
5158
5159 if (GET_CODE (b2) == CALL_INSN)
5160 {
5161 modified_mem = 1;
5162 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5163 if (call_used_regs[i] && ! fixed_regs[i]
5164 && i != STACK_POINTER_REGNUM
5165 && i != FRAME_POINTER_REGNUM
5166 && i != HARD_FRAME_POINTER_REGNUM
5167 && i != ARG_POINTER_REGNUM)
5168 modified_regs[i] = 1;
5169 }
5170
5171 note_stores (PATTERN (b2), mark_modified_reg, NULL);
5172 }
5173
5174 /* Check the next candidate branch insn from the label
5175 of the first. */
5176 if (b2 == 0
5177 || GET_CODE (b2) != JUMP_INSN
5178 || b2 == b1
5179 || ! condjump_p (b2)
5180 || simplejump_p (b2))
5181 continue;
5182
5183 /* Get the comparison codes and operands, reversing the
5184 codes if appropriate. If we don't have comparison codes,
5185 we can't do anything. */
5186 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
5187 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
5188 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
5189 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
5190 code1 = reverse_condition (code1);
5191
5192 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
5193 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
5194 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
5195 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
5196 code2 = reverse_condition (code2);
5197
	  /* If they test the same things, and if knowing whether B1
	     branches tells us whether B2 branches, see if we can
	     thread the branch.  */
5201 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
5202 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
5203 && (comparison_dominates_p (code1, code2)
5204 || (can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
5205 0),
5206 b1)
5207 && comparison_dominates_p (code1, reverse_condition (code2)))))
5208
5209 {
5210 t1 = prev_nonnote_insn (b1);
5211 t2 = prev_nonnote_insn (b2);
5212
5213 while (t1 != 0 && t2 != 0)
5214 {
5215 if (t2 == label)
5216 {
5217 /* We have reached the target of the first branch.
5218 If there are no pending register equivalents,
5219 we know that this branch will either always
5220 succeed (if the senses of the two branches are
5221 the same) or always fail (if not). */
5222 rtx new_label;
5223
5224 if (num_same_regs != 0)
5225 break;
5226
5227 if (comparison_dominates_p (code1, code2))
5228 new_label = JUMP_LABEL (b2);
5229 else
5230 new_label = get_label_after (b2);
5231
5232 if (JUMP_LABEL (b1) != new_label)
5233 {
5234 rtx prev = PREV_INSN (new_label);
5235
5236 if (flag_before_loop
5237 && GET_CODE (prev) == NOTE
5238 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
5239 {
5240 /* Don't thread to the loop label. If a loop
5241 label is reused, loop optimization will
5242 be disabled for that loop. */
5243 new_label = gen_label_rtx ();
5244 emit_label_after (new_label, PREV_INSN (prev));
5245 }
5246 changed |= redirect_jump (b1, new_label);
5247 }
5248 break;
5249 }
5250
5251 /* If either of these is not a normal insn (it might be
5252 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
5253 have already been skipped above.) Similarly, fail
5254 if the insns are different. */
5255 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
5256 || recog_memoized (t1) != recog_memoized (t2)
5257 || ! rtx_equal_for_thread_p (PATTERN (t1),
5258 PATTERN (t2), t2))
5259 break;
5260
5261 t1 = prev_nonnote_insn (t1);
5262 t2 = prev_nonnote_insn (t2);
5263 }
5264 }
5265 }
5266 }
5267
5268 /* Clean up. */
5269 free (modified_regs);
5270 free (same_regs);
5271 free (all_reset);
5272 }
5273 \f
5274 /* This is like RTX_EQUAL_P except that it knows about our handling of
5275 possibly equivalent registers and knows to consider volatile and
5276 modified objects as not equal.
5277
5278 YINSN is the insn containing Y. */
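/* For example, pseudo (reg 80) in X may be matched against pseudo
   (reg 90) in Y the first time the pair is seen, provided YINSN is the
   last use of reg 90.  The pairing is recorded in same_regs and must
   then hold for the rest of the comparison.  The register numbers are
   purely illustrative.  */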
5279
5280 int
5281 rtx_equal_for_thread_p (x, y, yinsn)
5282 rtx x, y;
5283 rtx yinsn;
5284 {
5285 register int i;
5286 register int j;
5287 register enum rtx_code code;
5288 register const char *fmt;
5289
5290 code = GET_CODE (x);
5291 /* Rtx's of different codes cannot be equal. */
5292 if (code != GET_CODE (y))
5293 return 0;
5294
5295 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
5296 (REG:SI x) and (REG:HI x) are NOT equivalent. */
5297
5298 if (GET_MODE (x) != GET_MODE (y))
5299 return 0;
5300
5301 /* For floating-point, consider everything unequal. This is a bit
5302 pessimistic, but this pass would only rarely do anything for FP
5303 anyway. */
5304 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
5305 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
5306 return 0;
5307
  /* For commutative operations, the RTXs match if the operands match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
5310 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5311 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5312 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
5313 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
5314 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
5315 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
5316 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5317 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
5318 else if (GET_RTX_CLASS (code) == '1')
5319 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5320
5321 /* Handle special-cases first. */
5322 switch (code)
5323 {
5324 case REG:
5325 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
5326 return 1;
5327
      /* If neither is a user variable or a hard register, check for a
	 possible equivalence.  */
5330 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
5331 || REGNO (x) < FIRST_PSEUDO_REGISTER
5332 || REGNO (y) < FIRST_PSEUDO_REGISTER)
5333 return 0;
5334
5335 if (same_regs[REGNO (x)] == -1)
5336 {
5337 same_regs[REGNO (x)] = REGNO (y);
5338 num_same_regs++;
5339
5340 /* If this is the first time we are seeing a register on the `Y'
5341 side, see if it is the last use. If not, we can't thread the
5342 jump, so mark it as not equivalent. */
5343 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
5344 return 0;
5345
5346 return 1;
5347 }
      else
	return (same_regs[REGNO (x)] == (int) REGNO (y));
5352
5353 case MEM:
5354 /* If memory modified or either volatile, not equivalent.
5355 Else, check address. */
5356 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5357 return 0;
5358
5359 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5360
5361 case ASM_INPUT:
5362 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5363 return 0;
5364
5365 break;
5366
5367 case SET:
5368 /* Cancel a pending `same_regs' if setting equivalenced registers.
5369 Then process source. */
5370 if (GET_CODE (SET_DEST (x)) == REG
5371 && GET_CODE (SET_DEST (y)) == REG)
5372 {
5373 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
5374 {
5375 same_regs[REGNO (SET_DEST (x))] = -1;
5376 num_same_regs--;
5377 }
5378 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
5379 return 0;
5380 }
5381 else
5382 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
5383 return 0;
5384
5385 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
5386
5387 case LABEL_REF:
5388 return XEXP (x, 0) == XEXP (y, 0);
5389
5390 case SYMBOL_REF:
5391 return XSTR (x, 0) == XSTR (y, 0);
5392
5393 default:
5394 break;
5395 }
5396
5397 if (x == y)
5398 return 1;
5399
5400 fmt = GET_RTX_FORMAT (code);
5401 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5402 {
5403 switch (fmt[i])
5404 {
5405 case 'w':
5406 if (XWINT (x, i) != XWINT (y, i))
5407 return 0;
5408 break;
5409
5410 case 'n':
5411 case 'i':
5412 if (XINT (x, i) != XINT (y, i))
5413 return 0;
5414 break;
5415
5416 case 'V':
5417 case 'E':
5418 /* Two vectors must have the same length. */
5419 if (XVECLEN (x, i) != XVECLEN (y, i))
5420 return 0;
5421
5422 /* And the corresponding elements must match. */
5423 for (j = 0; j < XVECLEN (x, i); j++)
5424 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
5425 XVECEXP (y, i, j), yinsn) == 0)
5426 return 0;
5427 break;
5428
5429 case 'e':
5430 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
5431 return 0;
5432 break;
5433
5434 case 'S':
5435 case 's':
5436 if (strcmp (XSTR (x, i), XSTR (y, i)))
5437 return 0;
5438 break;
5439
5440 case 'u':
5441 /* These are just backpointers, so they don't matter. */
5442 break;
5443
5444 case '0':
5445 case 't':
5446 break;
5447
5448 /* It is believed that rtx's at this level will never
5449 contain anything but integers and other rtx's,
5450 except for within LABEL_REFs and SYMBOL_REFs. */
5451 default:
5452 abort ();
5453 }
5454 }
5455 return 1;
5456 }
5457 \f
5458
5459 #if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)
/* Return the insn in front of which NEW can safely be inserted, starting
   the search at the jump insn INSN.  Return 0 if it is not safe to do
   this jump optimization.  Note that NEW must contain a single set.  */
5463
5464 static rtx
5465 find_insert_position (insn, new)
5466 rtx insn;
5467 rtx new;
5468 {
5469 int i;
5470 rtx prev;
5471
  /* If NEW clobbers nothing (its pattern is not a PARALLEL),
     it is safe to insert NEW before INSN.  */
5473 if (GET_CODE (PATTERN (new)) != PARALLEL)
5474 return insn;
5475
5476 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5477 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5478 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5479 insn))
5480 break;
5481
5482 if (i < 0)
5483 return insn;
5484
5485 /* There is a good chance that the previous insn PREV sets the thing
5486 being clobbered (often the CC in a hard reg). If PREV does not
5487 use what NEW sets, we can insert NEW before PREV. */
5488
5489 prev = prev_active_insn (insn);
5490 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5491 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5492 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5493 insn)
5494 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5495 prev))
5496 return 0;
5497
5498 return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
5499 }
#endif /* !HAVE_cc0 && !HAVE_conditional_arithmetic */