flow.c (flow_delete_insn, [...]): Export.
[gcc.git] / gcc / jump.c
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 88, 89, 91-99, 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
25
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
30
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
38
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
43
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
46
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
50
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
53
54 #include "config.h"
55 #include "system.h"
56 #include "rtl.h"
57 #include "tm_p.h"
58 #include "flags.h"
59 #include "hard-reg-set.h"
60 #include "regs.h"
61 #include "insn-config.h"
62 #include "insn-flags.h"
63 #include "insn-attr.h"
64 #include "recog.h"
65 #include "function.h"
66 #include "expr.h"
67 #include "real.h"
68 #include "except.h"
69 #include "toplev.h"
70
71 /* ??? Eventually must record somehow the labels used by jumps
72 from nested functions. */
73 /* Pre-record the next or previous real insn for each label?
74 No, this pass is very fast anyway. */
75 /* Condense consecutive labels?
76 This would make life analysis faster, maybe. */
77 /* Optimize jump y; x: ... y: jumpif... x?
78 Don't know if it is worth bothering with. */
79 /* Optimize two cases of conditional jump to conditional jump?
80 This can never delete any instruction or make anything dead,
81 or even change what is live at any point.
82 So perhaps let combiner do it. */
83
84 /* Vector indexed by uid.
85 For each CODE_LABEL, index by its uid to get first unconditional jump
86 that jumps to the label.
87 For each JUMP_INSN, index by its uid to get the next unconditional jump
88 that jumps to the same label.
89 Element 0 is the start of a chain of all return insns.
90 (It is safe to use element 0 because insn uid 0 is not used. */
91
92 static rtx *jump_chain;
93
94 /* Maximum index in jump_chain. */
95
96 static int max_jump_chain;
97
98 /* Set nonzero by jump_optimize if control can fall through
99 to the end of the function. */
100 int can_reach_end;
101
102 /* Indicates whether death notes are significant in cross jump analysis.
103 Normally they are not significant, because of A and B jump to C,
104 and R dies in A, it must die in B. But this might not be true after
105 stack register conversion, and we must compare death notes in that
106 case. */
107
108 static int cross_jump_death_matters = 0;
109
110 static int init_label_info PARAMS ((rtx));
111 static void delete_barrier_successors PARAMS ((rtx));
112 static void mark_all_labels PARAMS ((rtx, int));
113 static rtx delete_unreferenced_labels PARAMS ((rtx));
114 static void delete_noop_moves PARAMS ((rtx));
115 static int calculate_can_reach_end PARAMS ((rtx, int));
116 static int duplicate_loop_exit_test PARAMS ((rtx));
117 static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
118 static void do_cross_jump PARAMS ((rtx, rtx, rtx));
119 static int jump_back_p PARAMS ((rtx, rtx));
120 static int tension_vector_labels PARAMS ((rtx, int));
121 static void mark_jump_label PARAMS ((rtx, rtx, int, int));
122 static void delete_computation PARAMS ((rtx));
123 static void delete_from_jump_chain PARAMS ((rtx));
124 static int delete_labelref_insn PARAMS ((rtx, rtx, int));
125 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
126 static void redirect_tablejump PARAMS ((rtx, rtx));
127 static void jump_optimize_1 PARAMS ((rtx, int, int, int, int));
128 #if ! defined(HAVE_cc0) && ! defined(HAVE_conditional_arithmetic)
129 static rtx find_insert_position PARAMS ((rtx, rtx));
130 #endif
131 static int returnjump_p_1 PARAMS ((rtx *, void *));
132 static void delete_prior_computation PARAMS ((rtx, rtx));
133
134 /* Main external entry point into the jump optimizer. See comments before
135 jump_optimize_1 for descriptions of the arguments. */
136 void
137 jump_optimize (f, cross_jump, noop_moves, after_regscan)
138 rtx f;
139 int cross_jump;
140 int noop_moves;
141 int after_regscan;
142 {
143 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0);
144 }
145
146 /* Alternate entry into the jump optimizer. This entry point only rebuilds
147 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
148 instructions. */
149 void
150 rebuild_jump_labels (f)
151 rtx f;
152 {
153 jump_optimize_1 (f, 0, 0, 0, 1);
154 }
155
156 \f
157 /* Delete no-op jumps and optimize jumps to jumps
158 and jumps around jumps.
159 Delete unused labels and unreachable code.
160
161 If CROSS_JUMP is 1, detect matching code
162 before a jump and its destination and unify them.
163 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
164
165 If NOOP_MOVES is nonzero, delete no-op move insns.
166
167 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
168 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
169
170 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
171 and JUMP_LABEL field for jumping insns.
172
173 If `optimize' is zero, don't change any code,
174 just determine whether control drops off the end of the function.
175 This case occurs when we have -W and not -O.
176 It works because `delete_insn' checks the value of `optimize'
177 and refrains from actually deleting when that is 0. */
178
179 static void
180 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
181 rtx f;
182 int cross_jump;
183 int noop_moves;
184 int after_regscan;
185 int mark_labels_only;
186 {
187 register rtx insn, next;
188 int changed;
189 int old_max_reg;
190 int first = 1;
191 int max_uid = 0;
192 rtx last_insn;
193
194 cross_jump_death_matters = (cross_jump == 2);
195 max_uid = init_label_info (f) + 1;
196
197 /* If we are performing cross jump optimizations, then initialize
198 tables mapping UIDs to EH regions to avoid incorrect movement
199 of insns from one EH region to another. */
200 if (flag_exceptions && cross_jump)
201 init_insn_eh_region (f, max_uid);
202
203 delete_barrier_successors (f);
204
205 /* Leave some extra room for labels and duplicate exit test insns
206 we make. */
207 max_jump_chain = max_uid * 14 / 10;
208 jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));
209
210 mark_all_labels (f, cross_jump);
211
212 /* Keep track of labels used from static data;
213 they cannot ever be deleted. */
214
215 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
216 LABEL_NUSES (XEXP (insn, 0))++;
217
218 check_exception_handler_labels ();
219
220 /* Keep track of labels used for marking handlers for exception
221 regions; they cannot usually be deleted. */
222
223 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
224 LABEL_NUSES (XEXP (insn, 0))++;
225
226 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
227 notes and recompute LABEL_NUSES. */
228 if (mark_labels_only)
229 goto end;
230
231 exception_optimize ();
232
233 last_insn = delete_unreferenced_labels (f);
234
235 if (noop_moves)
236 delete_noop_moves (f);
237
238 /* If we haven't yet gotten to reload and we have just run regscan,
239 delete any insn that sets a register that isn't used elsewhere.
240 This helps some of the optimizations below by having less insns
241 being jumped around. */
242
243 if (optimize && ! reload_completed && after_regscan)
244 for (insn = f; insn; insn = next)
245 {
246 rtx set = single_set (insn);
247
248 next = NEXT_INSN (insn);
249
250 if (set && GET_CODE (SET_DEST (set)) == REG
251 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
252 && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
253 /* We use regno_last_note_uid so as not to delete the setting
254 of a reg that's used in notes. A subsequent optimization
255 might arrange to use that reg for real. */
256 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
257 && ! side_effects_p (SET_SRC (set))
258 && ! find_reg_note (insn, REG_RETVAL, 0)
259 /* An ADDRESSOF expression can turn into a use of the internal arg
260 pointer, so do not delete the initialization of the internal
261 arg pointer yet. If it is truly dead, flow will delete the
262 initializing insn. */
263 && SET_DEST (set) != current_function_internal_arg_pointer)
264 delete_insn (insn);
265 }
266
267 /* Now iterate optimizing jumps until nothing changes over one pass. */
268 changed = 1;
269 old_max_reg = max_reg_num ();
270 while (changed)
271 {
272 changed = 0;
273
274 for (insn = f; insn; insn = next)
275 {
276 rtx reallabelprev;
277 rtx temp, temp1, temp2 = NULL_RTX, temp3, temp4, temp5, temp6;
278 rtx nlabel;
279 int this_is_simplejump, this_is_condjump, reversep = 0;
280 int this_is_condjump_in_parallel;
281
282 next = NEXT_INSN (insn);
283
284 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
285 jump. Try to optimize by duplicating the loop exit test if so.
286 This is only safe immediately after regscan, because it uses
287 the values of regno_first_uid and regno_last_uid. */
288 if (after_regscan && GET_CODE (insn) == NOTE
289 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
290 && (temp1 = next_nonnote_insn (insn)) != 0
291 && simplejump_p (temp1))
292 {
293 temp = PREV_INSN (insn);
294 if (duplicate_loop_exit_test (insn))
295 {
296 changed = 1;
297 next = NEXT_INSN (temp);
298 continue;
299 }
300 }
301
302 if (GET_CODE (insn) != JUMP_INSN)
303 continue;
304
305 this_is_simplejump = simplejump_p (insn);
306 this_is_condjump = condjump_p (insn);
307 this_is_condjump_in_parallel = condjump_in_parallel_p (insn);
308
309 /* Tension the labels in dispatch tables. */
310
311 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
312 changed |= tension_vector_labels (PATTERN (insn), 0);
313 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
314 changed |= tension_vector_labels (PATTERN (insn), 1);
315
316 /* See if this jump goes to another jump and redirect if so. */
317 nlabel = follow_jumps (JUMP_LABEL (insn));
318 if (nlabel != JUMP_LABEL (insn))
319 changed |= redirect_jump (insn, nlabel);
320
321 if (! optimize)
322 continue;
323
324 /* If a dispatch table always goes to the same place,
325 get rid of it and replace the insn that uses it. */
326
327 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
328 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
329 {
330 int i;
331 rtx pat = PATTERN (insn);
332 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
333 int len = XVECLEN (pat, diff_vec_p);
334 rtx dispatch = prev_real_insn (insn);
335 rtx set;
336
337 for (i = 0; i < len; i++)
338 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
339 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
340 break;
341
342 if (i == len
343 && dispatch != 0
344 && GET_CODE (dispatch) == JUMP_INSN
345 && JUMP_LABEL (dispatch) != 0
346 /* Don't mess with a casesi insn.
347 XXX according to the comment before computed_jump_p(),
348 all casesi insns should be a parallel of the jump
349 and a USE of a LABEL_REF. */
350 && ! ((set = single_set (dispatch)) != NULL
351 && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
352 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
353 {
354 redirect_tablejump (dispatch,
355 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
356 changed = 1;
357 }
358 }
359
360 /* If a jump references the end of the function, try to turn
361 it into a RETURN insn, possibly a conditional one. */
362 if (JUMP_LABEL (insn) != 0
363 && (next_active_insn (JUMP_LABEL (insn)) == 0
364 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
365 == RETURN))
366 changed |= redirect_jump (insn, NULL_RTX);
367
368 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
369
370 /* Detect jump to following insn. */
371 if (reallabelprev == insn && this_is_condjump)
372 {
373 next = next_real_insn (JUMP_LABEL (insn));
374 delete_jump (insn);
375 changed = 1;
376 continue;
377 }
378
379 /* Detect a conditional jump going to the same place
380 as an immediately following unconditional jump. */
381 else if (this_is_condjump
382 && (temp = next_active_insn (insn)) != 0
383 && simplejump_p (temp)
384 && (next_active_insn (JUMP_LABEL (insn))
385 == next_active_insn (JUMP_LABEL (temp))))
386 {
387 /* Don't mess up test coverage analysis. */
388 temp2 = temp;
389 if (flag_test_coverage && !reload_completed)
390 for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
391 if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
392 break;
393
394 if (temp2 == temp)
395 {
396 delete_jump (insn);
397 changed = 1;
398 continue;
399 }
400 }
401
402 /* Detect a conditional jump jumping over an unconditional jump. */
403
404 else if ((this_is_condjump || this_is_condjump_in_parallel)
405 && ! this_is_simplejump
406 && reallabelprev != 0
407 && GET_CODE (reallabelprev) == JUMP_INSN
408 && prev_active_insn (reallabelprev) == insn
409 && no_labels_between_p (insn, reallabelprev)
410 && simplejump_p (reallabelprev))
411 {
412 /* When we invert the unconditional jump, we will be
413 decrementing the usage count of its old label.
414 Make sure that we don't delete it now because that
415 might cause the following code to be deleted. */
416 rtx prev_uses = prev_nonnote_insn (reallabelprev);
417 rtx prev_label = JUMP_LABEL (insn);
418
419 if (prev_label)
420 ++LABEL_NUSES (prev_label);
421
422 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
423 {
424 /* It is very likely that if there are USE insns before
425 this jump, they hold REG_DEAD notes. These REG_DEAD
426 notes are no longer valid due to this optimization,
427 and will cause the life-analysis that following passes
428 (notably delayed-branch scheduling) to think that
429 these registers are dead when they are not.
430
431 To prevent this trouble, we just remove the USE insns
432 from the insn chain. */
433
434 while (prev_uses && GET_CODE (prev_uses) == INSN
435 && GET_CODE (PATTERN (prev_uses)) == USE)
436 {
437 rtx useless = prev_uses;
438 prev_uses = prev_nonnote_insn (prev_uses);
439 delete_insn (useless);
440 }
441
442 delete_insn (reallabelprev);
443 changed = 1;
444 }
445
446 /* We can now safely delete the label if it is unreferenced
447 since the delete_insn above has deleted the BARRIER. */
448 if (prev_label && --LABEL_NUSES (prev_label) == 0)
449 delete_insn (prev_label);
450
451 next = NEXT_INSN (insn);
452 }
453
454 /* If we have an unconditional jump preceded by a USE, try to put
455 the USE before the target and jump there. This simplifies many
456 of the optimizations below since we don't have to worry about
457 dealing with these USE insns. We only do this if the label
458 being branch to already has the identical USE or if code
459 never falls through to that label. */
460
461 else if (this_is_simplejump
462 && (temp = prev_nonnote_insn (insn)) != 0
463 && GET_CODE (temp) == INSN
464 && GET_CODE (PATTERN (temp)) == USE
465 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
466 && (GET_CODE (temp1) == BARRIER
467 || (GET_CODE (temp1) == INSN
468 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
469 /* Don't do this optimization if we have a loop containing
470 only the USE instruction, and the loop start label has
471 a usage count of 1. This is because we will redo this
472 optimization everytime through the outer loop, and jump
473 opt will never exit. */
474 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
475 && temp2 == JUMP_LABEL (insn)
476 && LABEL_NUSES (temp2) == 1))
477 {
478 if (GET_CODE (temp1) == BARRIER)
479 {
480 emit_insn_after (PATTERN (temp), temp1);
481 temp1 = NEXT_INSN (temp1);
482 }
483
484 delete_insn (temp);
485 redirect_jump (insn, get_label_before (temp1));
486 reallabelprev = prev_real_insn (temp1);
487 changed = 1;
488 next = NEXT_INSN (insn);
489 }
490
491 /* Simplify if (...) x = a; else x = b; by converting it
492 to x = b; if (...) x = a;
493 if B is sufficiently simple, the test doesn't involve X,
494 and nothing in the test modifies B or X.
495
496 If we have small register classes, we also can't do this if X
497 is a hard register.
498
499 If the "x = b;" insn has any REG_NOTES, we don't do this because
500 of the possibility that we are running after CSE and there is a
501 REG_EQUAL note that is only valid if the branch has already been
502 taken. If we move the insn with the REG_EQUAL note, we may
503 fold the comparison to always be false in a later CSE pass.
504 (We could also delete the REG_NOTES when moving the insn, but it
505 seems simpler to not move it.) An exception is that we can move
506 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
507 value is the same as "b".
508
509 INSN is the branch over the `else' part.
510
511 We set:
512
513 TEMP to the jump insn preceding "x = a;"
514 TEMP1 to X
515 TEMP2 to the insn that sets "x = b;"
516 TEMP3 to the insn that sets "x = a;"
517 TEMP4 to the set of "x = b"; */
518
519 if (this_is_simplejump
520 && (temp3 = prev_active_insn (insn)) != 0
521 && GET_CODE (temp3) == INSN
522 && (temp4 = single_set (temp3)) != 0
523 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
524 && (! SMALL_REGISTER_CLASSES
525 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
526 && (temp2 = next_active_insn (insn)) != 0
527 && GET_CODE (temp2) == INSN
528 && (temp4 = single_set (temp2)) != 0
529 && rtx_equal_p (SET_DEST (temp4), temp1)
530 && ! side_effects_p (SET_SRC (temp4))
531 && ! may_trap_p (SET_SRC (temp4))
532 && (REG_NOTES (temp2) == 0
533 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
534 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
535 && XEXP (REG_NOTES (temp2), 1) == 0
536 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
537 SET_SRC (temp4))))
538 && (temp = prev_active_insn (temp3)) != 0
539 && condjump_p (temp) && ! simplejump_p (temp)
540 /* TEMP must skip over the "x = a;" insn */
541 && prev_real_insn (JUMP_LABEL (temp)) == insn
542 && no_labels_between_p (insn, JUMP_LABEL (temp))
543 /* There must be no other entries to the "x = b;" insn. */
544 && no_labels_between_p (JUMP_LABEL (temp), temp2)
545 /* INSN must either branch to the insn after TEMP2 or the insn
546 after TEMP2 must branch to the same place as INSN. */
547 && (reallabelprev == temp2
548 || ((temp5 = next_active_insn (temp2)) != 0
549 && simplejump_p (temp5)
550 && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
551 {
552 /* The test expression, X, may be a complicated test with
553 multiple branches. See if we can find all the uses of
554 the label that TEMP branches to without hitting a CALL_INSN
555 or a jump to somewhere else. */
556 rtx target = JUMP_LABEL (temp);
557 int nuses = LABEL_NUSES (target);
558 rtx p;
559 #ifdef HAVE_cc0
560 rtx q;
561 #endif
562
563 /* Set P to the first jump insn that goes around "x = a;". */
564 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
565 {
566 if (GET_CODE (p) == JUMP_INSN)
567 {
568 if (condjump_p (p) && ! simplejump_p (p)
569 && JUMP_LABEL (p) == target)
570 {
571 nuses--;
572 if (nuses == 0)
573 break;
574 }
575 else
576 break;
577 }
578 else if (GET_CODE (p) == CALL_INSN)
579 break;
580 }
581
582 #ifdef HAVE_cc0
583 /* We cannot insert anything between a set of cc and its use
584 so if P uses cc0, we must back up to the previous insn. */
585 q = prev_nonnote_insn (p);
586 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
587 && sets_cc0_p (PATTERN (q)))
588 p = q;
589 #endif
590
591 if (p)
592 p = PREV_INSN (p);
593
594 /* If we found all the uses and there was no data conflict, we
595 can move the assignment unless we can branch into the middle
596 from somewhere. */
597 if (nuses == 0 && p
598 && no_labels_between_p (p, insn)
599 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
600 && ! reg_set_between_p (temp1, p, temp3)
601 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
602 || ! modified_between_p (SET_SRC (temp4), p, temp2))
603 /* Verify that registers used by the jump are not clobbered
604 by the instruction being moved. */
605 && ! regs_set_between_p (PATTERN (temp),
606 PREV_INSN (temp2),
607 NEXT_INSN (temp2)))
608 {
609 emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
610 delete_insn (temp2);
611
612 /* Set NEXT to an insn that we know won't go away. */
613 next = next_active_insn (insn);
614
615 /* Delete the jump around the set. Note that we must do
616 this before we redirect the test jumps so that it won't
617 delete the code immediately following the assignment
618 we moved (which might be a jump). */
619
620 delete_insn (insn);
621
622 /* We either have two consecutive labels or a jump to
623 a jump, so adjust all the JUMP_INSNs to branch to where
624 INSN branches to. */
625 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
626 if (GET_CODE (p) == JUMP_INSN)
627 redirect_jump (p, target);
628
629 changed = 1;
630 next = NEXT_INSN (insn);
631 continue;
632 }
633 }
634
635 /* Simplify if (...) { x = a; goto l; } x = b; by converting it
636 to x = a; if (...) goto l; x = b;
637 if A is sufficiently simple, the test doesn't involve X,
638 and nothing in the test modifies A or X.
639
640 If we have small register classes, we also can't do this if X
641 is a hard register.
642
643 If the "x = a;" insn has any REG_NOTES, we don't do this because
644 of the possibility that we are running after CSE and there is a
645 REG_EQUAL note that is only valid if the branch has already been
646 taken. If we move the insn with the REG_EQUAL note, we may
647 fold the comparison to always be false in a later CSE pass.
648 (We could also delete the REG_NOTES when moving the insn, but it
649 seems simpler to not move it.) An exception is that we can move
650 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
651 value is the same as "a".
652
653 INSN is the goto.
654
655 We set:
656
657 TEMP to the jump insn preceding "x = a;"
658 TEMP1 to X
659 TEMP2 to the insn that sets "x = b;"
660 TEMP3 to the insn that sets "x = a;"
661 TEMP4 to the set of "x = a"; */
662
663 if (this_is_simplejump
664 && (temp2 = next_active_insn (insn)) != 0
665 && GET_CODE (temp2) == INSN
666 && (temp4 = single_set (temp2)) != 0
667 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
668 && (! SMALL_REGISTER_CLASSES
669 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
670 && (temp3 = prev_active_insn (insn)) != 0
671 && GET_CODE (temp3) == INSN
672 && (temp4 = single_set (temp3)) != 0
673 && rtx_equal_p (SET_DEST (temp4), temp1)
674 && ! side_effects_p (SET_SRC (temp4))
675 && ! may_trap_p (SET_SRC (temp4))
676 && (REG_NOTES (temp3) == 0
677 || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
678 || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
679 && XEXP (REG_NOTES (temp3), 1) == 0
680 && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
681 SET_SRC (temp4))))
682 && (temp = prev_active_insn (temp3)) != 0
683 && condjump_p (temp) && ! simplejump_p (temp)
684 /* TEMP must skip over the "x = a;" insn */
685 && prev_real_insn (JUMP_LABEL (temp)) == insn
686 && no_labels_between_p (temp, insn))
687 {
688 rtx prev_label = JUMP_LABEL (temp);
689 rtx insert_after = prev_nonnote_insn (temp);
690
691 #ifdef HAVE_cc0
692 /* We cannot insert anything between a set of cc and its use. */
693 if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
694 && sets_cc0_p (PATTERN (insert_after)))
695 insert_after = prev_nonnote_insn (insert_after);
696 #endif
697 ++LABEL_NUSES (prev_label);
698
699 if (insert_after
700 && no_labels_between_p (insert_after, temp)
701 && ! reg_referenced_between_p (temp1, insert_after, temp3)
702 && ! reg_referenced_between_p (temp1, temp3,
703 NEXT_INSN (temp2))
704 && ! reg_set_between_p (temp1, insert_after, temp)
705 && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
706 /* Verify that registers used by the jump are not clobbered
707 by the instruction being moved. */
708 && ! regs_set_between_p (PATTERN (temp),
709 PREV_INSN (temp3),
710 NEXT_INSN (temp3))
711 && invert_jump (temp, JUMP_LABEL (insn)))
712 {
713 emit_insn_after_with_line_notes (PATTERN (temp3),
714 insert_after, temp3);
715 delete_insn (temp3);
716 delete_insn (insn);
717 /* Set NEXT to an insn that we know won't go away. */
718 next = temp2;
719 changed = 1;
720 }
721 if (prev_label && --LABEL_NUSES (prev_label) == 0)
722 delete_insn (prev_label);
723 if (changed)
724 continue;
725 }
726
727 #if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)
728
729 /* If we have if (...) x = exp; and branches are expensive,
730 EXP is a single insn, does not have any side effects, cannot
731 trap, and is not too costly, convert this to
732 t = exp; if (...) x = t;
733
734 Don't do this when we have CC0 because it is unlikely to help
735 and we'd need to worry about where to place the new insn and
736 the potential for conflicts. We also can't do this when we have
737 notes on the insn for the same reason as above.
738
739 If we have conditional arithmetic, this will make this
740 harder to optimize later and isn't needed, so don't do it
741 in that case either.
742
743 We set:
744
745 TEMP to the "x = exp;" insn.
746 TEMP1 to the single set in the "x = exp;" insn.
747 TEMP2 to "x". */
748
749 if (! reload_completed
750 && this_is_condjump && ! this_is_simplejump
751 && BRANCH_COST >= 3
752 && (temp = next_nonnote_insn (insn)) != 0
753 && GET_CODE (temp) == INSN
754 && REG_NOTES (temp) == 0
755 && (reallabelprev == temp
756 || ((temp2 = next_active_insn (temp)) != 0
757 && simplejump_p (temp2)
758 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
759 && (temp1 = single_set (temp)) != 0
760 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
761 && (! SMALL_REGISTER_CLASSES
762 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
763 && GET_CODE (SET_SRC (temp1)) != REG
764 && GET_CODE (SET_SRC (temp1)) != SUBREG
765 && GET_CODE (SET_SRC (temp1)) != CONST_INT
766 && ! side_effects_p (SET_SRC (temp1))
767 && ! may_trap_p (SET_SRC (temp1))
768 && rtx_cost (SET_SRC (temp1), SET) < 10)
769 {
770 rtx new = gen_reg_rtx (GET_MODE (temp2));
771
772 if ((temp3 = find_insert_position (insn, temp))
773 && validate_change (temp, &SET_DEST (temp1), new, 0))
774 {
775 next = emit_insn_after (gen_move_insn (temp2, new), insn);
776 emit_insn_after_with_line_notes (PATTERN (temp),
777 PREV_INSN (temp3), temp);
778 delete_insn (temp);
779 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
780
781 if (after_regscan)
782 {
783 reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
784 old_max_reg = max_reg_num ();
785 }
786 }
787 }
788
789 /* Similarly, if it takes two insns to compute EXP but they
790 have the same destination. Here TEMP3 will be the second
791 insn and TEMP4 the SET from that insn. */
792
793 if (! reload_completed
794 && this_is_condjump && ! this_is_simplejump
795 && BRANCH_COST >= 4
796 && (temp = next_nonnote_insn (insn)) != 0
797 && GET_CODE (temp) == INSN
798 && REG_NOTES (temp) == 0
799 && (temp3 = next_nonnote_insn (temp)) != 0
800 && GET_CODE (temp3) == INSN
801 && REG_NOTES (temp3) == 0
802 && (reallabelprev == temp3
803 || ((temp2 = next_active_insn (temp3)) != 0
804 && simplejump_p (temp2)
805 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
806 && (temp1 = single_set (temp)) != 0
807 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
808 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
809 && (! SMALL_REGISTER_CLASSES
810 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
811 && ! side_effects_p (SET_SRC (temp1))
812 && ! may_trap_p (SET_SRC (temp1))
813 && rtx_cost (SET_SRC (temp1), SET) < 10
814 && (temp4 = single_set (temp3)) != 0
815 && rtx_equal_p (SET_DEST (temp4), temp2)
816 && ! side_effects_p (SET_SRC (temp4))
817 && ! may_trap_p (SET_SRC (temp4))
818 && rtx_cost (SET_SRC (temp4), SET) < 10)
819 {
820 rtx new = gen_reg_rtx (GET_MODE (temp2));
821
822 if ((temp5 = find_insert_position (insn, temp))
823 && (temp6 = find_insert_position (insn, temp3))
824 && validate_change (temp, &SET_DEST (temp1), new, 0))
825 {
826 /* Use the earliest of temp5 and temp6. */
827 if (temp5 != insn)
828 temp6 = temp5;
829 next = emit_insn_after (gen_move_insn (temp2, new), insn);
830 emit_insn_after_with_line_notes (PATTERN (temp),
831 PREV_INSN (temp6), temp);
832 emit_insn_after_with_line_notes
833 (replace_rtx (PATTERN (temp3), temp2, new),
834 PREV_INSN (temp6), temp3);
835 delete_insn (temp);
836 delete_insn (temp3);
837 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
838
839 if (after_regscan)
840 {
841 reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
842 old_max_reg = max_reg_num ();
843 }
844 }
845 }
846
847 /* Finally, handle the case where two insns are used to
848 compute EXP but a temporary register is used. Here we must
849 ensure that the temporary register is not used anywhere else. */
850
851 if (! reload_completed
852 && after_regscan
853 && this_is_condjump && ! this_is_simplejump
854 && BRANCH_COST >= 4
855 && (temp = next_nonnote_insn (insn)) != 0
856 && GET_CODE (temp) == INSN
857 && REG_NOTES (temp) == 0
858 && (temp3 = next_nonnote_insn (temp)) != 0
859 && GET_CODE (temp3) == INSN
860 && REG_NOTES (temp3) == 0
861 && (reallabelprev == temp3
862 || ((temp2 = next_active_insn (temp3)) != 0
863 && simplejump_p (temp2)
864 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
865 && (temp1 = single_set (temp)) != 0
866 && (temp5 = SET_DEST (temp1),
867 (GET_CODE (temp5) == REG
868 || (GET_CODE (temp5) == SUBREG
869 && (temp5 = SUBREG_REG (temp5),
870 GET_CODE (temp5) == REG))))
871 && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
872 && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
873 && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
874 && ! side_effects_p (SET_SRC (temp1))
875 && ! may_trap_p (SET_SRC (temp1))
876 && rtx_cost (SET_SRC (temp1), SET) < 10
877 && (temp4 = single_set (temp3)) != 0
878 && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
879 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
880 && (! SMALL_REGISTER_CLASSES
881 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
882 && rtx_equal_p (SET_DEST (temp4), temp2)
883 && ! side_effects_p (SET_SRC (temp4))
884 && ! may_trap_p (SET_SRC (temp4))
885 && rtx_cost (SET_SRC (temp4), SET) < 10)
886 {
887 rtx new = gen_reg_rtx (GET_MODE (temp2));
888
889 if ((temp5 = find_insert_position (insn, temp))
890 && (temp6 = find_insert_position (insn, temp3))
891 && validate_change (temp3, &SET_DEST (temp4), new, 0))
892 {
893 /* Use the earliest of temp5 and temp6. */
894 if (temp5 != insn)
895 temp6 = temp5;
896 next = emit_insn_after (gen_move_insn (temp2, new), insn);
897 emit_insn_after_with_line_notes (PATTERN (temp),
898 PREV_INSN (temp6), temp);
899 emit_insn_after_with_line_notes (PATTERN (temp3),
900 PREV_INSN (temp6), temp3);
901 delete_insn (temp);
902 delete_insn (temp3);
903 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
904
905 if (after_regscan)
906 {
907 reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
908 old_max_reg = max_reg_num ();
909 }
910 }
911 }
912 #endif /* HAVE_cc0 */
913
914 #ifdef HAVE_conditional_arithmetic
915 /* ??? This is disabled in genconfig, as this simple-minded
916 transformation can incredibly lengthen register lifetimes.
917
918 Consider this example from cexp.c's yyparse:
919
920 234 (set (pc)
921 (if_then_else (ne (reg:DI 149) (const_int 0 [0x0]))
922 (label_ref 248) (pc)))
923 237 (set (reg/i:DI 0 $0) (const_int 1 [0x1]))
924 239 (set (pc) (label_ref 2382))
925 248 (code_label ("yybackup"))
926
927 This will be transformed to:
928
929 237 (set (reg/i:DI 0 $0)
930 (if_then_else:DI (eq (reg:DI 149) (const_int 0 [0x0]))
931 (const_int 1 [0x1]) (reg/i:DI 0 $0)))
932 239 (set (pc)
933 (if_then_else (eq (reg:DI 149) (const_int 0 [0x0]))
934 (label_ref 2382) (pc)))
935
936 which, from this narrow viewpoint looks fine. Except that
937 between this and 3 other ocurrences of the same pattern, $0
938 is now live for basically the entire function, and we'll
939 get an abort in caller_save.
940
941 Any replacement for this code should recall that a set of
942 a register that is not live need not, and indeed should not,
943 be conditionalized. Either that, or delay the transformation
944 until after register allocation. */
945
946 /* See if this is a conditional jump around a small number of
947 instructions that we can conditionalize. Don't do this before
948 the initial CSE pass or after reload.
949
950 We reject any insns that have side effects or may trap.
951 Strictly speaking, this is not needed since the machine may
952 support conditionalizing these too, but we won't deal with that
953 now. Specifically, this means that we can't conditionalize a
954 CALL_INSN, which some machines, such as the ARC, can do, but
955 this is a very minor optimization. */
956 if (this_is_condjump && ! this_is_simplejump
957 && cse_not_expected && ! reload_completed
958 && BRANCH_COST > 2
959 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (insn)), 0),
960 insn))
961 {
962 rtx ourcond = XEXP (SET_SRC (PATTERN (insn)), 0);
963 int num_insns = 0;
964 char *storage = (char *) oballoc (0);
965 int last_insn = 0, failed = 0;
966 rtx changed_jump = 0;
967
968 ourcond = gen_rtx (reverse_condition (GET_CODE (ourcond)),
969 VOIDmode, XEXP (ourcond, 0),
970 XEXP (ourcond, 1));
971
972 /* Scan forward BRANCH_COST real insns looking for the JUMP_LABEL
973 of this insn. We see if we think we can conditionalize the
974 insns we pass. For now, we only deal with insns that have
975 one SET. We stop after an insn that modifies anything in
976 OURCOND, if we have too many insns, or if we have an insn
977 with a side effect or that may trip. Note that we will
978 be modifying any unconditional jumps we encounter to be
979 conditional; this will have the effect of also doing this
980 optimization on the "else" the next time around. */
981 for (temp1 = NEXT_INSN (insn);
982 num_insns <= BRANCH_COST && ! failed && temp1 != 0
983 && GET_CODE (temp1) != CODE_LABEL;
984 temp1 = NEXT_INSN (temp1))
985 {
986 /* Ignore everything but an active insn. */
987 if (GET_RTX_CLASS (GET_CODE (temp1)) != 'i'
988 || GET_CODE (PATTERN (temp1)) == USE
989 || GET_CODE (PATTERN (temp1)) == CLOBBER)
990 continue;
991
992 /* If this was an unconditional jump, record it since we'll
993 need to remove the BARRIER if we succeed. We can only
994 have one such jump since there must be a label after
995 the BARRIER and it's either ours, in which case it's the
996 only one or some other, in which case we'd fail.
997 Likewise if it's a CALL_INSN followed by a BARRIER. */
998
999 if (simplejump_p (temp1)
1000 || (GET_CODE (temp1) == CALL_INSN
1001 && NEXT_INSN (temp1) != 0
1002 && GET_CODE (NEXT_INSN (temp1)) == BARRIER))
1003 {
1004 if (changed_jump == 0)
1005 changed_jump = temp1;
1006 else
1007 changed_jump
1008 = gen_rtx_INSN_LIST (VOIDmode, temp1, changed_jump);
1009 }
1010
1011 /* See if we are allowed another insn and if this insn
1012 if one we think we may be able to handle. */
1013 if (++num_insns > BRANCH_COST
1014 || last_insn
1015 || (((temp2 = single_set (temp1)) == 0
1016 || side_effects_p (SET_SRC (temp2))
1017 || may_trap_p (SET_SRC (temp2)))
1018 && GET_CODE (temp1) != CALL_INSN))
1019 failed = 1;
1020 else if (temp2 != 0)
1021 validate_change (temp1, &SET_SRC (temp2),
1022 gen_rtx_IF_THEN_ELSE
1023 (GET_MODE (SET_DEST (temp2)),
1024 copy_rtx (ourcond),
1025 SET_SRC (temp2), SET_DEST (temp2)),
1026 1);
1027 else
1028 {
1029 /* This is a CALL_INSN that doesn't have a SET. */
1030 rtx *call_loc = &PATTERN (temp1);
1031
1032 if (GET_CODE (*call_loc) == PARALLEL)
1033 call_loc = &XVECEXP (*call_loc, 0, 0);
1034
1035 validate_change (temp1, call_loc,
1036 gen_rtx_IF_THEN_ELSE
1037 (VOIDmode, copy_rtx (ourcond),
1038 *call_loc, const0_rtx),
1039 1);
1040 }
1041
1042
1043 if (modified_in_p (ourcond, temp1))
1044 last_insn = 1;
1045 }
1046
1047 /* If we've reached our jump label, haven't failed, and all
1048 the changes above are valid, we can delete this jump
1049 insn. Also remove a BARRIER after any jump that used
1050 to be unconditional and remove any REG_EQUAL or REG_EQUIV
1051 that might have previously been present on insns we
1052 made conditional. */
1053 if (temp1 == JUMP_LABEL (insn) && ! failed
1054 && apply_change_group ())
1055 {
1056 for (temp1 = NEXT_INSN (insn); temp1 != JUMP_LABEL (insn);
1057 temp1 = NEXT_INSN (temp1))
1058 if (GET_RTX_CLASS (GET_CODE (temp1)) == 'i')
1059 for (temp2 = REG_NOTES (temp1); temp2 != 0;
1060 temp2 = XEXP (temp2, 1))
1061 if (REG_NOTE_KIND (temp2) == REG_EQUAL
1062 || REG_NOTE_KIND (temp2) == REG_EQUIV)
1063 remove_note (temp1, temp2);
1064
1065 if (changed_jump != 0)
1066 {
1067 while (GET_CODE (changed_jump) == INSN_LIST)
1068 {
1069 delete_barrier (NEXT_INSN (XEXP (changed_jump, 0)));
1070 changed_jump = XEXP (changed_jump, 1);
1071 }
1072
1073 delete_barrier (NEXT_INSN (changed_jump));
1074 }
1075
1076 delete_insn (insn);
1077 changed = 1;
1078 continue;
1079 }
1080 else
1081 {
1082 cancel_changes (0);
1083 obfree (storage);
1084 }
1085 }
1086 #endif
1087 /* If branches are expensive, convert
1088 if (foo) bar++; to bar += (foo != 0);
1089 and similarly for "bar--;"
1090
1091 INSN is the conditional branch around the arithmetic. We set:
1092
1093 TEMP is the arithmetic insn.
1094 TEMP1 is the SET doing the arithmetic.
1095 TEMP2 is the operand being incremented or decremented.
1096 TEMP3 to the condition being tested.
1097 TEMP4 to the earliest insn used to find the condition. */
1098
1099 if ((BRANCH_COST >= 2
1100 #ifdef HAVE_incscc
1101 || HAVE_incscc
1102 #endif
1103 #ifdef HAVE_decscc
1104 || HAVE_decscc
1105 #endif
1106 )
1107 && ! reload_completed
1108 && this_is_condjump && ! this_is_simplejump
1109 && (temp = next_nonnote_insn (insn)) != 0
1110 && (temp1 = single_set (temp)) != 0
1111 && (temp2 = SET_DEST (temp1),
1112 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
1113 && GET_CODE (SET_SRC (temp1)) == PLUS
1114 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
1115 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
1116 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
1117 && ! side_effects_p (temp2)
1118 && ! may_trap_p (temp2)
1119 /* INSN must either branch to the insn after TEMP or the insn
1120 after TEMP must branch to the same place as INSN. */
1121 && (reallabelprev == temp
1122 || ((temp3 = next_active_insn (temp)) != 0
1123 && simplejump_p (temp3)
1124 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
1125 && (temp3 = get_condition (insn, &temp4)) != 0
1126 /* We must be comparing objects whose modes imply the size.
1127 We could handle BLKmode if (1) emit_store_flag could
1128 and (2) we could find the size reliably. */
1129 && GET_MODE (XEXP (temp3, 0)) != BLKmode
1130 && can_reverse_comparison_p (temp3, insn))
1131 {
1132 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
1133 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1134
1135 start_sequence ();
1136
1137 /* It must be the case that TEMP2 is not modified in the range
1138 [TEMP4, INSN). The one exception we make is if the insn
1139 before INSN sets TEMP2 to something which is also unchanged
1140 in that range. In that case, we can move the initialization
1141 into our sequence. */
1142
1143 if ((temp5 = prev_active_insn (insn)) != 0
1144 && no_labels_between_p (temp5, insn)
1145 && GET_CODE (temp5) == INSN
1146 && (temp6 = single_set (temp5)) != 0
1147 && rtx_equal_p (temp2, SET_DEST (temp6))
1148 && (CONSTANT_P (SET_SRC (temp6))
1149 || GET_CODE (SET_SRC (temp6)) == REG
1150 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1151 {
1152 emit_insn (PATTERN (temp5));
1153 init_insn = temp5;
1154 init = SET_SRC (temp6);
1155 }
1156
1157 if (CONSTANT_P (init)
1158 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1159 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1160 XEXP (temp3, 0), XEXP (temp3, 1),
1161 VOIDmode,
1162 (code == LTU || code == LEU
1163 || code == GTU || code == GEU), 1);
1164
1165 /* If we can do the store-flag, do the addition or
1166 subtraction. */
1167
1168 if (target)
1169 target = expand_binop (GET_MODE (temp2),
1170 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1171 ? add_optab : sub_optab),
1172 temp2, target, temp2, 0, OPTAB_WIDEN);
1173
1174 if (target != 0)
1175 {
1176 /* Put the result back in temp2 in case it isn't already.
1177 Then replace the jump, possible a CC0-setting insn in
1178 front of the jump, and TEMP, with the sequence we have
1179 made. */
1180
1181 if (target != temp2)
1182 emit_move_insn (temp2, target);
1183
1184 seq = get_insns ();
1185 end_sequence ();
1186
1187 emit_insns_before (seq, temp4);
1188 delete_insn (temp);
1189
1190 if (init_insn)
1191 delete_insn (init_insn);
1192
1193 next = NEXT_INSN (insn);
1194 #ifdef HAVE_cc0
1195 delete_insn (prev_nonnote_insn (insn));
1196 #endif
1197 delete_insn (insn);
1198
1199 if (after_regscan)
1200 {
1201 reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
1202 old_max_reg = max_reg_num ();
1203 }
1204
1205 changed = 1;
1206 continue;
1207 }
1208 else
1209 end_sequence ();
1210 }
1211
1212 /* Try to use a conditional move (if the target has them), or a
1213 store-flag insn. If the target has conditional arithmetic as
1214 well as conditional move, the above code will have done something.
1215 Note that we prefer the above code since it is more general: the
1216 code below can make changes that require work to undo.
1217
1218 The general case here is:
1219
1220 1) x = a; if (...) x = b; and
1221 2) if (...) x = b;
1222
1223 If the jump would be faster, the machine should not have defined
1224 the movcc or scc insns!. These cases are often made by the
1225 previous optimization.
1226
1227 The second case is treated as x = x; if (...) x = b;.
1228
1229 INSN here is the jump around the store. We set:
1230
1231 TEMP to the "x op= b;" insn.
1232 TEMP1 to X.
1233 TEMP2 to B.
1234 TEMP3 to A (X in the second case).
1235 TEMP4 to the condition being tested.
1236 TEMP5 to the earliest insn used to find the condition.
1237 TEMP6 to the SET of TEMP. */
1238
1239 if (/* We can't do this after reload has completed. */
1240 ! reload_completed
1241 #ifdef HAVE_conditional_arithmetic
1242 /* Defer this until after CSE so the above code gets the
1243 first crack at it. */
1244 && cse_not_expected
1245 #endif
1246 && this_is_condjump && ! this_is_simplejump
1247 /* Set TEMP to the "x = b;" insn. */
1248 && (temp = next_nonnote_insn (insn)) != 0
1249 && GET_CODE (temp) == INSN
1250 && (temp6 = single_set (temp)) != NULL_RTX
1251 && GET_CODE (temp1 = SET_DEST (temp6)) == REG
1252 && (! SMALL_REGISTER_CLASSES
1253 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
1254 && ! side_effects_p (temp2 = SET_SRC (temp6))
1255 && ! may_trap_p (temp2)
1256 /* Allow either form, but prefer the former if both apply.
1257 There is no point in using the old value of TEMP1 if
1258 it is a register, since cse will alias them. It can
1259 lose if the old value were a hard register since CSE
1260 won't replace hard registers. Avoid using TEMP3 if
1261 small register classes and it is a hard register. */
1262 && (((temp3 = reg_set_last (temp1, insn)) != 0
1263 && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
1264 && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
1265 /* Make the latter case look like x = x; if (...) x = b; */
1266 || (temp3 = temp1, 1))
1267 /* INSN must either branch to the insn after TEMP or the insn
1268 after TEMP must branch to the same place as INSN. */
1269 && (reallabelprev == temp
1270 || ((temp4 = next_active_insn (temp)) != 0
1271 && simplejump_p (temp4)
1272 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
1273 && (temp4 = get_condition (insn, &temp5)) != 0
1274 /* We must be comparing objects whose modes imply the size.
1275 We could handle BLKmode if (1) emit_store_flag could
1276 and (2) we could find the size reliably. */
1277 && GET_MODE (XEXP (temp4, 0)) != BLKmode
1278 /* Even if branches are cheap, the store_flag optimization
1279 can win when the operation to be performed can be
1280 expressed directly. */
1281 #ifdef HAVE_cc0
1282 /* If the previous insn sets CC0 and something else, we can't
1283 do this since we are going to delete that insn. */
1284
1285 && ! ((temp6 = prev_nonnote_insn (insn)) != 0
1286 && GET_CODE (temp6) == INSN
1287 && (sets_cc0_p (PATTERN (temp6)) == -1
1288 || (sets_cc0_p (PATTERN (temp6)) == 1
1289 && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
1290 #endif
1291 )
1292 {
1293 #ifdef HAVE_conditional_move
1294 /* First try a conditional move. */
1295 {
1296 enum rtx_code code = GET_CODE (temp4);
1297 rtx var = temp1;
1298 rtx cond0, cond1, aval, bval;
1299 rtx target, new_insn;
1300
1301 /* Copy the compared variables into cond0 and cond1, so that
1302 any side effects performed in or after the old comparison,
1303 will not affect our compare which will come later. */
1304 /* ??? Is it possible to just use the comparison in the jump
1305 insn? After all, we're going to delete it. We'd have
1306 to modify emit_conditional_move to take a comparison rtx
1307 instead or write a new function. */
1308 cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
1309 /* We want the target to be able to simplify comparisons with
1310 zero (and maybe other constants as well), so don't create
1311 pseudos for them. There's no need to either. */
1312 if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
1313 || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
1314 cond1 = XEXP (temp4, 1);
1315 else
1316 cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));
1317
1318 /* Careful about copying these values -- an IOR or what may
1319 need to do other things, like clobber flags. */
1320 /* ??? Assume for the moment that AVAL is ok. */
1321 aval = temp3;
1322
1323 start_sequence ();
1324
1325 /* We're dealing with a single_set insn with no side effects
1326 on SET_SRC. We do need to be reasonably certain that if
1327 we need to force BVAL into a register that we won't
1328 clobber the flags -- general_operand should suffice. */
1329 if (general_operand (temp2, GET_MODE (var)))
1330 bval = temp2;
1331 else
1332 {
1333 bval = gen_reg_rtx (GET_MODE (var));
1334 new_insn = copy_rtx (temp);
1335 temp6 = single_set (new_insn);
1336 SET_DEST (temp6) = bval;
1337 emit_insn (PATTERN (new_insn));
1338 }
1339
1340 target = emit_conditional_move (var, code,
1341 cond0, cond1, VOIDmode,
1342 aval, bval, GET_MODE (var),
1343 (code == LTU || code == GEU
1344 || code == LEU || code == GTU));
1345
1346 if (target)
1347 {
1348 rtx seq1, seq2, last;
1349 int copy_ok;
1350
1351 /* Save the conditional move sequence but don't emit it
1352 yet. On some machines, like the alpha, it is possible
1353 that temp5 == insn, so next generate the sequence that
1354 saves the compared values and then emit both
1355 sequences ensuring seq1 occurs before seq2. */
1356 seq2 = get_insns ();
1357 end_sequence ();
1358
1359 /* "Now that we can't fail..." Famous last words.
1360 Generate the copy insns that preserve the compared
1361 values. */
1362 start_sequence ();
1363 emit_move_insn (cond0, XEXP (temp4, 0));
1364 if (cond1 != XEXP (temp4, 1))
1365 emit_move_insn (cond1, XEXP (temp4, 1));
1366 seq1 = get_insns ();
1367 end_sequence ();
1368
1369 /* Validate the sequence -- this may be some weird
1370 bit-extract-and-test instruction for which there
1371 exists no complimentary bit-extract insn. */
1372 copy_ok = 1;
1373 for (last = seq1; last ; last = NEXT_INSN (last))
1374 if (recog_memoized (last) < 0)
1375 {
1376 copy_ok = 0;
1377 break;
1378 }
1379
1380 if (copy_ok)
1381 {
1382 emit_insns_before (seq1, temp5);
1383
1384 /* Insert conditional move after insn, to be sure
1385 that the jump and a possible compare won't be
1386 separated. */
1387 last = emit_insns_after (seq2, insn);
1388
1389 /* ??? We can also delete the insn that sets X to A.
1390 Flow will do it too though. */
1391 delete_insn (temp);
1392 next = NEXT_INSN (insn);
1393 delete_jump (insn);
1394
1395 if (after_regscan)
1396 {
1397 reg_scan_update (seq1, NEXT_INSN (last),
1398 old_max_reg);
1399 old_max_reg = max_reg_num ();
1400 }
1401
1402 changed = 1;
1403 continue;
1404 }
1405 }
1406 else
1407 end_sequence ();
1408 }
1409 #endif
1410
1411 /* That didn't work, try a store-flag insn.
1412
1413 We further divide the cases into:
1414
1415 1) x = a; if (...) x = b; and either A or B is zero,
1416 2) if (...) x = 0; and jumps are expensive,
1417 3) x = a; if (...) x = b; and A and B are constants where all
1418 the set bits in A are also set in B and jumps are expensive,
1419 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
1420 more expensive, and
1421 5) if (...) x = b; if jumps are even more expensive. */
1422
1423 if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
1424 /* We will be passing this as operand into expand_and. No
1425 good if it's not valid as an operand. */
1426 && general_operand (temp2, GET_MODE (temp2))
1427 && ((GET_CODE (temp3) == CONST_INT)
1428 /* Make the latter case look like
1429 x = x; if (...) x = 0; */
1430 || (temp3 = temp1,
1431 ((BRANCH_COST >= 2
1432 && temp2 == const0_rtx)
1433 || BRANCH_COST >= 3)))
1434 /* If B is zero, OK; if A is zero, can only do (1) if we
1435 can reverse the condition. See if (3) applies possibly
1436 by reversing the condition. Prefer reversing to (4) when
1437 branches are very expensive. */
1438 && (((BRANCH_COST >= 2
1439 || STORE_FLAG_VALUE == -1
1440 || (STORE_FLAG_VALUE == 1
1441 /* Check that the mask is a power of two,
1442 so that it can probably be generated
1443 with a shift. */
1444 && GET_CODE (temp3) == CONST_INT
1445 && exact_log2 (INTVAL (temp3)) >= 0))
1446 && (reversep = 0, temp2 == const0_rtx))
1447 || ((BRANCH_COST >= 2
1448 || STORE_FLAG_VALUE == -1
1449 || (STORE_FLAG_VALUE == 1
1450 && GET_CODE (temp2) == CONST_INT
1451 && exact_log2 (INTVAL (temp2)) >= 0))
1452 && temp3 == const0_rtx
1453 && (reversep = can_reverse_comparison_p (temp4, insn)))
1454 || (BRANCH_COST >= 2
1455 && GET_CODE (temp2) == CONST_INT
1456 && GET_CODE (temp3) == CONST_INT
1457 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
1458 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
1459 && (reversep = can_reverse_comparison_p (temp4,
1460 insn)))))
1461 || BRANCH_COST >= 3)
1462 )
1463 {
1464 enum rtx_code code = GET_CODE (temp4);
1465 rtx uval, cval, var = temp1;
1466 int normalizep;
1467 rtx target;
1468
1469 /* If necessary, reverse the condition. */
1470 if (reversep)
1471 code = reverse_condition (code), uval = temp2, cval = temp3;
1472 else
1473 uval = temp3, cval = temp2;
1474
1475 /* If CVAL is non-zero, normalize to -1. Otherwise, if UVAL
1476 is the constant 1, it is best to just compute the result
1477 directly. If UVAL is constant and STORE_FLAG_VALUE
1478 includes all of its bits, it is best to compute the flag
1479 value unnormalized and `and' it with UVAL. Otherwise,
1480 normalize to -1 and `and' with UVAL. */
1481 normalizep = (cval != const0_rtx ? -1
1482 : (uval == const1_rtx ? 1
1483 : (GET_CODE (uval) == CONST_INT
1484 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
1485 ? 0 : -1));
1486
1487 /* We will be putting the store-flag insn immediately in
1488 front of the comparison that was originally being done,
1489 so we know all the variables in TEMP4 will be valid.
1490 However, this might be in front of the assignment of
1491 A to VAR. If it is, it would clobber the store-flag
1492 we will be emitting.
1493
1494 Therefore, emit into a temporary which will be copied to
1495 VAR immediately after TEMP. */
1496
1497 start_sequence ();
1498 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
1499 XEXP (temp4, 0), XEXP (temp4, 1),
1500 VOIDmode,
1501 (code == LTU || code == LEU
1502 || code == GEU || code == GTU),
1503 normalizep);
1504 if (target)
1505 {
1506 rtx seq;
1507 rtx before = insn;
1508
1509 seq = get_insns ();
1510 end_sequence ();
1511
1512 /* Put the store-flag insns in front of the first insn
1513 used to compute the condition to ensure that we
1514 use the same values of them as the current
1515 comparison. However, the remainder of the insns we
1516 generate will be placed directly in front of the
1517 jump insn, in case any of the pseudos we use
1518 are modified earlier. */
1519
1520 emit_insns_before (seq, temp5);
1521
1522 start_sequence ();
1523
1524 /* Both CVAL and UVAL are non-zero. */
1525 if (cval != const0_rtx && uval != const0_rtx)
1526 {
1527 rtx tem1, tem2;
1528
1529 tem1 = expand_and (uval, target, NULL_RTX);
1530 if (GET_CODE (cval) == CONST_INT
1531 && GET_CODE (uval) == CONST_INT
1532 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
1533 tem2 = cval;
1534 else
1535 {
1536 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
1537 target, NULL_RTX, 0);
1538 tem2 = expand_and (cval, tem2,
1539 (GET_CODE (tem2) == REG
1540 ? tem2 : 0));
1541 }
1542
1543 /* If we usually make new pseudos, do so here. This
1544 turns out to help machines that have conditional
1545 move insns. */
1546 /* ??? Conditional moves have already been handled.
1547 This may be obsolete. */
1548
1549 if (flag_expensive_optimizations)
1550 target = 0;
1551
1552 target = expand_binop (GET_MODE (var), ior_optab,
1553 tem1, tem2, target,
1554 1, OPTAB_WIDEN);
1555 }
1556 else if (normalizep != 1)
1557 {
1558 /* We know that either CVAL or UVAL is zero. If
1559 UVAL is zero, negate TARGET and `and' with CVAL.
1560 Otherwise, `and' with UVAL. */
1561 if (uval == const0_rtx)
1562 {
1563 target = expand_unop (GET_MODE (var), one_cmpl_optab,
1564 target, NULL_RTX, 0);
1565 uval = cval;
1566 }
1567
1568 target = expand_and (uval, target,
1569 (GET_CODE (target) == REG
1570 && ! preserve_subexpressions_p ()
1571 ? target : NULL_RTX));
1572 }
1573
1574 emit_move_insn (var, target);
1575 seq = get_insns ();
1576 end_sequence ();
1577 #ifdef HAVE_cc0
1578 /* If INSN uses CC0, we must not separate it from the
1579 insn that sets cc0. */
1580 if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
1581 before = prev_nonnote_insn (before);
1582 #endif
1583 emit_insns_before (seq, before);
1584
1585 delete_insn (temp);
1586 next = NEXT_INSN (insn);
1587 delete_jump (insn);
1588
1589 if (after_regscan)
1590 {
1591 reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
1592 old_max_reg = max_reg_num ();
1593 }
1594
1595 changed = 1;
1596 continue;
1597 }
1598 else
1599 end_sequence ();
1600 }
1601 }
1602
1603
1604 /* Simplify if (...) x = 1; else {...} if (x) ...
1605 We recognize this case scanning backwards as well.
1606
1607 TEMP is the assignment to x;
1608 TEMP1 is the label at the head of the second if. */
1609 /* ?? This should call get_condition to find the values being
1610 compared, instead of looking for a COMPARE insn when HAVE_cc0
1611 is not defined. This would allow it to work on the m88k. */
1612 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1613 is not defined and the condition is tested by a separate compare
1614 insn. This is because the code below assumes that the result
1615 of the compare dies in the following branch.
1616
1617 Not only that, but there might be other insns between the
1618 compare and branch whose results are live. Those insns need
1619 to be executed.
1620
1621 A way to fix this is to move the insns at JUMP_LABEL (insn)
1622 to before INSN. If we are running before flow, they will
1623 be deleted if they aren't needed. But this doesn't work
1624 well after flow.
1625
1626 This is really a special-case of jump threading, anyway. The
1627 right thing to do is to replace this and jump threading with
1628 much simpler code in cse.
1629
1630 This code has been turned off in the non-cc0 case in the
1631 meantime. */
1632
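1632 /* A hedged source-level illustration of the shape matched below
   (hypothetical names; never compiled):
#if 0
	  if (cond)
	    flag = 1;		/* TEMP: constant store into FLAG.  */
	  else
	    { /* ...  */ }
	  if (flag)		/* TEMP1: the next test is of FLAG, so its */
	    goto somewhere;	/* outcome is already known on this path.  */
#endif
   */
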
1633 #ifdef HAVE_cc0
1634 else if (this_is_simplejump
1635 /* Safe to skip USE and CLOBBER insns here
1636 since they will not be deleted. */
1637 && (temp = prev_active_insn (insn))
1638 && no_labels_between_p (temp, insn)
1639 && GET_CODE (temp) == INSN
1640 && GET_CODE (PATTERN (temp)) == SET
1641 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1642 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1643 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1644 /* If we find that the next value tested is `x'
1645 (TEMP1 is the insn where this happens), win. */
1646 && GET_CODE (temp1) == INSN
1647 && GET_CODE (PATTERN (temp1)) == SET
1648 #ifdef HAVE_cc0
1649 /* Does temp1 `tst' the value of x? */
1650 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1651 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1652 && (temp1 = next_nonnote_insn (temp1))
1653 #else
1654 /* Does temp1 compare the value of x against zero? */
1655 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1656 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1657 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1658 == SET_DEST (PATTERN (temp)))
1659 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1660 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1661 #endif
1662 && condjump_p (temp1))
1663 {
1664 /* Get the if_then_else from the condjump. */
1665 rtx choice = SET_SRC (PATTERN (temp1));
1666 if (GET_CODE (choice) == IF_THEN_ELSE)
1667 {
1668 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1669 rtx val = SET_SRC (PATTERN (temp));
1670 rtx cond
1671 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1672 val, const0_rtx);
1673 rtx ultimate;
1674
1675 if (cond == const_true_rtx)
1676 ultimate = XEXP (choice, 1);
1677 else if (cond == const0_rtx)
1678 ultimate = XEXP (choice, 2);
1679 else
1680 ultimate = 0;
1681
1682 if (ultimate == pc_rtx)
1683 ultimate = get_label_after (temp1);
1684 else if (ultimate && GET_CODE (ultimate) != RETURN)
1685 ultimate = XEXP (ultimate, 0);
1686
1687 if (ultimate && JUMP_LABEL (insn) != ultimate)
1688 changed |= redirect_jump (insn, ultimate);
1689 }
1690 }
1691 #endif
1692
1693 #if 0
1694 /* @@ This needs a bit of work before it will be right.
1695
1696 Any type of comparison can be accepted for the first and
1697 second compare. When rewriting the first jump, we must
1698 compute what conditions can reach label3, and use the
1699 appropriate code.  We cannot simply reverse/swap the code
1700 of the first jump. In some cases, the second jump must be
1701 rewritten also.
1702
1703 For example,
1704 < == converts to > ==
1705 < != converts to == >
1706 etc.
1707
1708 If the code is written to only accept an '==' test for the second
1709 compare, then all that needs to be done is to swap the condition
1710 of the first branch.
1711
1712 It is questionable whether we want this optimization anyway,
1713 since if the user wrote the code this way because he/she knew
1714 that the jump to label1 is taken most of the time, then
1715 rewriting it gives slower code. */
1716 /* @@ This should call get_condition to find the values being
1717 compared, instead of looking for a COMPARE insn when HAVE_cc0
1718 is not defined. This would allow it to work on the m88k. */
1719 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1720 is not defined and the condition is tested by a separate compare
1721 insn. This is because the code below assumes that the result
1722 of the compare dies in the following branch. */
1723
1724 /* Simplify test a ~= b
1725 condjump label1;
1726 test a == b
1727 condjump label2;
1728 jump label3;
1729 label1:
1730
1731 rewriting as
1732 test a ~~= b
1733 condjump label3
1734 test a == b
1735 condjump label2
1736 label1:
1737
1738 where ~= is an inequality, e.g. >, and ~~= is the swapped
1739 inequality, e.g. <.
1740
1741 We recognize this case scanning backwards.
1742
1743 TEMP is the conditional jump to `label2';
1744 TEMP1 is the test for `a == b';
1745 TEMP2 is the conditional jump to `label1';
1746 TEMP3 is the test for `a ~= b'. */
1747 else if (this_is_simplejump
1748 && (temp = prev_active_insn (insn))
1749 && no_labels_between_p (temp, insn)
1750 && condjump_p (temp)
1751 && (temp1 = prev_active_insn (temp))
1752 && no_labels_between_p (temp1, temp)
1753 && GET_CODE (temp1) == INSN
1754 && GET_CODE (PATTERN (temp1)) == SET
1755 #ifdef HAVE_cc0
1756 && sets_cc0_p (PATTERN (temp1)) == 1
1757 #else
1758 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1759 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1760 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1761 #endif
1762 && (temp2 = prev_active_insn (temp1))
1763 && no_labels_between_p (temp2, temp1)
1764 && condjump_p (temp2)
1765 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1766 && (temp3 = prev_active_insn (temp2))
1767 && no_labels_between_p (temp3, temp2)
1768 && GET_CODE (PATTERN (temp3)) == SET
1769 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1770 SET_DEST (PATTERN (temp1)))
1771 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1772 SET_SRC (PATTERN (temp3)))
1773 && ! inequality_comparisons_p (PATTERN (temp))
1774 && inequality_comparisons_p (PATTERN (temp2)))
1775 {
1776 rtx fallthrough_label = JUMP_LABEL (temp2);
1777
1778 ++LABEL_NUSES (fallthrough_label);
1779 if (swap_jump (temp2, JUMP_LABEL (insn)))
1780 {
1781 delete_insn (insn);
1782 changed = 1;
1783 }
1784
1785 if (--LABEL_NUSES (fallthrough_label) == 0)
1786 delete_insn (fallthrough_label);
1787 }
1788 #endif
1789 /* Simplify if (...) {... x = 1;} if (x) ...
1790
1791 We recognize this case backwards.
1792
1793 TEMP is the test of `x';
1794 TEMP1 is the assignment to `x' at the end of the
1795 previous statement. */
1796 /* @@ This should call get_condition to find the values being
1797 compared, instead of looking for a COMPARE insn when HAVE_cc0
1798 is not defined. This would allow it to work on the m88k. */
1799 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1800 is not defined and the condition is tested by a separate compare
1801 insn. This is because the code below assumes that the result
1802 of the compare dies in the following branch. */
1803
1804 /* ??? This has to be turned off. The problem is that the
1805 unconditional jump might indirectly end up branching to the
1806 label between TEMP1 and TEMP. We can't detect this, in general,
1807 since it may become a jump to there after further optimizations.
1808 If that jump is done, it will be deleted, so we will retry
1809 this optimization in the next pass, producing an infinite loop.
1810
1811 The present code prevents this by putting the jump after the
1812 label, but this is not logically correct. */
1813 #if 0
1814 else if (this_is_condjump
1815 /* Safe to skip USE and CLOBBER insns here
1816 since they will not be deleted. */
1817 && (temp = prev_active_insn (insn))
1818 && no_labels_between_p (temp, insn)
1819 && GET_CODE (temp) == INSN
1820 && GET_CODE (PATTERN (temp)) == SET
1821 #ifdef HAVE_cc0
1822 && sets_cc0_p (PATTERN (temp)) == 1
1823 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1824 #else
1825 /* TEMP must be a compare insn; we cannot accept a register
1826 to register move here, since it may not be simply a
1827 tst insn. */
1828 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1829 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1830 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1831 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1832 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1833 #endif
1834 /* May skip USE or CLOBBER insns here
1835 for checking for opportunity, since we
1836 take care of them later. */
1837 && (temp1 = prev_active_insn (temp))
1838 && GET_CODE (temp1) == INSN
1839 && GET_CODE (PATTERN (temp1)) == SET
1840 #ifdef HAVE_cc0
1841 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1842 #else
1843 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1844 == SET_DEST (PATTERN (temp1)))
1845 #endif
1846 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1847 /* If this isn't true, cse will do the job. */
1848 && ! no_labels_between_p (temp1, temp))
1849 {
1850 /* Get the if_then_else from the condjump. */
1851 rtx choice = SET_SRC (PATTERN (insn));
1852 if (GET_CODE (choice) == IF_THEN_ELSE
1853 && (GET_CODE (XEXP (choice, 0)) == EQ
1854 || GET_CODE (XEXP (choice, 0)) == NE))
1855 {
1856 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1857 rtx last_insn;
1858 rtx ultimate;
1859 rtx p;
1860
1861 /* Get the place that condjump will jump to
1862 if it is reached from here. */
1863 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1864 == want_nonzero)
1865 ultimate = XEXP (choice, 1);
1866 else
1867 ultimate = XEXP (choice, 2);
1868 /* Get it as a CODE_LABEL. */
1869 if (ultimate == pc_rtx)
1870 ultimate = get_label_after (insn);
1871 else
1872 /* Get the label out of the LABEL_REF. */
1873 ultimate = XEXP (ultimate, 0);
1874
1875 /* Insert the jump immediately before TEMP, specifically
1876 after the label that is between TEMP1 and TEMP. */
1877 last_insn = PREV_INSN (temp);
1878
1879 /* If we would be branching to the next insn, the jump
1880 would immediately be deleted and then re-inserted in
1881 a subsequent pass over the code. So don't do anything
1882 in that case. */
1883 if (next_active_insn (last_insn)
1884 != next_active_insn (ultimate))
1885 {
1886 emit_barrier_after (last_insn);
1887 p = emit_jump_insn_after (gen_jump (ultimate),
1888 last_insn);
1889 JUMP_LABEL (p) = ultimate;
1890 ++LABEL_NUSES (ultimate);
1891 if (INSN_UID (ultimate) < max_jump_chain
1892 && INSN_UID (p) < max_jump_chain)
1893 {
1894 jump_chain[INSN_UID (p)]
1895 = jump_chain[INSN_UID (ultimate)];
1896 jump_chain[INSN_UID (ultimate)] = p;
1897 }
1898 changed = 1;
1899 continue;
1900 }
1901 }
1902 }
1903 #endif
1904 #ifdef HAVE_trap
1905 /* Detect a conditional jump jumping over an unconditional trap. */
1906 else if (HAVE_trap
1907 && this_is_condjump && ! this_is_simplejump
1908 && reallabelprev != 0
1909 && GET_CODE (reallabelprev) == INSN
1910 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1911 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1912 && prev_active_insn (reallabelprev) == insn
1913 && no_labels_between_p (insn, reallabelprev)
1914 && (temp2 = get_condition (insn, &temp4))
1915 && can_reverse_comparison_p (temp2, insn))
1916 {
1917 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1918 XEXP (temp2, 0), XEXP (temp2, 1),
1919 TRAP_CODE (PATTERN (reallabelprev)));
1920
1921 if (new)
1922 {
1923 emit_insn_before (new, temp4);
1924 delete_insn (reallabelprev);
1925 delete_jump (insn);
1926 changed = 1;
1927 continue;
1928 }
1929 }
1930 /* Detect a jump jumping to an unconditional trap. */
1931 else if (HAVE_trap && this_is_condjump
1932 && (temp = next_active_insn (JUMP_LABEL (insn)))
1933 && GET_CODE (temp) == INSN
1934 && GET_CODE (PATTERN (temp)) == TRAP_IF
1935 && (this_is_simplejump
1936 || (temp2 = get_condition (insn, &temp4))))
1937 {
1938 rtx tc = TRAP_CONDITION (PATTERN (temp));
1939
1940 if (tc == const_true_rtx
1941 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1942 {
1943 rtx new;
1944 /* Replace an unconditional jump to a trap with a trap. */
1945 if (this_is_simplejump)
1946 {
1947 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1948 delete_jump (insn);
1949 changed = 1;
1950 continue;
1951 }
1952 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1953 XEXP (temp2, 1),
1954 TRAP_CODE (PATTERN (temp)));
1955 if (new)
1956 {
1957 emit_insn_before (new, temp4);
1958 delete_jump (insn);
1959 changed = 1;
1960 continue;
1961 }
1962 }
1963 /* If the trap condition and jump condition are mutually
1964 exclusive, redirect the jump to the following insn. */
1965 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1966 && ! this_is_simplejump
1967 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
1968 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
1969 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
1970 && redirect_jump (insn, get_label_after (temp)))
1971 {
1972 changed = 1;
1973 continue;
1974 }
1975 }
1976 #endif
1977 else
1978 {
1979 /* Now that the jump has been tensioned,
1980 try cross jumping: check for identical code
1981 before the jump and before its target label. */
1982
1983 /* First, cross jumping of conditional jumps: */
1984
1985 if (cross_jump && condjump_p (insn))
1986 {
1987 rtx newjpos, newlpos;
1988 rtx x = prev_real_insn (JUMP_LABEL (insn));
1989
1990 /* A conditional jump may be crossjumped
1991 only if the place it jumps to follows
1992 an opposing jump that comes back here. */
1993
1994 if (x != 0 && ! jump_back_p (x, insn))
1995 /* We have no opposing jump;
1996 cannot cross jump this insn. */
1997 x = 0;
1998
1999 newjpos = 0;
2000 /* X is nonzero if it is ok to cross jump
2001 to code before X.  If so, see if the code matches. */
2002 if (x != 0)
2003 find_cross_jump (insn, x, 2,
2004 &newjpos, &newlpos);
2005
2006 if (newjpos != 0)
2007 {
2008 do_cross_jump (insn, newjpos, newlpos);
2009 /* Make the old conditional jump
2010 into an unconditional one. */
2011 SET_SRC (PATTERN (insn))
2012 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
2013 INSN_CODE (insn) = -1;
2014 emit_barrier_after (insn);
2015 /* Add to jump_chain unless this is a new label
2016 whose UID is too large. */
2017 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
2018 {
2019 jump_chain[INSN_UID (insn)]
2020 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2021 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2022 }
2023 changed = 1;
2024 next = insn;
2025 }
2026 }
2027
2028 /* Cross jumping of unconditional jumps:
2029 a few differences. */
2030
2031 if (cross_jump && simplejump_p (insn))
2032 {
2033 rtx newjpos, newlpos;
2034 rtx target;
2035
2036 newjpos = 0;
2037
2038 /* See if the code before this jump matches
2039 the code before its target label. */
2040 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2041 &newjpos, &newlpos);
2042
2043 /* If we cannot cross jump to code before the label,
2044 see if we can cross jump to another jump to
2045 the same label. */
2046 /* Try each other jump to this label. */
2047 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2048 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2049 target != 0 && newjpos == 0;
2050 target = jump_chain[INSN_UID (target)])
2051 if (target != insn
2052 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2053 /* Ignore TARGET if it's deleted. */
2054 && ! INSN_DELETED_P (target))
2055 find_cross_jump (insn, target, 2,
2056 &newjpos, &newlpos);
2057
2058 if (newjpos != 0)
2059 {
2060 do_cross_jump (insn, newjpos, newlpos);
2061 changed = 1;
2062 next = insn;
2063 }
2064 }
2065
2066 /* This code was dead in the previous jump.c! */
2067 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2068 {
2069 /* Return insns all "jump to the same place"
2070 so we can cross-jump between any two of them. */
2071
2072 rtx newjpos, newlpos, target;
2073
2074 newjpos = 0;
2075
2076 /* If we cannot cross jump to code before the label,
2077 see if we can cross jump to another jump to
2078 the same label. */
2079 /* Try each other jump to this label. */
2080 for (target = jump_chain[0];
2081 target != 0 && newjpos == 0;
2082 target = jump_chain[INSN_UID (target)])
2083 if (target != insn
2084 && ! INSN_DELETED_P (target)
2085 && GET_CODE (PATTERN (target)) == RETURN)
2086 find_cross_jump (insn, target, 2,
2087 &newjpos, &newlpos);
2088
2089 if (newjpos != 0)
2090 {
2091 do_cross_jump (insn, newjpos, newlpos);
2092 changed = 1;
2093 next = insn;
2094 }
2095 }
2096 }
2097 }
2098
2099 first = 0;
2100 }
2101
2102 /* Delete extraneous line number notes.
2103 Note that two consecutive notes for different lines are not really
2104 extraneous. There should be some indication where that line belonged,
2105 even if it became empty. */
2106
2107 {
2108 rtx last_note = 0;
2109
2110 for (insn = f; insn; insn = NEXT_INSN (insn))
2111 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2112 {
2113 /* Delete this note if it is identical to previous note. */
2114 if (last_note
2115 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2116 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2117 {
2118 delete_insn (insn);
2119 continue;
2120 }
2121
2122 last_note = insn;
2123 }
2124 }
2125
2126 /* CAN_REACH_END is persistent for each function. Once set it should
2127 not be cleared. This is especially true for the case where we
2128 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
2129 the front-end before compiling each function. */
2130 if (calculate_can_reach_end (last_insn, optimize != 0))
2131 can_reach_end = 1;
2132
2133 end:
2134 /* Clean up. */
2135 free (jump_chain);
2136 jump_chain = 0;
2137 }
2138 \f
2139 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2140 notes whose labels don't occur in the insn any more. Returns the
2141 largest INSN_UID found. */
2142 static int
2143 init_label_info (f)
2144 rtx f;
2145 {
2146 int largest_uid = 0;
2147 rtx insn;
2148
2149 for (insn = f; insn; insn = NEXT_INSN (insn))
2150 {
2151 if (GET_CODE (insn) == CODE_LABEL)
2152 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2153 else if (GET_CODE (insn) == JUMP_INSN)
2154 JUMP_LABEL (insn) = 0;
2155 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2156 {
2157 rtx note, next;
2158
2159 for (note = REG_NOTES (insn); note; note = next)
2160 {
2161 next = XEXP (note, 1);
2162 if (REG_NOTE_KIND (note) == REG_LABEL
2163 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2164 remove_note (insn, note);
2165 }
2166 }
2167 if (INSN_UID (insn) > largest_uid)
2168 largest_uid = INSN_UID (insn);
2169 }
2170
2171 return largest_uid;
2172 }
2173
2174 /* Delete insns following barriers, up to next label.
2175
2176 Also delete no-op jumps created by gcse. */
2177 static void
2178 delete_barrier_successors (f)
2179 rtx f;
2180 {
2181 rtx insn;
2182
2183 for (insn = f; insn;)
2184 {
2185 if (GET_CODE (insn) == BARRIER)
2186 {
2187 insn = NEXT_INSN (insn);
2188
2189 never_reached_warning (insn);
2190
2191 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2192 {
2193 if (GET_CODE (insn) == NOTE
2194 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2195 insn = NEXT_INSN (insn);
2196 else
2197 insn = delete_insn (insn);
2198 }
2199 /* INSN is now the code_label. */
2200 }
2201 /* Also remove (set (pc) (pc)) insns which can be created by
2202 gcse. We eliminate such insns now to avoid having them
2203 cause problems later. */
2204 else if (GET_CODE (insn) == JUMP_INSN
2205 && GET_CODE (PATTERN (insn)) == SET
2206 && SET_SRC (PATTERN (insn)) == pc_rtx
2207 && SET_DEST (PATTERN (insn)) == pc_rtx)
2208 insn = delete_insn (insn);
2209
2210 else
2211 insn = NEXT_INSN (insn);
2212 }
2213 }
2214
2215 /* Mark the label each jump jumps to.
2216 Combine consecutive labels, and count uses of labels.
2217
2218 For each label, make a chain (using `jump_chain')
2219 of all the *unconditional* jumps that jump to it;
2220 also make a chain of all returns.
2221
2222 CROSS_JUMP indicates whether we are doing cross jumping
2223 and if we are whether we will be paying attention to
2224 death notes or not. */
2225
2226 static void
2227 mark_all_labels (f, cross_jump)
2228 rtx f;
2229 int cross_jump;
2230 {
2231 rtx insn;
2232
2233 for (insn = f; insn; insn = NEXT_INSN (insn))
2234 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2235 {
2236 mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
2237 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2238 {
2239 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2240 {
2241 jump_chain[INSN_UID (insn)]
2242 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2243 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2244 }
2245 if (GET_CODE (PATTERN (insn)) == RETURN)
2246 {
2247 jump_chain[INSN_UID (insn)] = jump_chain[0];
2248 jump_chain[0] = insn;
2249 }
2250 }
2251 }
2252 }
2253
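/* A hedged sketch (illustration only; never compiled) of how the
   chains built above are consumed: jump_chain[INSN_UID (label)]
   heads the list of unconditional jumps to LABEL, threaded through
   the jumps' own UIDs, and jump_chain[0] heads the analogous list
   of RETURN insns.  */
#if 0
static void
walk_jumps_to_label (label)
     rtx label;
{
  rtx j;

  if (INSN_UID (label) < max_jump_chain)
    for (j = jump_chain[INSN_UID (label)];
         j != 0;
         j = jump_chain[INSN_UID (j)])
      if (! INSN_DELETED_P (j))
        ;  /* ... examine the jump J here ...  */
}
#endif
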
2254 /* Delete all labels already not referenced.
2255 Also find and return the last insn. */
2256
2257 static rtx
2258 delete_unreferenced_labels (f)
2259 rtx f;
2260 {
2261 rtx final = NULL_RTX;
2262 rtx insn;
2263
2264 for (insn = f; insn; )
2265 {
2266 if (GET_CODE (insn) == CODE_LABEL
2267 && LABEL_NUSES (insn) == 0
2268 && LABEL_ALTERNATE_NAME (insn) == NULL)
2269 insn = delete_insn (insn);
2270 else
2271 {
2272 final = insn;
2273 insn = NEXT_INSN (insn);
2274 }
2275 }
2276
2277 return final;
2278 }
2279
2280 /* Delete various simple forms of moves which have no necessary
2281 side effect. */
2282
2283 static void
2284 delete_noop_moves (f)
2285 rtx f;
2286 {
2287 rtx insn, next;
2288
2289 for (insn = f; insn; )
2290 {
2291 next = NEXT_INSN (insn);
2292
2293 if (GET_CODE (insn) == INSN)
2294 {
2295 register rtx body = PATTERN (insn);
2296
2297 /* Combine stack_adjusts with following push_insns. */
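/* A hedged illustration with hypothetical operands: an adjust

       (set (reg sp) (plus (reg sp) (const_int 12)))

   followed by pushes through (mem (post_inc (reg sp))) can have
   those pushes rewritten as ordinary stores at known offsets from
   the stack pointer, with the explicit adjust shrunk or deleted.  */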
2298 #ifdef PUSH_ROUNDING
2299 if (GET_CODE (body) == SET
2300 && SET_DEST (body) == stack_pointer_rtx
2301 && GET_CODE (SET_SRC (body)) == PLUS
2302 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
2303 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
2304 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
2305 {
2306 rtx p;
2307 rtx stack_adjust_insn = insn;
2308 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
2309 int total_pushed = 0;
2310 int pushes = 0;
2311
2312 /* Find all successive push insns. */
2313 p = insn;
2314 /* Don't convert more than three pushes;
2315 that starts adding too many displaced addresses
2316 and the whole thing starts becoming a losing
2317 proposition. */
2318 while (pushes < 3)
2319 {
2320 rtx pbody, dest;
2321 p = next_nonnote_insn (p);
2322 if (p == 0 || GET_CODE (p) != INSN)
2323 break;
2324 pbody = PATTERN (p);
2325 if (GET_CODE (pbody) != SET)
2326 break;
2327 dest = SET_DEST (pbody);
2328 /* Allow a no-op move between the adjust and the push. */
2329 if (GET_CODE (dest) == REG
2330 && GET_CODE (SET_SRC (pbody)) == REG
2331 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2332 continue;
2333 if (! (GET_CODE (dest) == MEM
2334 && GET_CODE (XEXP (dest, 0)) == POST_INC
2335 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2336 break;
2337 pushes++;
2338 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
2339 > stack_adjust_amount)
2340 break;
2341 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2342 }
2343
2344 /* Discard the amount pushed from the stack adjust;
2345 maybe eliminate it entirely. */
2346 if (total_pushed >= stack_adjust_amount)
2347 {
2348 delete_computation (stack_adjust_insn);
2349 total_pushed = stack_adjust_amount;
2350 }
2351 else
2352 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
2353 = GEN_INT (stack_adjust_amount - total_pushed);
2354
2355 /* Change the appropriate push insns to ordinary stores. */
2356 p = insn;
2357 while (total_pushed > 0)
2358 {
2359 rtx pbody, dest;
2360 p = next_nonnote_insn (p);
2361 if (GET_CODE (p) != INSN)
2362 break;
2363 pbody = PATTERN (p);
2364 if (GET_CODE (pbody) != SET)
2365 break;
2366 dest = SET_DEST (pbody);
2367 /* Allow a no-op move between the adjust and the push. */
2368 if (GET_CODE (dest) == REG
2369 && GET_CODE (SET_SRC (pbody)) == REG
2370 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2371 continue;
2372 if (! (GET_CODE (dest) == MEM
2373 && GET_CODE (XEXP (dest, 0)) == POST_INC
2374 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2375 break;
2376 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2377 /* If this push doesn't fully fit in the space
2378 of the stack adjust that we deleted,
2379 make another stack adjust here for what we
2380 didn't use up. There should be peepholes
2381 to recognize the resulting sequence of insns. */
2382 if (total_pushed < 0)
2383 {
2384 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
2385 GEN_INT (- total_pushed)),
2386 p);
2387 break;
2388 }
2389 XEXP (dest, 0)
2390 = plus_constant (stack_pointer_rtx, total_pushed);
2391 }
2392 }
2393 #endif
2394
2395 /* Detect and delete no-op move instructions
2396 resulting from not allocating a parameter in a register. */
2397
2398 if (GET_CODE (body) == SET
2399 && (SET_DEST (body) == SET_SRC (body)
2400 || (GET_CODE (SET_DEST (body)) == MEM
2401 && GET_CODE (SET_SRC (body)) == MEM
2402 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2403 && ! (GET_CODE (SET_DEST (body)) == MEM
2404 && MEM_VOLATILE_P (SET_DEST (body)))
2405 && ! (GET_CODE (SET_SRC (body)) == MEM
2406 && MEM_VOLATILE_P (SET_SRC (body))))
2407 delete_computation (insn);
2408
2409 /* Detect and ignore no-op move instructions
2410 resulting from smart or fortuitous register allocation. */
2411
2412 else if (GET_CODE (body) == SET)
2413 {
2414 int sreg = true_regnum (SET_SRC (body));
2415 int dreg = true_regnum (SET_DEST (body));
2416
2417 if (sreg == dreg && sreg >= 0)
2418 delete_insn (insn);
2419 else if (sreg >= 0 && dreg >= 0)
2420 {
2421 rtx trial;
2422 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2423 sreg, NULL_PTR, dreg,
2424 GET_MODE (SET_SRC (body)));
2425
2426 if (tem != 0
2427 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2428 {
2429 /* DREG may have been the target of a REG_DEAD note in
2430 the insn which makes INSN redundant. If so, reorg
2431 would still think it is dead. So search for such a
2432 note and delete it if we find it. */
2433 if (! find_regno_note (insn, REG_UNUSED, dreg))
2434 for (trial = prev_nonnote_insn (insn);
2435 trial && GET_CODE (trial) != CODE_LABEL;
2436 trial = prev_nonnote_insn (trial))
2437 if (find_regno_note (trial, REG_DEAD, dreg))
2438 {
2439 remove_death (dreg, trial);
2440 break;
2441 }
2442
2443 /* Deleting insn could lose a death-note for SREG. */
2444 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2445 {
2446 /* Change this into a USE so that we won't emit
2447 code for it, but still can keep the note. */
2448 PATTERN (insn)
2449 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2450 INSN_CODE (insn) = -1;
2451 /* Remove all reg notes but the REG_DEAD one. */
2452 REG_NOTES (insn) = trial;
2453 XEXP (trial, 1) = NULL_RTX;
2454 }
2455 else
2456 delete_insn (insn);
2457 }
2458 }
2459 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2460 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2461 NULL_PTR, 0,
2462 GET_MODE (SET_DEST (body))))
2463 {
2464 /* This handles the case where we have two consecutive
2465 assignments of the same constant to pseudos that didn't
2466 get a hard reg. Each SET from the constant will be
2467 converted into a SET of the spill register and an
2468 output reload will be made following it. This produces
2469 two loads of the same constant into the same spill
2470 register. */
2471
2472 rtx in_insn = insn;
2473
2474 /* Look back for a death note for the first reg.
2475 If there is one, it is no longer accurate. */
2476 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2477 {
2478 if ((GET_CODE (in_insn) == INSN
2479 || GET_CODE (in_insn) == JUMP_INSN)
2480 && find_regno_note (in_insn, REG_DEAD, dreg))
2481 {
2482 remove_death (dreg, in_insn);
2483 break;
2484 }
2485 in_insn = PREV_INSN (in_insn);
2486 }
2487
2488 /* Delete the second load of the value. */
2489 delete_insn (insn);
2490 }
2491 }
2492 else if (GET_CODE (body) == PARALLEL)
2493 {
2494 /* If each part is a set between two identical registers or
2495 a USE or CLOBBER, delete the insn. */
2496 int i, sreg, dreg;
2497 rtx tem;
2498
2499 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2500 {
2501 tem = XVECEXP (body, 0, i);
2502 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2503 continue;
2504
2505 if (GET_CODE (tem) != SET
2506 || (sreg = true_regnum (SET_SRC (tem))) < 0
2507 || (dreg = true_regnum (SET_DEST (tem))) < 0
2508 || dreg != sreg)
2509 break;
2510 }
2511
2512 if (i < 0)
2513 delete_insn (insn);
2514 }
2515 /* Also delete insns to store bit fields if they are no-ops. */
2516 /* Not worth the hair to detect this in the big-endian case. */
2517 else if (! BYTES_BIG_ENDIAN
2518 && GET_CODE (body) == SET
2519 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2520 && XEXP (SET_DEST (body), 2) == const0_rtx
2521 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2522 && ! (GET_CODE (SET_SRC (body)) == MEM
2523 && MEM_VOLATILE_P (SET_SRC (body))))
2524 delete_insn (insn);
2525 }
2526 insn = next;
2527 }
2528 }
2529
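/* A hedged example (illustration only): after register allocation a
   move such as

       (set (reg 3) (reg 3))

   or one whose source and destination renumber to the same hard
   register is deleted above as a no-op.  */
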
2530 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2531 If so indicate that this function can drop off the end by returning
2532 1, else return 0.
2533
2537 DELETE_FINAL_NOTE indicates whether we should delete the note
2538 if we find it. */
2539
2540 static int
2541 calculate_can_reach_end (last, delete_final_note)
2542 rtx last;
2543 int delete_final_note;
2544 {
2545 rtx insn = last;
2546 int n_labels = 1;
2547
2548 while (insn != NULL_RTX)
2549 {
2550 int ok = 0;
2551
2552 /* One label can follow the end-note: the return label. */
2553 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2554 ok = 1;
2555 /* Ordinary insns can follow it if returning a structure. */
2556 else if (GET_CODE (insn) == INSN)
2557 ok = 1;
2558 /* If the machine uses explicit RETURN insns and no epilogue,
2559 then one of them follows the note. */
2560 else if (GET_CODE (insn) == JUMP_INSN
2561 && GET_CODE (PATTERN (insn)) == RETURN)
2562 ok = 1;
2563 /* A barrier can follow the return insn. */
2564 else if (GET_CODE (insn) == BARRIER)
2565 ok = 1;
2566 /* Other kinds of notes can follow also. */
2567 else if (GET_CODE (insn) == NOTE
2568 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2569 ok = 1;
2570
2571 if (ok != 1)
2572 break;
2573
2574 insn = PREV_INSN (insn);
2575 }
2576
2577 /* See if we backed up to the appropriate type of note. */
2578 if (insn != NULL_RTX
2579 && GET_CODE (insn) == NOTE
2580 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
2581 {
2582 if (delete_final_note)
2583 delete_insn (insn);
2584 return 1;
2585 }
2586
2587 return 0;
2588 }
2589
2590 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2591 jump. Assume that this unconditional jump is to the exit test code. If
2592 the code is sufficiently simple, make a copy of it before INSN,
2593 followed by a jump to the exit of the loop. Then delete the unconditional
2594 jump after INSN.
2595
2596 Return 1 if we made the change, else 0.
2597
2598 This is only safe immediately after a regscan pass because it uses the
2599 values of regno_first_uid and regno_last_uid. */
2600
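/* A hedged C-level picture of the effect (illustration only):

       while (COND) { BODY; }

   becomes, in essence,

       if (COND)
         do { BODY; } while (COND);

   so the loop proper is entered with the test known true and runs
   as a fall-through do-while.  */
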
2601 static int
2602 duplicate_loop_exit_test (loop_start)
2603 rtx loop_start;
2604 {
2605 rtx insn, set, reg, p, link;
2606 rtx copy = 0, first_copy = 0;
2607 int num_insns = 0;
2608 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2609 rtx lastexit;
2610 int max_reg = max_reg_num ();
2611 rtx *reg_map = 0;
2612
2613 /* Scan the exit code. We do not perform this optimization if any insn:
2614
2615 is a CALL_INSN
2616 is a CODE_LABEL
2617 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2618 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2619 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2620 is not valid.
2621
2622 We also do not do this if we find an insn with ASM_OPERANDS. While
2623 this restriction should not be necessary, copying an insn with
2624 ASM_OPERANDS can confuse asm_noperands in some cases.
2625
2626 Also, don't do this if the exit code is more than 20 insns. */
2627
2628 for (insn = exitcode;
2629 insn
2630 && ! (GET_CODE (insn) == NOTE
2631 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2632 insn = NEXT_INSN (insn))
2633 {
2634 switch (GET_CODE (insn))
2635 {
2636 case CODE_LABEL:
2637 case CALL_INSN:
2638 return 0;
2639 case NOTE:
2640 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2641 a jump immediately after the loop start that branches outside
2642 the loop but within an outer loop, near the exit test.
2643 If we copied this exit test and created a phony
2644 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2645 before the exit test look as if they could be safely moved
2646 out of the loop even though they might never actually be executed.
2647 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2648
2649 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2650 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2651 return 0;
2652
2653 if (optimize < 2
2654 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2655 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2656 /* If we were to duplicate this code, we would not move
2657 the BLOCK notes, and so debugging the moved code would
2658 be difficult. Thus, we only move the code with -O2 or
2659 higher. */
2660 return 0;
2661
2662 break;
2663 case JUMP_INSN:
2664 case INSN:
2665 /* The code below would grossly mishandle REG_WAS_0 notes,
2666 so get rid of them here. */
2667 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2668 remove_note (insn, p);
2669 if (++num_insns > 20
2670 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2671 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2672 return 0;
2673 break;
2674 default:
2675 break;
2676 }
2677 }
2678
2679 /* Unless INSN is zero, we can do the optimization. */
2680 if (insn == 0)
2681 return 0;
2682
2683 lastexit = insn;
2684
2685 /* See if any insn sets a register only used in the loop exit code and
2686 not a user variable. If so, replace it with a new register. */
2687 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2688 if (GET_CODE (insn) == INSN
2689 && (set = single_set (insn)) != 0
2690 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2691 || (GET_CODE (reg) == SUBREG
2692 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2693 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2694 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
2695 {
2696 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2697 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2698 break;
2699
2700 if (p != lastexit)
2701 {
2702 /* We can do the replacement. Allocate reg_map if this is the
2703 first replacement we found. */
2704 if (reg_map == 0)
2705 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
2706
2707 REG_LOOP_TEST_P (reg) = 1;
2708
2709 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2710 }
2711 }
2712
2713 /* Now copy each insn. */
2714 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2715 {
2716 switch (GET_CODE (insn))
2717 {
2718 case BARRIER:
2719 copy = emit_barrier_before (loop_start);
2720 break;
2721 case NOTE:
2722 /* Only copy line-number notes. */
2723 if (NOTE_LINE_NUMBER (insn) >= 0)
2724 {
2725 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2726 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2727 }
2728 break;
2729
2730 case INSN:
2731 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
2732 if (reg_map)
2733 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2734
2735 mark_jump_label (PATTERN (copy), copy, 0, 0);
2736
2737 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2738 make them. */
2739 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2740 if (REG_NOTE_KIND (link) != REG_LABEL)
2741 REG_NOTES (copy)
2742 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
2743 XEXP (link, 0),
2744 REG_NOTES (copy)));
2745 if (reg_map && REG_NOTES (copy))
2746 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2747 break;
2748
2749 case JUMP_INSN:
2750 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)), loop_start);
2751 if (reg_map)
2752 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2753 mark_jump_label (PATTERN (copy), copy, 0, 0);
2754 if (REG_NOTES (insn))
2755 {
2756 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
2757 if (reg_map)
2758 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2759 }
2760
2761 /* If this is a simple jump, add it to the jump chain. */
2762
2763 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2764 && simplejump_p (copy))
2765 {
2766 jump_chain[INSN_UID (copy)]
2767 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2768 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2769 }
2770 break;
2771
2772 default:
2773 abort ();
2774 }
2775
2776 /* Record the first insn we copied. We need it so that we can
2777 scan the copied insns for new pseudo registers. */
2778 if (! first_copy)
2779 first_copy = copy;
2780 }
2781
2782 /* Now clean up by emitting a jump to the end label and deleting the jump
2783 at the start of the loop. */
2784 if (! copy || GET_CODE (copy) != BARRIER)
2785 {
2786 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2787 loop_start);
2788
2789 /* Record the first insn we copied. We need it so that we can
2790 scan the copied insns for new pseudo registers. This may not
2791 be strictly necessary since we should have copied at least one
2792 insn above. But I am going to be safe. */
2793 if (! first_copy)
2794 first_copy = copy;
2795
2796 mark_jump_label (PATTERN (copy), copy, 0, 0);
2797 if (INSN_UID (copy) < max_jump_chain
2798 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2799 {
2800 jump_chain[INSN_UID (copy)]
2801 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2802 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2803 }
2804 emit_barrier_before (loop_start);
2805 }
2806
2807 /* Now scan from the first insn we copied to the last insn we copied
2808 (copy) for new pseudo registers. Do this after the code to jump to
2809 the end label since that might create a new pseudo too. */
2810 reg_scan_update (first_copy, copy, max_reg);
2811
2812 /* Mark the exit code as the virtual top of the converted loop. */
2813 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2814
2815 delete_insn (next_nonnote_insn (loop_start));
2816
2817 /* Clean up. */
2818 if (reg_map)
2819 free (reg_map);
2820
2821 return 1;
2822 }
2823 \f
2824 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2825 loop-end notes between START and END out before START. Assume that
2826 END is not such a note. START may be such a note. Returns the value
2827 of the new starting insn, which may be different if the original start
2828 was such a note. */
2829
2830 rtx
2831 squeeze_notes (start, end)
2832 rtx start, end;
2833 {
2834 rtx insn;
2835 rtx next;
2836
2837 for (insn = start; insn != end; insn = next)
2838 {
2839 next = NEXT_INSN (insn);
2840 if (GET_CODE (insn) == NOTE
2841 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2842 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2843 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2844 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2845 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2846 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2847 {
2848 if (insn == start)
2849 start = next;
2850 else
2851 {
2852 rtx prev = PREV_INSN (insn);
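	  /* Unlink INSN from its current position and relink it
	     immediately before START, repairing both pairs of
	     neighbor links in the doubly-linked insn chain.  */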
2853 PREV_INSN (insn) = PREV_INSN (start);
2854 NEXT_INSN (insn) = start;
2855 NEXT_INSN (PREV_INSN (insn)) = insn;
2856 PREV_INSN (NEXT_INSN (insn)) = insn;
2857 NEXT_INSN (prev) = next;
2858 PREV_INSN (next) = prev;
2859 }
2860 }
2861 }
2862
2863 return start;
2864 }
2865 \f
2866 /* Compare the instructions before insn E1 with those before E2
2867 to find an opportunity for cross jumping.
2868 (This means detecting identical sequences of insns followed by
2869 jumps to the same place, or followed by a label and a jump
2870 to that label, and replacing one with a jump to the other.)
2871
2872 Assume E1 is a jump that jumps to label E2
2873 (that is not always true but it might as well be).
2874 Find the longest possible equivalent sequences
2875 and store the first insns of those sequences into *F1 and *F2.
2876 Store zero there if no equivalent preceding instructions are found.
2877
2878 We give up if we find a label in stream 1.
2879 Actually we could transfer that label into stream 2. */
2880
2881 static void
2882 find_cross_jump (e1, e2, minimum, f1, f2)
2883 rtx e1, e2;
2884 int minimum;
2885 rtx *f1, *f2;
2886 {
2887 register rtx i1 = e1, i2 = e2;
2888 register rtx p1, p2;
2889 int lose = 0;
2890
2891 rtx last1 = 0, last2 = 0;
2892 rtx afterlast1 = 0, afterlast2 = 0;
2893
2894 *f1 = 0;
2895 *f2 = 0;
2896
2897 while (1)
2898 {
2899 i1 = prev_nonnote_insn (i1);
2900
2901 i2 = PREV_INSN (i2);
2902 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2903 i2 = PREV_INSN (i2);
2904
2905 if (i1 == 0)
2906 break;
2907
2908 /* Don't allow the range of insns preceding E1 or E2
2909 to include the other (E2 or E1). */
2910 if (i2 == e1 || i1 == e2)
2911 break;
2912
2913 /* If we will get to this code by jumping, those jumps will be
2914 tensioned to go directly to the new label (before I2),
2915 so this cross-jumping won't cost extra. So reduce the minimum. */
2916 if (GET_CODE (i1) == CODE_LABEL)
2917 {
2918 --minimum;
2919 break;
2920 }
2921
2922 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2923 break;
2924
2925 /* Avoid moving insns across EH regions if either of the insns
2926 can throw. */
2927 if (flag_exceptions
2928 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
2929 && !in_same_eh_region (i1, i2))
2930 break;
2931
2932 p1 = PATTERN (i1);
2933 p2 = PATTERN (i2);
2934
2935 /* If this is a CALL_INSN, compare register usage information.
2936 If we don't check this on stack register machines, the two
2937 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2938 numbers of stack registers in the same basic block.
2939 If we don't check this on machines with delay slots, a delay slot may
2940 be filled that clobbers a parameter expected by the subroutine.
2941
2942 ??? We take the simple route for now and assume that if they're
2943 equal, they were constructed identically. */
2944
2945 if (GET_CODE (i1) == CALL_INSN
2946 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2947 CALL_INSN_FUNCTION_USAGE (i2)))
2948 lose = 1;
2949
2950 #ifdef STACK_REGS
2951 /* If cross_jump_death_matters is not 0, the insn's mode
2952 indicates whether or not the insn contains any stack-like
2953 regs. */
2954
2955 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
2956 {
2957 /* If register stack conversion has already been done, then
2958 death notes must also be compared before it is certain that
2959 the two instruction streams match. */
2960
2961 rtx note;
2962 HARD_REG_SET i1_regset, i2_regset;
2963
2964 CLEAR_HARD_REG_SET (i1_regset);
2965 CLEAR_HARD_REG_SET (i2_regset);
2966
2967 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2968 if (REG_NOTE_KIND (note) == REG_DEAD
2969 && STACK_REG_P (XEXP (note, 0)))
2970 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2971
2972 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2973 if (REG_NOTE_KIND (note) == REG_DEAD
2974 && STACK_REG_P (XEXP (note, 0)))
2975 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2976
2977 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2978
2979 lose = 1;
2980
2981 done:
2982 ;
2983 }
2984 #endif
2985
2986 /* Don't allow old-style asm or volatile extended asms to be accepted
2987 for cross jumping purposes. It is conceptually correct to allow
2988 them, since cross-jumping preserves the dynamic instruction order
2989 even though it is changing the static instruction order. However,
2990 if an asm is being used to emit an assembler pseudo-op, such as
2991 the MIPS `.set reorder' pseudo-op, then the static instruction order
2992 matters and it must be preserved. */
2993 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
2994 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
2995 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
2996 lose = 1;
2997
2998 if (lose || GET_CODE (p1) != GET_CODE (p2)
2999 || ! rtx_renumbered_equal_p (p1, p2))
3000 {
3001 /* The following code helps take care of G++ cleanups. */
3002 rtx equiv1;
3003 rtx equiv2;
3004
3005 if (!lose && GET_CODE (p1) == GET_CODE (p2)
3006 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
3007 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
3008 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
3009 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
3010 /* If the equivalences are not to a constant, they may
3011 reference pseudos that no longer exist, so we can't
3012 use them. */
3013 && CONSTANT_P (XEXP (equiv1, 0))
3014 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3015 {
3016 rtx s1 = single_set (i1);
3017 rtx s2 = single_set (i2);
3018 if (s1 != 0 && s2 != 0
3019 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3020 {
3021 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3022 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3023 if (! rtx_renumbered_equal_p (p1, p2))
3024 cancel_changes (0);
3025 else if (apply_change_group ())
3026 goto win;
3027 }
3028 }
3029
3030 /* Insns fail to match; cross jumping is limited to the following
3031 insns. */
3032
3033 #ifdef HAVE_cc0
3034 /* Don't allow the insn after a compare to be shared by
3035 cross-jumping unless the compare is also shared.
3036 Here, if either of these non-matching insns is a compare,
3037 exclude the following insn from possible cross-jumping. */
3038 if (sets_cc0_p (p1) || sets_cc0_p (p2))
3039 last1 = afterlast1, last2 = afterlast2, ++minimum;
3040 #endif
3041
3042 /* If cross-jumping here will feed a jump-around-jump
3043 optimization, this jump won't cost extra, so reduce
3044 the minimum. */
3045 if (GET_CODE (i1) == JUMP_INSN
3046 && JUMP_LABEL (i1)
3047 && prev_real_insn (JUMP_LABEL (i1)) == e1)
3048 --minimum;
3049 break;
3050 }
3051
3052 win:
3053 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3054 {
3055 /* Ok, this insn is potentially includable in a cross-jump here. */
3056 afterlast1 = last1, afterlast2 = last2;
3057 last1 = i1, last2 = i2, --minimum;
3058 }
3059 }
3060
3061 if (minimum <= 0 && last1 != 0 && last1 != e1)
3062 *f1 = last1, *f2 = last2;
3063 }
3064
3065 static void
3066 do_cross_jump (insn, newjpos, newlpos)
3067 rtx insn, newjpos, newlpos;
3068 {
3069 /* Find an existing label at this point
3070 or make a new one if there is none. */
3071 register rtx label = get_label_before (newlpos);
3072
3073 /* Make the same jump insn jump to the new point. */
3074 if (GET_CODE (PATTERN (insn)) == RETURN)
3075 {
3076 /* Remove from jump chain of returns. */
3077 delete_from_jump_chain (insn);
3078 /* Change the insn. */
3079 PATTERN (insn) = gen_jump (label);
3080 INSN_CODE (insn) = -1;
3081 JUMP_LABEL (insn) = label;
3082 LABEL_NUSES (label)++;
3083 /* Add to the new jump chain. */
3084 if (INSN_UID (label) < max_jump_chain
3085 && INSN_UID (insn) < max_jump_chain)
3086 {
3087 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
3088 jump_chain[INSN_UID (label)] = insn;
3089 }
3090 }
3091 else
3092 redirect_jump (insn, label);
3093
3094 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3095 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3096 the NEWJPOS stream. */
3097
3098 while (newjpos != insn)
3099 {
3100 rtx lnote;
3101
3102 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
3103 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
3104 || REG_NOTE_KIND (lnote) == REG_EQUIV)
3105 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
3106 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
3107 remove_note (newlpos, lnote);
3108
3109 delete_insn (newjpos);
3110 newjpos = next_real_insn (newjpos);
3111 newlpos = next_real_insn (newlpos);
3112 }
3113 }
3114 \f
3115 /* Return the label before INSN, or put a new label there. */
3116
3117 rtx
3118 get_label_before (insn)
3119 rtx insn;
3120 {
3121 rtx label;
3122
3123 /* Find an existing label at this point
3124 or make a new one if there is none. */
3125 label = prev_nonnote_insn (insn);
3126
3127 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3128 {
3129 rtx prev = PREV_INSN (insn);
3130
3131 label = gen_label_rtx ();
3132 emit_label_after (label, prev);
3133 LABEL_NUSES (label) = 0;
3134 }
3135 return label;
3136 }
3137
3138 /* Return the label after INSN, or put a new label there. */
3139
3140 rtx
3141 get_label_after (insn)
3142 rtx insn;
3143 {
3144 rtx label;
3145
3146 /* Find an existing label at this point
3147 or make a new one if there is none. */
3148 label = next_nonnote_insn (insn);
3149
3150 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3151 {
3152 label = gen_label_rtx ();
3153 emit_label_after (label, insn);
3154 LABEL_NUSES (label) = 0;
3155 }
3156 return label;
3157 }
3158 \f
3159 /* Return 1 if INSN is a jump that jumps to right after TARGET
3160 only on the condition that TARGET itself would drop through.
3161 Assumes that TARGET is a conditional jump. */
3162
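/* A hedged illustration (hypothetical labels):

       TARGET:  if (a == b) goto elsewhere;
       ...
       INSN:    if (a != b) goto just_after_target;

   INSN is taken exactly when TARGET would drop through, which is
   the "opposing jump" shape the conditional cross-jump case above
   looks for.  */
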
3163 static int
3164 jump_back_p (insn, target)
3165 rtx insn, target;
3166 {
3167 rtx cinsn, ctarget;
3168 enum rtx_code codei, codet;
3169
3170 if (simplejump_p (insn) || ! condjump_p (insn)
3171 || simplejump_p (target)
3172 || target != prev_real_insn (JUMP_LABEL (insn)))
3173 return 0;
3174
3175 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3176 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3177
3178 codei = GET_CODE (cinsn);
3179 codet = GET_CODE (ctarget);
3180
3181 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3182 {
3183 if (! can_reverse_comparison_p (cinsn, insn))
3184 return 0;
3185 codei = reverse_condition (codei);
3186 }
3187
3188 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3189 {
3190 if (! can_reverse_comparison_p (ctarget, target))
3191 return 0;
3192 codet = reverse_condition (codet);
3193 }
3194
3195 return (codei == codet
3196 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3197 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3198 }
3199 \f
3200 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3201 return non-zero if it is safe to reverse this comparison. It is if our
3202 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3203 this is known to be an integer comparison. */
3204
3205 int
3206 can_reverse_comparison_p (comparison, insn)
3207 rtx comparison;
3208 rtx insn;
3209 {
3210 rtx arg0;
3211
3212 /* If this is not actually a comparison, we can't reverse it. */
3213 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3214 return 0;
3215
3216 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3217 /* If this is an NE comparison, it is safe to reverse it to an EQ
3218 comparison and vice versa, even for floating point. If no operands
3219 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3220 always false and NE is always true, so the reversal is also valid. */
3221 || flag_fast_math
3222 || GET_CODE (comparison) == NE
3223 || GET_CODE (comparison) == EQ)
3224 return 1;
3225
3226 arg0 = XEXP (comparison, 0);
3227
3228 /* Make sure ARG0 is one of the actual objects being compared. If we
3229 can't do this, we can't be sure the comparison can be reversed.
3230
3231 Handle cc0 and a MODE_CC register. */
3232 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3233 #ifdef HAVE_cc0
3234 || arg0 == cc0_rtx
3235 #endif
3236 )
3237 {
3238 rtx prev;
3239 rtx set;
3240
3241 /* First see if the condition code mode alone is enough to say we can
3242 reverse the condition.  If not, then search backwards for a set of
3243 ARG0.  We do not need to check for an insn clobbering it since valid
3244 code will contain a set with no intervening clobber.  But
3245 stop when we reach a label. */
3246 #ifdef REVERSIBLE_CC_MODE
3247 if (GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC
3248 && REVERSIBLE_CC_MODE (GET_MODE (arg0)))
3249 return 1;
3250 #endif
3251
3252 for (prev = prev_nonnote_insn (insn);
3253 prev != 0 && GET_CODE (prev) != CODE_LABEL;
3254 prev = prev_nonnote_insn (prev))
3255 if ((set = single_set (prev)) != 0
3256 && rtx_equal_p (SET_DEST (set), arg0))
3257 {
3258 arg0 = SET_SRC (set);
3259
3260 if (GET_CODE (arg0) == COMPARE)
3261 arg0 = XEXP (arg0, 0);
3262 break;
3263 }
3264 }
3265
3266 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3267 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3268 return (GET_CODE (arg0) == CONST_INT
3269 || (GET_MODE (arg0) != VOIDmode
3270 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3271 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3272 }
3273
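/* A hedged example for the function above (illustration only): with
   IEEE floating point, (a < b) and (a >= b) are both false when
   either operand is a NaN, so LT cannot be reversed to GE; NE and
   EQ, by contrast, are always reversible.  */
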
3274 /* Given an rtx-code for a comparison, return the code for the negated
3275 comparison. If no such code exists, return UNKNOWN.
3276
3277 WATCH OUT! reverse_condition is not safe to use on a jump that might
3278 be acting on the results of an IEEE floating point comparison, because
3279 of the special treatment of non-signaling nans in comparisons.
3280 Use can_reverse_comparison_p to be sure. */
3281
3282 enum rtx_code
3283 reverse_condition (code)
3284 enum rtx_code code;
3285 {
3286 switch (code)
3287 {
3288 case EQ:
3289 return NE;
3290 case NE:
3291 return EQ;
3292 case GT:
3293 return LE;
3294 case GE:
3295 return LT;
3296 case LT:
3297 return GE;
3298 case LE:
3299 return GT;
3300 case GTU:
3301 return LEU;
3302 case GEU:
3303 return LTU;
3304 case LTU:
3305 return GEU;
3306 case LEU:
3307 return GTU;
3308 case UNORDERED:
3309 return ORDERED;
3310 case ORDERED:
3311 return UNORDERED;
3312
3313 case UNLT:
3314 case UNLE:
3315 case UNGT:
3316 case UNGE:
3317 case UNEQ:
3318 case LTGT:
3319 return UNKNOWN;
3320
3321 default:
3322 abort ();
3323 }
3324 }
3325
3326 /* Similar, but we're allowed to generate unordered comparisons, which
3327 makes it safe for IEEE floating-point. Of course, we have to recognize
3328 that the target will support them too... */
3329
3330 enum rtx_code
3331 reverse_condition_maybe_unordered (code)
3332 enum rtx_code code;
3333 {
3334 /* Non-IEEE formats don't have unordered conditions. */
3335 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
3336 return reverse_condition (code);
3337
3338 switch (code)
3339 {
3340 case EQ:
3341 return NE;
3342 case NE:
3343 return EQ;
3344 case GT:
3345 return UNLE;
3346 case GE:
3347 return UNLT;
3348 case LT:
3349 return UNGE;
3350 case LE:
3351 return UNGT;
3352 case LTGT:
3353 return UNEQ;
3354 case GTU:
3355 return LEU;
3356 case GEU:
3357 return LTU;
3358 case LTU:
3359 return GEU;
3360 case LEU:
3361 return GTU;
3362 case UNORDERED:
3363 return ORDERED;
3364 case ORDERED:
3365 return UNORDERED;
3366 case UNLT:
3367 return GE;
3368 case UNLE:
3369 return GT;
3370 case UNGT:
3371 return LE;
3372 case UNGE:
3373 return LT;
3374 case UNEQ:
3375 return LTGT;
3376
3377 default:
3378 abort ();
3379 }
3380 }
3381
3382 /* Similar, but return the code when two operands of a comparison are swapped.
3383 This IS safe for IEEE floating-point. */
3384
3385 enum rtx_code
3386 swap_condition (code)
3387 enum rtx_code code;
3388 {
3389 switch (code)
3390 {
3391 case EQ:
3392 case NE:
3393 case UNORDERED:
3394 case ORDERED:
3395 case UNEQ:
3396 case LTGT:
3397 return code;
3398
3399 case GT:
3400 return LT;
3401 case GE:
3402 return LE;
3403 case LT:
3404 return GT;
3405 case LE:
3406 return GE;
3407 case GTU:
3408 return LTU;
3409 case GEU:
3410 return LEU;
3411 case LTU:
3412 return GTU;
3413 case LEU:
3414 return GEU;
3415 case UNLT:
3416 return UNGT;
3417 case UNLE:
3418 return UNGE;
3419 case UNGT:
3420 return UNLT;
3421 case UNGE:
3422 return UNLE;
3423
3424 default:
3425 abort ();
3426 }
3427 }
3428
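/* A hedged self-check sketch contrasting the two helpers above
   (illustration only; never compiled): */
#if 0
static void
check_condition_helpers ()
{
  /* Negating the test: !(a < b) is (a >= b).  */
  if (reverse_condition (LT) != GE)
    abort ();
  /* Exchanging the operands: (a < b) is (b > a).  */
  if (swap_condition (LT) != GT)
    abort ();
  /* Equality is its own swap, but its reversal is inequality.  */
  if (swap_condition (EQ) != EQ || reverse_condition (EQ) != NE)
    abort ();
}
#endif
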
3429 /* Given a comparison CODE, return the corresponding unsigned comparison.
3430 If CODE is an equality comparison or already an unsigned comparison,
3431 CODE is returned. */
3432
3433 enum rtx_code
3434 unsigned_condition (code)
3435 enum rtx_code code;
3436 {
3437 switch (code)
3438 {
3439 case EQ:
3440 case NE:
3441 case GTU:
3442 case GEU:
3443 case LTU:
3444 case LEU:
3445 return code;
3446
3447 case GT:
3448 return GTU;
3449 case GE:
3450 return GEU;
3451 case LT:
3452 return LTU;
3453 case LE:
3454 return LEU;
3455
3456 default:
3457 abort ();
3458 }
3459 }
3460
3461 /* Similarly, return the signed version of a comparison. */
3462
3463 enum rtx_code
3464 signed_condition (code)
3465 enum rtx_code code;
3466 {
3467 switch (code)
3468 {
3469 case EQ:
3470 case NE:
3471 case GT:
3472 case GE:
3473 case LT:
3474 case LE:
3475 return code;
3476
3477 case GTU:
3478 return GT;
3479 case GEU:
3480 return GE;
3481 case LTU:
3482 return LT;
3483 case LEU:
3484 return LE;
3485
3486 default:
3487 abort ();
3488 }
3489 }
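/* A hedged sketch (illustration only; never compiled):
   unsigned_condition and signed_condition are inverse projections
   on the ordering codes, and equality codes pass through.  */
#if 0
static void
check_sign_projection ()
{
  if (unsigned_condition (GT) != GTU || signed_condition (GTU) != GT)
    abort ();
  if (unsigned_condition (EQ) != EQ || signed_condition (NE) != NE)
    abort ();
}
#endif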
3490 \f
3491 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3492 truth of CODE1 implies the truth of CODE2. */
3493
3494 int
3495 comparison_dominates_p (code1, code2)
3496 enum rtx_code code1, code2;
3497 {
3498 if (code1 == code2)
3499 return 1;
3500
3501 switch (code1)
3502 {
3503 case EQ:
3504 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
3505 || code2 == ORDERED)
3506 return 1;
3507 break;
3508
3509 case LT:
3510 if (code2 == LE || code2 == NE || code2 == ORDERED)
3511 return 1;
3512 break;
3513
3514 case GT:
3515 if (code2 == GE || code2 == NE || code2 == ORDERED)
3516 return 1;
3517 break;
3518
3519 case GE:
3520 case LE:
3521 if (code2 == ORDERED)
3522 return 1;
3523 break;
3524
3525 case LTGT:
3526 if (code2 == NE || code2 == ORDERED)
3527 return 1;
3528 break;
3529
3530 case LTU:
3531 if (code2 == LEU || code2 == NE)
3532 return 1;
3533 break;
3534
3535 case GTU:
3536 if (code2 == GEU || code2 == NE)
3537 return 1;
3538 break;
3539
3540 case UNORDERED:
3541 if (code2 == NE)
3542 return 1;
3543 break;
3544
3545 default:
3546 break;
3547 }
3548
3549 return 0;
3550 }
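/* Illustrative examples:

	comparison_dominates_p (EQ, GE)  ==>  1   x == y implies x >= y
	comparison_dominates_p (LT, NE)  ==>  1   x <  y implies x != y
	comparison_dominates_p (LE, LT)  ==>  0   x <= y does not imply x < y

   Callers use this to decide whether the outcome of one conditional
   branch determines the outcome of a later one (see thread_jumps).  */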
3551 \f
3552 /* Return 1 if INSN is an unconditional jump and nothing else. */
3553
3554 int
3555 simplejump_p (insn)
3556 rtx insn;
3557 {
3558 return (GET_CODE (insn) == JUMP_INSN
3559 && GET_CODE (PATTERN (insn)) == SET
3560 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3561 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3562 }
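/* Illustrative RTL for the only shape simplejump_p accepts (the label
   number is made up):

	(jump_insn (set (pc) (label_ref 23)))

   A jump wrapped in a PARALLEL, a conditional jump, or a tablejump
   all fail this test.  */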
3563
3564 /* Return nonzero if INSN is a (possibly) conditional jump
3565 and nothing more. */
3566
3567 int
3568 condjump_p (insn)
3569 rtx insn;
3570 {
3571 register rtx x = PATTERN (insn);
3572
3573 if (GET_CODE (x) != SET
3574 || GET_CODE (SET_DEST (x)) != PC)
3575 return 0;
3576
3577 x = SET_SRC (x);
3578 if (GET_CODE (x) == LABEL_REF)
3579 return 1;
3580   else
3581     return (GET_CODE (x) == IF_THEN_ELSE
3582 	    && ((GET_CODE (XEXP (x, 2)) == PC
3583 		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
3584 		     || GET_CODE (XEXP (x, 1)) == RETURN))
3585 		|| (GET_CODE (XEXP (x, 1)) == PC
3586 		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
3587 			|| GET_CODE (XEXP (x, 2)) == RETURN))));
3589 }
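/* Illustrative RTL accepted by condjump_p (label number made up):

	(set (pc) (if_then_else (lt (reg:SI 0) (const_int 0))
				(label_ref 42)
				(pc)))

   Either arm of the IF_THEN_ELSE may be the LABEL_REF (or a RETURN);
   the other must be (pc), meaning "fall through".  */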
3590
3591 /* Return nonzero if INSN is a (possibly) conditional jump inside a
3592 PARALLEL. */
3593
3594 int
3595 condjump_in_parallel_p (insn)
3596 rtx insn;
3597 {
3598 register rtx x = PATTERN (insn);
3599
3600 if (GET_CODE (x) != PARALLEL)
3601 return 0;
3602 else
3603 x = XVECEXP (x, 0, 0);
3604
3605 if (GET_CODE (x) != SET)
3606 return 0;
3607 if (GET_CODE (SET_DEST (x)) != PC)
3608 return 0;
3609 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3610 return 1;
3611 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3612 return 0;
3613 if (XEXP (SET_SRC (x), 2) == pc_rtx
3614 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3615 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3616 return 1;
3617 if (XEXP (SET_SRC (x), 1) == pc_rtx
3618 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3619 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3620 return 1;
3621 return 0;
3622 }
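/* Illustrative RTL for the PARALLEL case (register number made up):

	(parallel [(set (pc) (if_then_else ...))
		   (clobber (reg:SI 1))])

   Some ports emit conditional branches this way, with an extra CLOBBER
   or USE alongside the jump itself; only element 0 of the vector is
   examined here.  */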
3623
3624 /* Return the label of a conditional jump, or NULL_RTX if INSN is not one. */
3625
3626 rtx
3627 condjump_label (insn)
3628 rtx insn;
3629 {
3630 register rtx x = PATTERN (insn);
3631
3632 if (GET_CODE (x) == PARALLEL)
3633 x = XVECEXP (x, 0, 0);
3634 if (GET_CODE (x) != SET)
3635 return NULL_RTX;
3636 if (GET_CODE (SET_DEST (x)) != PC)
3637 return NULL_RTX;
3638 x = SET_SRC (x);
3639 if (GET_CODE (x) == LABEL_REF)
3640 return x;
3641 if (GET_CODE (x) != IF_THEN_ELSE)
3642 return NULL_RTX;
3643 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
3644 return XEXP (x, 1);
3645 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
3646 return XEXP (x, 2);
3647 return NULL_RTX;
3648 }
3649
3650 /* Return true if INSN is a (possibly conditional) return insn. */
3651
3652 static int
3653 returnjump_p_1 (loc, data)
3654 rtx *loc;
3655 void *data ATTRIBUTE_UNUSED;
3656 {
3657 rtx x = *loc;
3658 return x && GET_CODE (x) == RETURN;
3659 }
3660
3661 int
3662 returnjump_p (insn)
3663 rtx insn;
3664 {
3665 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
3666 }
3667
3668 /* Return true if INSN is a jump that only transfers control and
3669 nothing more. */
3670
3671 int
3672 onlyjump_p (insn)
3673 rtx insn;
3674 {
3675 rtx set;
3676
3677 if (GET_CODE (insn) != JUMP_INSN)
3678 return 0;
3679
3680 set = single_set (insn);
3681 if (set == NULL)
3682 return 0;
3683 if (GET_CODE (SET_DEST (set)) != PC)
3684 return 0;
3685 if (side_effects_p (SET_SRC (set)))
3686 return 0;
3687
3688 return 1;
3689 }
3690
3691 #ifdef HAVE_cc0
3692
3693 /* Return 1 if X is an RTX that does nothing but set the condition codes
3694    and CLOBBER or USE registers.
3695    Return -1 if X does explicitly set the condition codes but also does
3696    other things; return 0 if X does not set the condition codes at all.  */
3697
3698 int
3699 sets_cc0_p (x)
3700      rtx x;
3701 {
3702 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3703 return 1;
3704 if (GET_CODE (x) == PARALLEL)
3705 {
3706 int i;
3707 int sets_cc0 = 0;
3708 int other_things = 0;
3709 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3710 {
3711 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3712 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3713 sets_cc0 = 1;
3714 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3715 other_things = 1;
3716 }
3717 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3718 }
3719 return 0;
3720 }
3721 #endif
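/* Illustrative examples for sets_cc0_p on a HAVE_cc0 target (register
   numbers made up):

	(set (cc0) (compare (reg:SI 0) (const_int 0)))       ==>  1
	(parallel [(set (cc0) ...) (set (reg:SI 1) ...)])    ==>  -1
	(set (reg:SI 1) (const_int 0))                       ==>  0
*/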
3722 \f
3723 /* Follow any unconditional jump at LABEL;
3724 return the ultimate label reached by any such chain of jumps.
3725 If LABEL is not followed by a jump, return LABEL.
3726    If the chain loops or we can't find the end, return LABEL,
3727    since that tells the caller to avoid changing the insn.
3728
3729 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3730 a USE or CLOBBER. */
3731
3732 rtx
3733 follow_jumps (label)
3734 rtx label;
3735 {
3736 register rtx insn;
3737 register rtx next;
3738 register rtx value = label;
3739 register int depth;
3740
3741 for (depth = 0;
3742 (depth < 10
3743 && (insn = next_active_insn (value)) != 0
3744 && GET_CODE (insn) == JUMP_INSN
3745 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3746 || GET_CODE (PATTERN (insn)) == RETURN)
3747 && (next = NEXT_INSN (insn))
3748 && GET_CODE (next) == BARRIER);
3749 depth++)
3750 {
3751 /* Don't chain through the insn that jumps into a loop
3752 from outside the loop,
3753 since that would create multiple loop entry jumps
3754 and prevent loop optimization. */
3755 rtx tem;
3756 if (!reload_completed)
3757 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3758 if (GET_CODE (tem) == NOTE
3759 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
3760 /* ??? Optional. Disables some optimizations, but makes
3761 gcov output more accurate with -O. */
3762 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
3763 return value;
3764
3765 /* If we have found a cycle, make the insn jump to itself. */
3766 if (JUMP_LABEL (insn) == label)
3767 return label;
3768
3769 tem = next_active_insn (JUMP_LABEL (insn));
3770 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3771 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3772 break;
3773
3774 value = JUMP_LABEL (insn);
3775 }
3776 if (depth == 10)
3777 return label;
3778 return value;
3779 }
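/* Illustrative example (labels made up): given

	jump L1  ...  L1: jump L2  ...  L2: jump L3  ...  L3: <code>

   follow_jumps (L1) returns L3, so the caller can retarget the original
   jump straight to L3.  The depth limit of 10 bounds the walk, and a
   cycle such as "L1: jump L1" simply returns LABEL.  */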
3780
3781 /* Assuming that field IDX of X is a vector of label_refs,
3782 replace each of them by the ultimate label reached by it.
3783 Return nonzero if a change is made.
3784    We do not chain across a NOTE_INSN_LOOP_BEG until reload has completed. */
3785
3786 static int
3787 tension_vector_labels (x, idx)
3788 register rtx x;
3789 register int idx;
3790 {
3791 int changed = 0;
3792 register int i;
3793 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3794 {
3795 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3796 register rtx nlabel = follow_jumps (olabel);
3797 if (nlabel && nlabel != olabel)
3798 {
3799 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3800 ++LABEL_NUSES (nlabel);
3801 if (--LABEL_NUSES (olabel) == 0)
3802 delete_insn (olabel);
3803 changed = 1;
3804 }
3805 }
3806 return changed;
3807 }
3808 \f
3809 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3810 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3811 in INSN, then store one of them in JUMP_LABEL (INSN).
3812 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3813 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3814 Also, when there are consecutive labels, canonicalize on the last of them.
3815
3816 Note that two labels separated by a loop-beginning note
3817 must be kept distinct if we have not yet done loop-optimization,
3818 because the gap between them is where loop-optimize
3819    will want to move invariant code to.  CROSS_JUMP tells us
3820    that loop optimization has already been done.
3821
3822 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3823 two labels distinct if they are separated by only USE or CLOBBER insns. */
3824
3825 static void
3826 mark_jump_label (x, insn, cross_jump, in_mem)
3827 register rtx x;
3828 rtx insn;
3829 int cross_jump;
3830 int in_mem;
3831 {
3832 register RTX_CODE code = GET_CODE (x);
3833 register int i;
3834 register const char *fmt;
3835
3836 switch (code)
3837 {
3838 case PC:
3839 case CC0:
3840 case REG:
3841 case SUBREG:
3842 case CONST_INT:
3843 case CONST_DOUBLE:
3844 case CLOBBER:
3845 case CALL:
3846 return;
3847
3848 case MEM:
3849 in_mem = 1;
3850 break;
3851
3852 case SYMBOL_REF:
3853 if (!in_mem)
3854 return;
3855
3856 /* If this is a constant-pool reference, see if it is a label. */
3857 if (CONSTANT_POOL_ADDRESS_P (x))
3858 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
3859 break;
3860
3861 case LABEL_REF:
3862 {
3863 rtx label = XEXP (x, 0);
3864 rtx olabel = label;
3865 rtx note;
3866 rtx next;
3867
3868 if (GET_CODE (label) != CODE_LABEL)
3869 abort ();
3870
3871 /* Ignore references to labels of containing functions. */
3872 if (LABEL_REF_NONLOCAL_P (x))
3873 break;
3874
3875 /* If there are other labels following this one,
3876 replace it with the last of the consecutive labels. */
3877 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3878 {
3879 if (GET_CODE (next) == CODE_LABEL)
3880 label = next;
3881 else if (cross_jump && GET_CODE (next) == INSN
3882 && (GET_CODE (PATTERN (next)) == USE
3883 || GET_CODE (PATTERN (next)) == CLOBBER))
3884 continue;
3885 else if (GET_CODE (next) != NOTE)
3886 break;
3887 else if (! cross_jump
3888 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3889 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
3890 /* ??? Optional. Disables some optimizations, but
3891 makes gcov output more accurate with -O. */
3892 || (flag_test_coverage && NOTE_LINE_NUMBER (next) > 0)))
3893 break;
3894 }
3895
3896 XEXP (x, 0) = label;
3897 if (! insn || ! INSN_DELETED_P (insn))
3898 ++LABEL_NUSES (label);
3899
3900 if (insn)
3901 {
3902 if (GET_CODE (insn) == JUMP_INSN)
3903 JUMP_LABEL (insn) = label;
3904
3905 /* If we've changed OLABEL and we had a REG_LABEL note
3906 for it, update it as well. */
3907 else if (label != olabel
3908 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3909 XEXP (note, 0) = label;
3910
3911 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3912 is one. */
3913 else if (! find_reg_note (insn, REG_LABEL, label))
3914 {
3915 	     /* This code used to ignore labels which referred to dispatch
3916 		tables to avoid flow.c generating worse code.
3917 
3918 		However, in the presence of global optimizations like
3919 gcse which call find_basic_blocks without calling
3920 life_analysis, not recording such labels will lead
3921 to compiler aborts because of inconsistencies in the
3922 flow graph. So we go ahead and record the label.
3923
3924 It may also be the case that the optimization argument
3925 is no longer valid because of the more accurate cfg
3926 we build in find_basic_blocks -- it no longer pessimizes
3927 code when it finds a REG_LABEL note. */
3928 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
3929 REG_NOTES (insn));
3930 }
3931 }
3932 return;
3933 }
3934
3935 /* Do walk the labels in a vector, but not the first operand of an
3936 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3937 case ADDR_VEC:
3938 case ADDR_DIFF_VEC:
3939 if (! INSN_DELETED_P (insn))
3940 {
3941 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
3942
3943 for (i = 0; i < XVECLEN (x, eltnum); i++)
3944 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
3945 cross_jump, in_mem);
3946 }
3947 return;
3948
3949 default:
3950 break;
3951 }
3952
3953 fmt = GET_RTX_FORMAT (code);
3954 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3955 {
3956 if (fmt[i] == 'e')
3957 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
3958 else if (fmt[i] == 'E')
3959 {
3960 register int j;
3961 for (j = 0; j < XVECLEN (x, i); j++)
3962 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
3963 }
3964 }
3965 }
3966
3967 /* If all INSN does is set the pc, delete it,
3968    and also delete the insn that set the condition codes for it,
3969    if that is all the previous insn did.  */
3970
3971 void
3972 delete_jump (insn)
3973 rtx insn;
3974 {
3975 register rtx set = single_set (insn);
3976
3977 if (set && GET_CODE (SET_DEST (set)) == PC)
3978 delete_computation (insn);
3979 }
3980
3981 /* Verify INSN is a BARRIER and delete it. */
3982
3983 void
3984 delete_barrier (insn)
3985 rtx insn;
3986 {
3987 if (GET_CODE (insn) != BARRIER)
3988 abort ();
3989
3990 delete_insn (insn);
3991 }
3992
3993 /* Recursively delete prior insns that compute the value stored in the
3994    register mentioned by NOTE, a REG_DEAD note associated with INSN,
3995    where that value is used only by INSN (which the caller is deleting).  */
3996
3997 static void
3998 delete_prior_computation (note, insn)
3999 rtx note;
4000 rtx insn;
4001 {
4002 rtx our_prev;
4003 rtx reg = XEXP (note, 0);
4004
4005 for (our_prev = prev_nonnote_insn (insn);
4006 our_prev && (GET_CODE (our_prev) == INSN
4007 || GET_CODE (our_prev) == CALL_INSN);
4008 our_prev = prev_nonnote_insn (our_prev))
4009 {
4010 rtx pat = PATTERN (our_prev);
4011
4012 /* If we reach a CALL which is not calling a const function
4013 or the callee pops the arguments, then give up. */
4014 if (GET_CODE (our_prev) == CALL_INSN
4015 && (! CONST_CALL_P (our_prev)
4016 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
4017 break;
4018
4019 /* If we reach a SEQUENCE, it is too complex to try to
4020 do anything with it, so give up. */
4021 if (GET_CODE (pat) == SEQUENCE)
4022 break;
4023
4024 if (GET_CODE (pat) == USE
4025 && GET_CODE (XEXP (pat, 0)) == INSN)
4026 /* reorg creates USEs that look like this. We leave them
4027 alone because reorg needs them for its own purposes. */
4028 break;
4029
4030 if (reg_set_p (reg, pat))
4031 {
4032 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
4033 break;
4034
4035 if (GET_CODE (pat) == PARALLEL)
4036 {
4037 /* If we find a SET of something else, we can't
4038 delete the insn. */
4039
4040 int i;
4041
4042 for (i = 0; i < XVECLEN (pat, 0); i++)
4043 {
4044 rtx part = XVECEXP (pat, 0, i);
4045
4046 if (GET_CODE (part) == SET
4047 && SET_DEST (part) != reg)
4048 break;
4049 }
4050
4051 if (i == XVECLEN (pat, 0))
4052 delete_computation (our_prev);
4053 }
4054 else if (GET_CODE (pat) == SET
4055 && GET_CODE (SET_DEST (pat)) == REG)
4056 {
4057 int dest_regno = REGNO (SET_DEST (pat));
4058 int dest_endregno
4059 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
4060 ? HARD_REGNO_NREGS (dest_regno,
4061 GET_MODE (SET_DEST (pat))) : 1);
4062 int regno = REGNO (reg);
4063 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
4064 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
4065
4066 if (dest_regno >= regno
4067 && dest_endregno <= endregno)
4068 delete_computation (our_prev);
4069
4070 /* We may have a multi-word hard register and some, but not
4071 all, of the words of the register are needed in subsequent
4072 insns. Write REG_UNUSED notes for those parts that were not
4073 needed. */
4074 else if (dest_regno <= regno
4075 && dest_endregno >= endregno)
4076 {
4077 int i;
4078
4079 REG_NOTES (our_prev)
4080 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (our_prev));
4081
4082 for (i = dest_regno; i < dest_endregno; i++)
4083 if (! find_regno_note (our_prev, REG_UNUSED, i))
4084 break;
4085
4086 if (i == dest_endregno)
4087 delete_computation (our_prev);
4088 }
4089 }
4090
4091 break;
4092 }
4093
4094 /* If PAT references the register that dies here, it is an
4095 additional use. Hence any prior SET isn't dead. However, this
4096 insn becomes the new place for the REG_DEAD note. */
4097 if (reg_overlap_mentioned_p (reg, pat))
4098 {
4099 XEXP (note, 1) = REG_NOTES (our_prev);
4100 REG_NOTES (our_prev) = note;
4101 break;
4102 }
4103 }
4104 }
4105
4106 /* Delete INSN and recursively delete insns that compute values used only
4107 by INSN. This uses the REG_DEAD notes computed during flow analysis.
4108 If we are running before flow.c, we need do nothing since flow.c will
4109 delete dead code. We also can't know if the registers being used are
4110 dead or not at this point.
4111
4112 Otherwise, look at all our REG_DEAD notes. If a previous insn does
4113 nothing other than set a register that dies in this insn, we can delete
4114 that insn as well.
4115
4116 On machines with CC0, if CC0 is used in this insn, we may be able to
4117 delete the insn that set it. */
4118
4119 static void
4120 delete_computation (insn)
4121 rtx insn;
4122 {
4123 rtx note, next;
4124 rtx set;
4125
4126 #ifdef HAVE_cc0
4127 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
4128 {
4129 rtx prev = prev_nonnote_insn (insn);
4130 /* We assume that at this stage
4131 CC's are always set explicitly
4132 and always immediately before the jump that
4133 will use them. So if the previous insn
4134 exists to set the CC's, delete it
4135 (unless it performs auto-increments, etc.). */
4136 if (prev && GET_CODE (prev) == INSN
4137 && sets_cc0_p (PATTERN (prev)))
4138 {
4139 if (sets_cc0_p (PATTERN (prev)) > 0
4140 && ! side_effects_p (PATTERN (prev)))
4141 delete_computation (prev);
4142 else
4143 /* Otherwise, show that cc0 won't be used. */
4144 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
4145 cc0_rtx, REG_NOTES (prev));
4146 }
4147 }
4148 #endif
4149
4150 #ifdef INSN_SCHEDULING
4151 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
4152 reload has completed. The schedulers need to be fixed. Until
4153 they are, we must not rely on the death notes here. */
4154 if (reload_completed && flag_schedule_insns_after_reload)
4155 {
4156 delete_insn (insn);
4157 return;
4158 }
4159 #endif
4160
4161 /* The REG_DEAD note may have been omitted for a register
4162 which is both set and used by the insn. */
4163 set = single_set (insn);
4164 if (set && GET_CODE (SET_DEST (set)) == REG)
4165 {
4166 int dest_regno = REGNO (SET_DEST (set));
4167 int dest_endregno
4168 = dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
4169 ? HARD_REGNO_NREGS (dest_regno,
4170 GET_MODE (SET_DEST (set))) : 1);
4171 int i;
4172
4173 for (i = dest_regno; i < dest_endregno; i++)
4174 {
4175 if (! refers_to_regno_p (i, i + 1, SET_SRC (set), NULL_PTR)
4176 || find_regno_note (insn, REG_DEAD, i))
4177 continue;
4178
4179 note = gen_rtx_EXPR_LIST (REG_DEAD, (i < FIRST_PSEUDO_REGISTER
4180 ? gen_rtx_REG (reg_raw_mode[i], i)
4181 : SET_DEST (set)), NULL_RTX);
4182 delete_prior_computation (note, insn);
4183 }
4184 }
4185
4186 for (note = REG_NOTES (insn); note; note = next)
4187 {
4188 next = XEXP (note, 1);
4189
4190 if (REG_NOTE_KIND (note) != REG_DEAD
4191 /* Verify that the REG_NOTE is legitimate. */
4192 || GET_CODE (XEXP (note, 0)) != REG)
4193 continue;
4194
4195 delete_prior_computation (note, insn);
4196 }
4197
4198 delete_insn (insn);
4199 }
4200 \f
4201 /* Delete insn INSN from the chain of insns and update label ref counts.
4202 May delete some following insns as a consequence; may even delete
4203 a label elsewhere and insns that follow it.
4204
4205 Returns the first insn after INSN that was not deleted. */
4206
4207 rtx
4208 delete_insn (insn)
4209 register rtx insn;
4210 {
4211 register rtx next = NEXT_INSN (insn);
4212 register rtx prev = PREV_INSN (insn);
4213 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
4214 register int dont_really_delete = 0;
4215
4216 while (next && INSN_DELETED_P (next))
4217 next = NEXT_INSN (next);
4218
4219 /* This insn is already deleted => return first following nondeleted. */
4220 if (INSN_DELETED_P (insn))
4221 return next;
4222
4223 if (was_code_label)
4224 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
4225
4226 /* Don't delete user-declared labels. Convert them to special NOTEs
4227 instead. */
4228 if (was_code_label && LABEL_NAME (insn) != 0
4229 && optimize && ! dont_really_delete)
4230 {
4231 PUT_CODE (insn, NOTE);
4232 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
4233 NOTE_SOURCE_FILE (insn) = 0;
4234 dont_really_delete = 1;
4235 }
4236 else
4237 /* Mark this insn as deleted. */
4238 INSN_DELETED_P (insn) = 1;
4239
4240 /* If this is an unconditional jump, delete it from the jump chain. */
4241 if (simplejump_p (insn))
4242 delete_from_jump_chain (insn);
4243
4244 /* If instruction is followed by a barrier,
4245 delete the barrier too. */
4246
4247 if (next != 0 && GET_CODE (next) == BARRIER)
4248 {
4249 INSN_DELETED_P (next) = 1;
4250 next = NEXT_INSN (next);
4251 }
4252
4253 /* Patch out INSN (and the barrier if any) */
4254
4255 if (! dont_really_delete)
4256 {
4257 if (prev)
4258 {
4259 NEXT_INSN (prev) = next;
4260 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
4261 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
4262 XVECLEN (PATTERN (prev), 0) - 1)) = next;
4263 }
4264
4265 if (next)
4266 {
4267 PREV_INSN (next) = prev;
4268 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
4269 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4270 }
4271
4272 if (prev && NEXT_INSN (prev) == 0)
4273 set_last_insn (prev);
4274 }
4275
4276 /* If deleting a jump, decrement the count of the label,
4277 and delete the label if it is now unused. */
4278
4279 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
4280 {
4281 rtx lab = JUMP_LABEL (insn), lab_next;
4282
4283 if (--LABEL_NUSES (lab) == 0)
4284 {
4285 /* This can delete NEXT or PREV,
4286 either directly if NEXT is JUMP_LABEL (INSN),
4287 or indirectly through more levels of jumps. */
4288 delete_insn (lab);
4289
4290 /* I feel a little doubtful about this loop,
4291 but I see no clean and sure alternative way
4292 to find the first insn after INSN that is not now deleted.
4293 I hope this works. */
4294 while (next && INSN_DELETED_P (next))
4295 next = NEXT_INSN (next);
4296 return next;
4297 }
4298 else if ((lab_next = next_nonnote_insn (lab)) != NULL
4299 && GET_CODE (lab_next) == JUMP_INSN
4300 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
4301 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
4302 {
4303 /* If we're deleting the tablejump, delete the dispatch table.
4304 	     We may not be able to kill the label immediately preceding
4305 just yet, as it might be referenced in code leading up to
4306 the tablejump. */
4307 delete_insn (lab_next);
4308 }
4309 }
4310
4311 /* Likewise if we're deleting a dispatch table. */
4312
4313 if (GET_CODE (insn) == JUMP_INSN
4314 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4315 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4316 {
4317 rtx pat = PATTERN (insn);
4318 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4319 int len = XVECLEN (pat, diff_vec_p);
4320
4321 for (i = 0; i < len; i++)
4322 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
4323 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
4324 while (next && INSN_DELETED_P (next))
4325 next = NEXT_INSN (next);
4326 return next;
4327 }
4328
4329 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
4330 prev = PREV_INSN (prev);
4331
4332 /* If INSN was a label and a dispatch table follows it,
4333 delete the dispatch table. The tablejump must have gone already.
4334 It isn't useful to fall through into a table. */
4335
4336 if (was_code_label
4337 && NEXT_INSN (insn) != 0
4338 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4339 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
4340 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
4341 next = delete_insn (NEXT_INSN (insn));
4342
4343 /* If INSN was a label, delete insns following it if now unreachable. */
4344
4345 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
4346 {
4347 register RTX_CODE code;
4348 while (next != 0
4349 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
4350 || code == NOTE || code == BARRIER
4351 || (code == CODE_LABEL && INSN_DELETED_P (next))))
4352 {
4353 if (code == NOTE
4354 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
4355 next = NEXT_INSN (next);
4356 /* Keep going past other deleted labels to delete what follows. */
4357 else if (code == CODE_LABEL && INSN_DELETED_P (next))
4358 next = NEXT_INSN (next);
4359 else
4360 /* Note: if this deletes a jump, it can cause more
4361 deletion of unreachable code, after a different label.
4362 As long as the value from this recursive call is correct,
4363 this invocation functions correctly. */
4364 next = delete_insn (next);
4365 }
4366 }
4367
4368 return next;
4369 }
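/* Illustrative caller pattern: because delete_insn may delete further
   insns (barriers, unused labels, unreachable code), iterate with its
   return value rather than NEXT_INSN.  The predicate here is
   hypothetical:

	rtx insn = get_insns ();
	while (insn)
	  insn = my_insn_is_dead_p (insn) ? delete_insn (insn)
					  : NEXT_INSN (insn);
*/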
4370
4371 /* Advance from INSN till reaching something not deleted
4372 then return that. May return INSN itself. */
4373
4374 rtx
4375 next_nondeleted_insn (insn)
4376 rtx insn;
4377 {
4378 while (INSN_DELETED_P (insn))
4379 insn = NEXT_INSN (insn);
4380 return insn;
4381 }
4382 \f
4383 /* Delete a range of insns from FROM to TO, inclusive.
4384 This is for the sake of peephole optimization, so assume
4385 that whatever these insns do will still be done by a new
4386 peephole insn that will replace them. */
4387
4388 void
4389 delete_for_peephole (from, to)
4390 register rtx from, to;
4391 {
4392 register rtx insn = from;
4393
4394 while (1)
4395 {
4396 register rtx next = NEXT_INSN (insn);
4397 register rtx prev = PREV_INSN (insn);
4398
4399 if (GET_CODE (insn) != NOTE)
4400 {
4401 INSN_DELETED_P (insn) = 1;
4402
4403 /* Patch this insn out of the chain. */
4404 /* We don't do this all at once, because we
4405 must preserve all NOTEs. */
4406 if (prev)
4407 NEXT_INSN (prev) = next;
4408
4409 if (next)
4410 PREV_INSN (next) = prev;
4411 }
4412
4413 if (insn == to)
4414 break;
4415 insn = next;
4416 }
4417
4418 /* Note that if TO is an unconditional jump
4419 we *do not* delete the BARRIER that follows,
4420 since the peephole that replaces this sequence
4421 is also an unconditional jump in that case. */
4422 }
4423 \f
4424 /* We have determined that INSN is never reached, and are about to
4425 delete it. Print a warning if the user asked for one.
4426
4427 To try to make this warning more useful, this should only be called
4428 once per basic block not reached, and it only warns when the basic
4429 block contains more than one line from the current function, and
4430 contains at least one operation. CSE and inlining can duplicate insns,
4431 so it's possible to get spurious warnings from this. */
4432
4433 void
4434 never_reached_warning (avoided_insn)
4435 rtx avoided_insn;
4436 {
4437 rtx insn;
4438 rtx a_line_note = NULL;
4439 int two_avoided_lines = 0;
4440 int contains_insn = 0;
4441
4442 if (! warn_notreached)
4443 return;
4444
4445 /* Scan forwards, looking at LINE_NUMBER notes, until
4446 we hit a LABEL or we run out of insns. */
4447
4448 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
4449 {
4450 if (GET_CODE (insn) == CODE_LABEL)
4451 break;
4452 else if (GET_CODE (insn) == NOTE /* A line number note? */
4453 && NOTE_LINE_NUMBER (insn) >= 0)
4454 {
4455 if (a_line_note == NULL)
4456 a_line_note = insn;
4457 else
4458 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
4459 != NOTE_LINE_NUMBER (insn));
4460 }
4461 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4462 contains_insn = 1;
4463 }
4464 if (two_avoided_lines && contains_insn)
4465 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
4466 NOTE_LINE_NUMBER (a_line_note),
4467 "will never be executed");
4468 }
4469 \f
4470 /* Invert the condition of the jump JUMP, and make it jump
4471 to label NLABEL instead of where it jumps now. */
4472
4473 int
4474 invert_jump (jump, nlabel)
4475 rtx jump, nlabel;
4476 {
4477 /* We have to either invert the condition and change the label or
4478 do neither. Either operation could fail. We first try to invert
4479 the jump. If that succeeds, we try changing the label. If that fails,
4480 we invert the jump back to what it was. */
4481
4482 if (! invert_exp (PATTERN (jump), jump))
4483 return 0;
4484
4485 if (redirect_jump (jump, nlabel))
4486 {
4487 if (flag_branch_probabilities)
4488 {
4489 rtx note = find_reg_note (jump, REG_BR_PROB, 0);
4490
4491 /* An inverted jump means that a probability taken becomes a
4492 probability not taken. Subtract the branch probability from the
4493 probability base to convert it back to a taken probability.
4494 	     (We don't flip the probability on a branch that's never taken.)  */
4495 if (note && XINT (XEXP (note, 0), 0) >= 0)
4496 XINT (XEXP (note, 0), 0) = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
4497 }
4498
4499 return 1;
4500 }
4501
4502 if (! invert_exp (PATTERN (jump), jump))
4503 /* This should just be putting it back the way it was. */
4504 abort ();
4505
4506 return 0;
4507 }
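/* Illustrative use of the invert-then-redirect protocol: to turn

	if (cond) goto A;  goto B;

   into "if (!cond) goto B;" with A's code as the fall-through, a
   caller does

	if (invert_jump (cond_jump, label_B))
	  delete_insn (uncond_jump);

   If invert_jump returns 0, the insn stream is unchanged and no
   cleanup is needed.  (cond_jump, uncond_jump and label_B are
   hypothetical variables.)  */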
4508
4509 /* Invert the jump condition of rtx X contained in jump insn INSN.
4510
4511 Return 1 if we can do so, 0 if we cannot find a way to do so that
4512 matches a pattern. */
4513
4514 int
4515 invert_exp (x, insn)
4516 rtx x;
4517 rtx insn;
4518 {
4519 register RTX_CODE code;
4520 register int i;
4521 register const char *fmt;
4522
4523 code = GET_CODE (x);
4524
4525 if (code == IF_THEN_ELSE)
4526 {
4527 register rtx comp = XEXP (x, 0);
4528 register rtx tem;
4529
4530 /* We can do this in two ways: The preferable way, which can only
4531 be done if this is not an integer comparison, is to reverse
4532 the comparison code. Otherwise, swap the THEN-part and ELSE-part
4533 of the IF_THEN_ELSE. If we can't do either, fail. */
4534
4535 if (can_reverse_comparison_p (comp, insn)
4536 && validate_change (insn, &XEXP (x, 0),
4537 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
4538 GET_MODE (comp), XEXP (comp, 0),
4539 XEXP (comp, 1)), 0))
4540 return 1;
4541
4542 tem = XEXP (x, 1);
4543 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
4544 validate_change (insn, &XEXP (x, 2), tem, 1);
4545 return apply_change_group ();
4546 }
4547
4548 fmt = GET_RTX_FORMAT (code);
4549 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4550 {
4551 if (fmt[i] == 'e')
4552 {
4553 if (! invert_exp (XEXP (x, i), insn))
4554 return 0;
4555 }
4556 else if (fmt[i] == 'E')
4557 {
4558 register int j;
4559 for (j = 0; j < XVECLEN (x, i); j++)
4560 if (!invert_exp (XVECEXP (x, i, j), insn))
4561 return 0;
4562 }
4563 }
4564
4565 return 1;
4566 }
4567 \f
4568 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4569 If the old jump target label is unused as a result,
4570 it and the code following it may be deleted.
4571
4572 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4573 RETURN insn.
4574
4575 The return value will be 1 if the change was made, 0 if it wasn't (this
4576 can only occur for NLABEL == 0). */
4577
4578 int
4579 redirect_jump (jump, nlabel)
4580 rtx jump, nlabel;
4581 {
4582 register rtx olabel = JUMP_LABEL (jump);
4583
4584 if (nlabel == olabel)
4585 return 1;
4586
4587 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
4588 return 0;
4589
4590 /* If this is an unconditional branch, delete it from the jump_chain of
4591 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4592 have UID's in range and JUMP_CHAIN is valid). */
4593 if (jump_chain && (simplejump_p (jump)
4594 || GET_CODE (PATTERN (jump)) == RETURN))
4595 {
4596 int label_index = nlabel ? INSN_UID (nlabel) : 0;
4597
4598 delete_from_jump_chain (jump);
4599 if (label_index < max_jump_chain
4600 && INSN_UID (jump) < max_jump_chain)
4601 {
4602 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
4603 jump_chain[label_index] = jump;
4604 }
4605 }
4606
4607 JUMP_LABEL (jump) = nlabel;
4608 if (nlabel)
4609 ++LABEL_NUSES (nlabel);
4610
4611 /* If we're eliding the jump over exception cleanups at the end of a
4612 function, move the function end note so that -Wreturn-type works. */
4613 if (olabel && NEXT_INSN (olabel)
4614 && GET_CODE (NEXT_INSN (olabel)) == NOTE
4615 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
4616 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
4617
4618 if (olabel && --LABEL_NUSES (olabel) == 0)
4619 delete_insn (olabel);
4620
4621 return 1;
4622 }
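/* Illustrative example: redirect_jump (jump, 0) tries to turn JUMP into
   a (possibly conditional) RETURN insn, e.g.

	(set (pc) (label_ref L))   ==>   (return)

   and this is the only case that can fail: the transformed pattern may
   not match anything the target recognizes.  */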
4623
4624 /* Delete the instruction JUMP from any jump chain it might be on. */
4625
4626 static void
4627 delete_from_jump_chain (jump)
4628 rtx jump;
4629 {
4630 int index;
4631 rtx olabel = JUMP_LABEL (jump);
4632
4633 /* Handle unconditional jumps. */
4634 if (jump_chain && olabel != 0
4635 && INSN_UID (olabel) < max_jump_chain
4636 && simplejump_p (jump))
4637 index = INSN_UID (olabel);
4638 /* Handle return insns. */
4639 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
4640 index = 0;
4641 else return;
4642
4643 if (jump_chain[index] == jump)
4644 jump_chain[index] = jump_chain[INSN_UID (jump)];
4645 else
4646 {
4647 rtx insn;
4648
4649 for (insn = jump_chain[index];
4650 insn != 0;
4651 insn = jump_chain[INSN_UID (insn)])
4652 if (jump_chain[INSN_UID (insn)] == jump)
4653 {
4654 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
4655 break;
4656 }
4657 }
4658 }
4659
4660 /* If NLABEL is nonzero, throughout the rtx at LOC,
4661 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
4662 zero, alter (RETURN) to (LABEL_REF NLABEL).
4663
4664 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
4665 validity with validate_change. Convert (set (pc) (label_ref olabel))
4666 to (return).
4667
4668 Return 0 if we found a change we would like to make but it is invalid.
4669 Otherwise, return 1. */
4670
4671 int
4672 redirect_exp (loc, olabel, nlabel, insn)
4673 rtx *loc;
4674 rtx olabel, nlabel;
4675 rtx insn;
4676 {
4677 register rtx x = *loc;
4678 register RTX_CODE code = GET_CODE (x);
4679 register int i;
4680 register const char *fmt;
4681
4682 if (code == LABEL_REF)
4683 {
4684 if (XEXP (x, 0) == olabel)
4685 {
4686 if (nlabel)
4687 XEXP (x, 0) = nlabel;
4688 else
4689 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4690 return 1;
4691 }
4692 }
4693 else if (code == RETURN && olabel == 0)
4694 {
4695 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
4696 if (loc == &PATTERN (insn))
4697 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
4698 return validate_change (insn, loc, x, 0);
4699 }
4700
4701 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
4702 && GET_CODE (SET_SRC (x)) == LABEL_REF
4703 && XEXP (SET_SRC (x), 0) == olabel)
4704 return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
4705
4706 fmt = GET_RTX_FORMAT (code);
4707 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4708 {
4709 if (fmt[i] == 'e')
4710 {
4711 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
4712 return 0;
4713 }
4714 else if (fmt[i] == 'E')
4715 {
4716 register int j;
4717 for (j = 0; j < XVECLEN (x, i); j++)
4718 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
4719 return 0;
4720 }
4721 }
4722
4723 return 1;
4724 }
4725 \f
4726 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4727
4728 If the old jump target label (before the dispatch table) becomes unused,
4729 it and the dispatch table may be deleted. In that case, find the insn
4730 before the jump references that label and delete it and logical successors
4731 too. */
4732
4733 static void
4734 redirect_tablejump (jump, nlabel)
4735 rtx jump, nlabel;
4736 {
4737 register rtx olabel = JUMP_LABEL (jump);
4738
4739 /* Add this jump to the jump_chain of NLABEL. */
4740 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
4741 && INSN_UID (jump) < max_jump_chain)
4742 {
4743 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
4744 jump_chain[INSN_UID (nlabel)] = jump;
4745 }
4746
4747 PATTERN (jump) = gen_jump (nlabel);
4748 JUMP_LABEL (jump) = nlabel;
4749 ++LABEL_NUSES (nlabel);
4750 INSN_CODE (jump) = -1;
4751
4752 if (--LABEL_NUSES (olabel) == 0)
4753 {
4754 delete_labelref_insn (jump, olabel, 0);
4755 delete_insn (olabel);
4756 }
4757 }
4758
4759 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4760 If we found one, delete it and then delete this insn if DELETE_THIS is
4761 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4762
4763 static int
4764 delete_labelref_insn (insn, label, delete_this)
4765 rtx insn, label;
4766 int delete_this;
4767 {
4768 int deleted = 0;
4769 rtx link;
4770
4771 if (GET_CODE (insn) != NOTE
4772 && reg_mentioned_p (label, PATTERN (insn)))
4773 {
4774 if (delete_this)
4775 {
4776 delete_insn (insn);
4777 deleted = 1;
4778 }
4779 else
4780 return 1;
4781 }
4782
4783 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
4784 if (delete_labelref_insn (XEXP (link, 0), label, 1))
4785 {
4786 if (delete_this)
4787 {
4788 delete_insn (insn);
4789 deleted = 1;
4790 }
4791 else
4792 return 1;
4793 }
4794
4795 return deleted;
4796 }
4797 \f
4798 /* Like rtx_equal_p except that it considers two REGs as equal
4799 if they renumber to the same value and considers two commutative
4800 operations to be the same if the order of the operands has been
4801 reversed.
4802
4803 ??? Addition is not commutative on the PA due to the weird implicit
4804 space register selection rules for memory addresses. Therefore, we
4805 don't consider a + b == b + a.
4806
4807 We could/should make this test a little tighter. Possibly only
4808 disabling it on the PA via some backend macro or only disabling this
4809 case when the PLUS is inside a MEM. */
4810
4811 int
4812 rtx_renumbered_equal_p (x, y)
4813 rtx x, y;
4814 {
4815 register int i;
4816 register RTX_CODE code = GET_CODE (x);
4817 register const char *fmt;
4818
4819 if (x == y)
4820 return 1;
4821
4822 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4823 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4824 && GET_CODE (SUBREG_REG (y)) == REG)))
4825 {
4826 int reg_x = -1, reg_y = -1;
4827 int word_x = 0, word_y = 0;
4828
4829 if (GET_MODE (x) != GET_MODE (y))
4830 return 0;
4831
4832 /* If we haven't done any renumbering, don't
4833 make any assumptions. */
4834 if (reg_renumber == 0)
4835 return rtx_equal_p (x, y);
4836
4837 if (code == SUBREG)
4838 {
4839 reg_x = REGNO (SUBREG_REG (x));
4840 word_x = SUBREG_WORD (x);
4841
4842 if (reg_renumber[reg_x] >= 0)
4843 {
4844 reg_x = reg_renumber[reg_x] + word_x;
4845 word_x = 0;
4846 }
4847 }
4848
4849 else
4850 {
4851 reg_x = REGNO (x);
4852 if (reg_renumber[reg_x] >= 0)
4853 reg_x = reg_renumber[reg_x];
4854 }
4855
4856 if (GET_CODE (y) == SUBREG)
4857 {
4858 reg_y = REGNO (SUBREG_REG (y));
4859 word_y = SUBREG_WORD (y);
4860
4861 if (reg_renumber[reg_y] >= 0)
4862 {
4863 reg_y = reg_renumber[reg_y];
4864 word_y = 0;
4865 }
4866 }
4867
4868 else
4869 {
4870 reg_y = REGNO (y);
4871 if (reg_renumber[reg_y] >= 0)
4872 reg_y = reg_renumber[reg_y];
4873 }
4874
4875 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4876 }
4877
4878 /* Now we have disposed of all the cases
4879 in which different rtx codes can match. */
4880 if (code != GET_CODE (y))
4881 return 0;
4882
4883 switch (code)
4884 {
4885 case PC:
4886 case CC0:
4887 case ADDR_VEC:
4888 case ADDR_DIFF_VEC:
4889 return 0;
4890
4891 case CONST_INT:
4892 return INTVAL (x) == INTVAL (y);
4893
4894 case LABEL_REF:
4895 /* We can't assume nonlocal labels have their following insns yet. */
4896 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4897 return XEXP (x, 0) == XEXP (y, 0);
4898
4899 /* Two label-refs are equivalent if they point at labels
4900 in the same position in the instruction stream. */
4901 return (next_real_insn (XEXP (x, 0))
4902 == next_real_insn (XEXP (y, 0)));
4903
4904 case SYMBOL_REF:
4905 return XSTR (x, 0) == XSTR (y, 0);
4906
4907 case CODE_LABEL:
4908 /* If we didn't match EQ equality above, they aren't the same. */
4909 return 0;
4910
4911 default:
4912 break;
4913 }
4914
4915 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4916
4917 if (GET_MODE (x) != GET_MODE (y))
4918 return 0;
4919
4920 /* For commutative operations, the RTXs match if the operands match in
4921    either order.  Also handle the simple binary and unary cases without a loop.
4922
4923 ??? Don't consider PLUS a commutative operator; see comments above. */
4924 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4925 && code != PLUS)
4926 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4927 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4928 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4929 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4930 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4931 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4932 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4933 else if (GET_RTX_CLASS (code) == '1')
4934 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4935
4936 /* Compare the elements. If any pair of corresponding elements
4937    fails to match, return 0 for the whole thing.  */
4938
4939 fmt = GET_RTX_FORMAT (code);
4940 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4941 {
4942 register int j;
4943 switch (fmt[i])
4944 {
4945 case 'w':
4946 if (XWINT (x, i) != XWINT (y, i))
4947 return 0;
4948 break;
4949
4950 case 'i':
4951 if (XINT (x, i) != XINT (y, i))
4952 return 0;
4953 break;
4954
4955 case 's':
4956 if (strcmp (XSTR (x, i), XSTR (y, i)))
4957 return 0;
4958 break;
4959
4960 case 'e':
4961 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4962 return 0;
4963 break;
4964
4965 case 'u':
4966 if (XEXP (x, i) != XEXP (y, i))
4967 return 0;
4968 /* fall through. */
4969 case '0':
4970 break;
4971
4972 case 'E':
4973 if (XVECLEN (x, i) != XVECLEN (y, i))
4974 return 0;
4975 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4976 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4977 return 0;
4978 break;
4979
4980 default:
4981 abort ();
4982 }
4983 }
4984 return 1;
4985 }
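/* Illustrative example (register numbers made up): if register
   allocation assigned pseudo 70 to hard register 3, so that
   reg_renumber[70] == 3, then

	(reg:SI 70)  and  (reg:SI 3)

   compare equal here even though rtx_equal_p would reject them.  */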
4986 \f
4987 /* If X is a hard register or equivalent to one or a subregister of one,
4988 return the hard register number. If X is a pseudo register that was not
4989 assigned a hard register, return the pseudo register number. Otherwise,
4990 return -1. Any rtx is valid for X. */
4991
4992 int
4993 true_regnum (x)
4994 rtx x;
4995 {
4996 if (GET_CODE (x) == REG)
4997 {
4998 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4999 return reg_renumber[REGNO (x)];
5000 return REGNO (x);
5001 }
5002 if (GET_CODE (x) == SUBREG)
5003 {
5004 int base = true_regnum (SUBREG_REG (x));
5005 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
5006 return SUBREG_WORD (x) + base;
5007 }
5008 return -1;
5009 }
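/* Illustrative examples (register numbers made up, assuming
   reg_renumber[70] == 4):

	true_regnum ((reg:SI 3))                   ==>  3
	true_regnum ((reg:SI 70))                  ==>  4
	true_regnum ((subreg:SI (reg:DI 70) 1))    ==>  5
	true_regnum ((const_int 0))                ==>  -1
*/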
5010 \f
5011 /* Optimize code of the form:
5012
5013 for (x = a[i]; x; ...)
5014 ...
5015 for (x = a[i]; x; ...)
5016 ...
5017 foo:
5018
5019 Loop optimize will change the above code into
5020
5021 if (x = a[i])
5022 for (;;)
5023 { ...; if (! (x = ...)) break; }
5024 if (x = a[i])
5025 for (;;)
5026 { ...; if (! (x = ...)) break; }
5027 foo:
5028
5029 In general, if the first test fails, the program can branch
5030    directly to `foo' and skip the second try, which is doomed to fail.
5031 We run this after loop optimization and before flow analysis. */
5032
5033 /* When comparing the insn patterns, we track the fact that different
5034 pseudo-register numbers may have been used in each computation.
5035 The following array stores an equivalence -- same_regs[I] == J means
5036 that pseudo register I was used in the first set of tests in a context
5037 where J was used in the second set. We also count the number of such
5038    pending equivalences.  If that count is still nonzero at the end, the
5039    expressions really aren't the same.  */
5040
5041 static int *same_regs;
5042
5043 static int num_same_regs;
5044
5045 /* Track any registers modified between the target of the first jump and
5046 the second jump. They never compare equal. */
5047
5048 static char *modified_regs;
5049
5050 /* Record if memory was modified. */
5051
5052 static int modified_mem;
5053
5054 /* Called via note_stores on each insn between the target of the first
5055 branch and the second branch. It marks any changed registers. */
5056
5057 static void
5058 mark_modified_reg (dest, x, data)
5059 rtx dest;
5060 rtx x ATTRIBUTE_UNUSED;
5061 void *data ATTRIBUTE_UNUSED;
5062 {
5063 int regno, i;
5064
5065 if (GET_CODE (dest) == SUBREG)
5066 dest = SUBREG_REG (dest);
5067
5068 if (GET_CODE (dest) == MEM)
5069 modified_mem = 1;
5070
5071 if (GET_CODE (dest) != REG)
5072 return;
5073
5074 regno = REGNO (dest);
5075 if (regno >= FIRST_PSEUDO_REGISTER)
5076 modified_regs[regno] = 1;
5077 else
5078 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
5079 modified_regs[regno + i] = 1;
5080 }
5081
5082 /* F is the first insn in the chain of insns. */
5083
5084 void
5085 thread_jumps (f, max_reg, flag_before_loop)
5086 rtx f;
5087 int max_reg;
5088 int flag_before_loop;
5089 {
5090 /* Basic algorithm is to find a conditional branch,
5091 the label it may branch to, and the branch after
5092 that label. If the two branches test the same condition,
5093 walk back from both branch paths until the insn patterns
5094 differ, or code labels are hit. If we make it back to
5095 the target of the first branch, then we know that the first branch
5096 will either always succeed or always fail depending on the relative
5097 senses of the two branches. So adjust the first branch accordingly
5098 in this case. */
5099
5100 rtx label, b1, b2, t1, t2;
5101 enum rtx_code code1, code2;
5102 rtx b1op0, b1op1, b2op0, b2op1;
5103 int changed = 1;
5104 int i;
5105 int *all_reset;
5106
5107 /* Allocate register tables and quick-reset table. */
5108 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
5109 same_regs = (int *) xmalloc (max_reg * sizeof (int));
5110 all_reset = (int *) xmalloc (max_reg * sizeof (int));
5111 for (i = 0; i < max_reg; i++)
5112 all_reset[i] = -1;
5113
5114 while (changed)
5115 {
5116 changed = 0;
5117
5118 for (b1 = f; b1; b1 = NEXT_INSN (b1))
5119 {
5120 /* Get to a candidate branch insn. */
5121 if (GET_CODE (b1) != JUMP_INSN
5122 || ! condjump_p (b1) || simplejump_p (b1)
5123 || JUMP_LABEL (b1) == 0)
5124 continue;
5125
5126 bzero (modified_regs, max_reg * sizeof (char));
5127 modified_mem = 0;
5128
5129 bcopy ((char *) all_reset, (char *) same_regs,
5130 max_reg * sizeof (int));
5131 num_same_regs = 0;
5132
5133 label = JUMP_LABEL (b1);
5134
5135 /* Look for a branch after the target. Record any registers and
5136 memory modified between the target and the branch. Stop when we
5137 get to a label since we can't know what was changed there. */
5138 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
5139 {
5140 if (GET_CODE (b2) == CODE_LABEL)
5141 break;
5142
5143 else if (GET_CODE (b2) == JUMP_INSN)
5144 {
5145 /* If this is an unconditional jump and is the only use of
5146 its target label, we can follow it. */
5147 if (simplejump_p (b2)
5148 && JUMP_LABEL (b2) != 0
5149 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
5150 {
5151 b2 = JUMP_LABEL (b2);
5152 continue;
5153 }
5154 else
5155 break;
5156 }
5157
5158 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
5159 continue;
5160
5161 if (GET_CODE (b2) == CALL_INSN)
5162 {
5163 modified_mem = 1;
5164 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5165 if (call_used_regs[i] && ! fixed_regs[i]
5166 && i != STACK_POINTER_REGNUM
5167 && i != FRAME_POINTER_REGNUM
5168 && i != HARD_FRAME_POINTER_REGNUM
5169 && i != ARG_POINTER_REGNUM)
5170 modified_regs[i] = 1;
5171 }
5172
5173 note_stores (PATTERN (b2), mark_modified_reg, NULL);
5174 }
5175
5176 /* Check the next candidate branch insn from the label
5177 of the first. */
5178 if (b2 == 0
5179 || GET_CODE (b2) != JUMP_INSN
5180 || b2 == b1
5181 || ! condjump_p (b2)
5182 || simplejump_p (b2))
5183 continue;
5184
5185 /* Get the comparison codes and operands, reversing the
5186 codes if appropriate. If we don't have comparison codes,
5187 we can't do anything. */
5188 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
5189 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
5190 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
5191 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
5192 code1 = reverse_condition (code1);
5193
5194 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
5195 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
5196 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
5197 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
5198 code2 = reverse_condition (code2);
5199
5200 /* If they test the same things and knowing that B1 branches
5201 tells us whether or not B2 branches, check if we
5202 can thread the branch. */
5203 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
5204 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
5205 && (comparison_dominates_p (code1, code2)
5206 || (can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
5207 0),
5208 b1)
5209 && comparison_dominates_p (code1, reverse_condition (code2)))))
5210
5211 {
5212 t1 = prev_nonnote_insn (b1);
5213 t2 = prev_nonnote_insn (b2);
5214
5215 while (t1 != 0 && t2 != 0)
5216 {
5217 if (t2 == label)
5218 {
5219 /* We have reached the target of the first branch.
5220 If there are no pending register equivalents,
5221 we know that this branch will either always
5222 succeed (if the senses of the two branches are
5223 the same) or always fail (if not). */
5224 rtx new_label;
5225
5226 if (num_same_regs != 0)
5227 break;
5228
5229 if (comparison_dominates_p (code1, code2))
5230 new_label = JUMP_LABEL (b2);
5231 else
5232 new_label = get_label_after (b2);
5233
5234 if (JUMP_LABEL (b1) != new_label)
5235 {
5236 rtx prev = PREV_INSN (new_label);
5237
5238 if (flag_before_loop
5239 && GET_CODE (prev) == NOTE
5240 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
5241 {
5242 /* Don't thread to the loop label. If a loop
5243 label is reused, loop optimization will
5244 be disabled for that loop. */
5245 new_label = gen_label_rtx ();
5246 emit_label_after (new_label, PREV_INSN (prev));
5247 }
5248 changed |= redirect_jump (b1, new_label);
5249 }
5250 break;
5251 }
5252
5253 /* If either of these is not a normal insn (it might be
5254 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
5255 have already been skipped above.) Similarly, fail
5256 if the insns are different. */
5257 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
5258 || recog_memoized (t1) != recog_memoized (t2)
5259 || ! rtx_equal_for_thread_p (PATTERN (t1),
5260 PATTERN (t2), t2))
5261 break;
5262
5263 t1 = prev_nonnote_insn (t1);
5264 t2 = prev_nonnote_insn (t2);
5265 }
5266 }
5267 }
5268 }
5269
5270 /* Clean up. */
5271 free (modified_regs);
5272 free (same_regs);
5273 free (all_reset);
5274 }
5275 \f
5276 /* This is like RTX_EQUAL_P except that it knows about our handling of
5277 possibly equivalent registers and knows to consider volatile and
5278 modified objects as not equal.
5279
5280 YINSN is the insn containing Y. */
5281
5282 int
5283 rtx_equal_for_thread_p (x, y, yinsn)
5284 rtx x, y;
5285 rtx yinsn;
5286 {
5287 register int i;
5288 register int j;
5289 register enum rtx_code code;
5290 register const char *fmt;
5291
5292 code = GET_CODE (x);
5293 /* Rtx's of different codes cannot be equal. */
5294 if (code != GET_CODE (y))
5295 return 0;
5296
5297 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
5298 (REG:SI x) and (REG:HI x) are NOT equivalent. */
5299
5300 if (GET_MODE (x) != GET_MODE (y))
5301 return 0;
5302
5303 /* For floating-point, consider everything unequal. This is a bit
5304 pessimistic, but this pass would only rarely do anything for FP
5305 anyway. */
5306 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
5307 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
5308 return 0;
5309
5310 /* For commutative operations, the RTXs match if the operands match in
5311    either order.  Also handle the simple binary and unary cases without a loop. */
5312 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5313 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5314 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
5315 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
5316 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
5317 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
5318 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
5319 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
5320 else if (GET_RTX_CLASS (code) == '1')
5321 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5322
5323 /* Handle special-cases first. */
5324 switch (code)
5325 {
5326 case REG:
5327 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
5328 return 1;
5329
5330       /* If neither is a user variable or a hard register, check for a
5331 	 possible equivalence.  */
5332 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
5333 || REGNO (x) < FIRST_PSEUDO_REGISTER
5334 || REGNO (y) < FIRST_PSEUDO_REGISTER)
5335 return 0;
5336
5337 if (same_regs[REGNO (x)] == -1)
5338 {
5339 same_regs[REGNO (x)] = REGNO (y);
5340 num_same_regs++;
5341
5342 /* If this is the first time we are seeing a register on the `Y'
5343 side, see if it is the last use. If not, we can't thread the
5344 jump, so mark it as not equivalent. */
5345 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
5346 return 0;
5347
5348 return 1;
5349 }
5350 else
5351 return (same_regs[REGNO (x)] == REGNO (y));
5352
5353 break;
5354
5355 case MEM:
5356 /* If memory modified or either volatile, not equivalent.
5357 Else, check address. */
5358 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5359 return 0;
5360
5361 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
5362
5363 case ASM_INPUT:
5364 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
5365 return 0;
5366
5367 break;
5368
5369 case SET:
5370 /* Cancel a pending `same_regs' if setting equivalenced registers.
5371 Then process source. */
5372 if (GET_CODE (SET_DEST (x)) == REG
5373 && GET_CODE (SET_DEST (y)) == REG)
5374 {
5375 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
5376 {
5377 same_regs[REGNO (SET_DEST (x))] = -1;
5378 num_same_regs--;
5379 }
5380 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
5381 return 0;
5382 }
5383 else
5384 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
5385 return 0;
5386
5387 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
5388
5389 case LABEL_REF:
5390 return XEXP (x, 0) == XEXP (y, 0);
5391
5392 case SYMBOL_REF:
5393 return XSTR (x, 0) == XSTR (y, 0);
5394
5395 default:
5396 break;
5397 }
5398
5399 if (x == y)
5400 return 1;
5401
5402 fmt = GET_RTX_FORMAT (code);
5403 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5404 {
5405 switch (fmt[i])
5406 {
5407 case 'w':
5408 if (XWINT (x, i) != XWINT (y, i))
5409 return 0;
5410 break;
5411
5412 case 'n':
5413 case 'i':
5414 if (XINT (x, i) != XINT (y, i))
5415 return 0;
5416 break;
5417
5418 case 'V':
5419 case 'E':
5420 /* Two vectors must have the same length. */
5421 if (XVECLEN (x, i) != XVECLEN (y, i))
5422 return 0;
5423
5424 /* And the corresponding elements must match. */
5425 for (j = 0; j < XVECLEN (x, i); j++)
5426 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
5427 XVECEXP (y, i, j), yinsn) == 0)
5428 return 0;
5429 break;
5430
5431 case 'e':
5432 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
5433 return 0;
5434 break;
5435
5436 case 'S':
5437 case 's':
5438 if (strcmp (XSTR (x, i), XSTR (y, i)))
5439 return 0;
5440 break;
5441
5442 case 'u':
5443 /* These are just backpointers, so they don't matter. */
5444 break;
5445
5446 case '0':
5447 case 't':
5448 break;
5449
5450 /* It is believed that rtx's at this level will never
5451 contain anything but integers and other rtx's,
5452 except for within LABEL_REFs and SYMBOL_REFs. */
5453 default:
5454 abort ();
5455 }
5456 }
5457 return 1;
5458 }
5459 \f
5460
5461 #if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)
5462 /* Return the insn in front of which NEW can safely be inserted, searching
5463    backward from the jump insn INSN.  Return 0 if it is not safe to do this
5464    jump optimization.  Note that NEW must contain a single set. */
5465
5466 static rtx
5467 find_insert_position (insn, new)
5468 rtx insn;
5469 rtx new;
5470 {
5471 int i;
5472 rtx prev;
5473
5474 /* If NEW does not clobber, it is safe to insert NEW before INSN. */
5475 if (GET_CODE (PATTERN (new)) != PARALLEL)
5476 return insn;
5477
5478 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5479 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5480 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5481 insn))
5482 break;
5483
5484 if (i < 0)
5485 return insn;
5486
5487 /* There is a good chance that the previous insn PREV sets the thing
5488 being clobbered (often the CC in a hard reg). If PREV does not
5489 use what NEW sets, we can insert NEW before PREV. */
5490
5491 prev = prev_active_insn (insn);
5492 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
5493 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
5494 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5495 insn)
5496 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
5497 prev))
5498 return 0;
5499
5500 return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
5501 }
5502 #endif /* !HAVE_cc0 && !HAVE_conditional_arithmetic */