re PR target/13926 (GCC generates jumps that are too large to fit in word displacemen...
[gcc.git] / gcc / reorg.c
1 /* Perform instruction reorganizations for delay slot filling.
2 Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
5 Hacked by Michael Tiemann (tiemann@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 02111-1307, USA. */
23
24 /* Instruction reorganization pass.
25
26 This pass runs after register allocation and final jump
27 optimization. It should be the last pass to run before peephole.
28 It serves primarily to fill delay slots of insns, typically branch
29 and call insns. Other insns typically involve more complicated
30 interactions of data dependencies and resource constraints, and
31 are better handled by scheduling before register allocation (by the
32 function `schedule_insns').
33
34 The Branch Penalty is the number of extra cycles that are needed to
35 execute a branch insn. On an ideal machine, branches take a single
36 cycle, and the Branch Penalty is 0. Several RISC machines approach
37 branch delays differently:
38
39 The MIPS has a single branch delay slot. Most insns
40 (except other branches) can be used to fill this slot. When the
41 slot is filled, two insns execute in two cycles, reducing the
42 branch penalty to zero.
43
44 The SPARC always has a branch delay slot, but its effects can be
45 annulled when the branch is not taken. This means that, failing to
46 find other sources of insns, we can hoist an insn from the branch
47 target that would only be safe to execute knowing that the branch
48 is taken.
49
50 The HP-PA always has a branch delay slot. For unconditional branches
51 its effects can be annulled when the branch is taken. The effects
52 of the delay slot in a conditional branch can be nullified for forward
53 taken branches, or for untaken backward branches. This means
54 we can hoist insns from the fall-through path for forward branches or
55 steal insns from the target of backward branches.
56
57 The TMS320C3x and C4x have three branch delay slots. When the three
58 slots are filled, the branch penalty is zero. Most insns can fill the
59 delay slots except jump insns.
60
61 Three techniques for filling delay slots have been implemented so far:
62
63 (1) `fill_simple_delay_slots' is the simplest, most efficient way
64 to fill delay slots. This pass first looks for insns which come
65 from before the branch and which are safe to execute after the
66 branch. Then it searches after the insn requiring delay slots or,
67 in the case of a branch, for insns that are after the point at
68 which the branch merges into the fallthrough code, if such a point
69 exists. When such insns are found, the branch penalty decreases
70 and no code expansion takes place.
71
72 (2) `fill_eager_delay_slots' is more complicated: it is used for
73 scheduling conditional jumps, or for scheduling jumps which cannot
74 be filled using (1). A machine need not have annulled jumps to use
75 this strategy, but it helps (by keeping more options open).
76 `fill_eager_delay_slots' tries to guess the direction the branch
77 will go; if it guesses right 100% of the time, it can reduce the
78 branch penalty as much as `fill_simple_delay_slots' does. If it
79 guesses wrong 100% of the time, it might as well schedule nops. When
80 `fill_eager_delay_slots' takes insns from the fall-through path of
81 the jump, usually there is no code expansion; when it takes insns
82 from the branch target, there is code expansion if it is not the
83 only way to reach that target.
84
85 (3) `relax_delay_slots' uses a set of rules to simplify code that
86 has been reorganized by (1) and (2). It finds cases where
87 a conditional test can be eliminated, jumps can be threaded, extra
88 insns can be eliminated, etc. It is the job of (1) and (2) to do a
89 good job of scheduling locally; `relax_delay_slots' takes care of
90 making the various individual schedules work well together. It is
91 especially tuned to handle the control flow interactions of branch
92 insns. It does nothing for insns with delay slots that do not
93 branch.
94
95 On machines that use CC0, we are very conservative. We will not make
96 a copy of an insn involving CC0 since we want to maintain a 1-1
97 correspondence between the insn that sets CC0 and the insn that uses it. The insns are
98 allowed to be separated by placing an insn that sets CC0 (but not an insn
99 that uses CC0; we could do this, but it doesn't seem worthwhile) in a
100 delay slot. In that case, we point each insn at the other with REG_CC_USER
101 and REG_CC_SETTER notes. Note that these restrictions affect very few
102 machines because most RISC machines with delay slots will not use CC0
103 (the RT is the only known exception at this point).
104
105 Not yet implemented:
106
107 The Acorn Risc Machine can conditionally execute most insns, so
108 it is profitable to move single insns into a position to execute
109 based on the condition code of the previous insn.
110
111 The HP-PA can conditionally nullify insns, providing a similar
112 effect to the ARM, differing mostly in which insn is "in charge". */
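
/* An illustrative sketch (an editor's example, not authoritative): after
   a delay slot has been filled, the branch and its delay insns are
   wrapped in a single insn whose pattern is a SEQUENCE, roughly

	(insn (sequence [
	   (jump_insn (set (pc) (label_ref L1)))	; the branch itself
	   (insn (set (reg r4) (reg r5)))		; fills the slot
	 ]))

   where L1, r4 and r5 are made-up names; `emit_delay_sequence' below
   constructs the real representation. */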
113
114 #include "config.h"
115 #include "system.h"
116 #include "coretypes.h"
117 #include "tm.h"
118 #include "toplev.h"
119 #include "rtl.h"
120 #include "tm_p.h"
121 #include "expr.h"
122 #include "function.h"
123 #include "insn-config.h"
124 #include "conditions.h"
125 #include "hard-reg-set.h"
126 #include "basic-block.h"
127 #include "regs.h"
128 #include "recog.h"
129 #include "flags.h"
130 #include "output.h"
131 #include "obstack.h"
132 #include "insn-attr.h"
133 #include "resource.h"
134 #include "except.h"
135 #include "params.h"
136
137 #ifdef DELAY_SLOTS
138
139 #ifndef ANNUL_IFTRUE_SLOTS
140 #define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
141 #endif
142 #ifndef ANNUL_IFFALSE_SLOTS
143 #define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
144 #endif
145
146 /* Insns which have delay slots that have not yet been filled. */
147
148 static struct obstack unfilled_slots_obstack;
149 static rtx *unfilled_firstobj;
150
151 /* Define macros to refer to the first and last slot containing unfilled
152 insns. These are used because the list may move and its address
153 should be recomputed at each use. */
154
155 #define unfilled_slots_base \
156 ((rtx *) obstack_base (&unfilled_slots_obstack))
157
158 #define unfilled_slots_next \
159 ((rtx *) obstack_next_free (&unfilled_slots_obstack))
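
/* A hedged sketch of how these macros are used (the obstack can grow and
   move, hence recomputing the base at each use): the filling passes below
   walk the list roughly as

	rtx *p;
	for (p = unfilled_slots_base; p < unfilled_slots_next; p++)
	  if (*p != 0 && num_delay_slots (*p) > 0)
	    ... try to fill the delay slots of *p ...

   See fill_simple_delay_slots for the real loop. */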
160
161 /* Points to the label before the end of the function. */
162 static rtx end_of_function_label;
163
164 /* Mapping between INSN_UID's and position in the code since INSN_UID's do
165 not always monotonically increase. */
166 static int *uid_to_ruid;
167
168 /* Highest valid index in `uid_to_ruid'. */
169 static int max_uid;
170
171 static int stop_search_p (rtx, int);
172 static int resource_conflicts_p (struct resources *, struct resources *);
173 static int insn_references_resource_p (rtx, struct resources *, int);
174 static int insn_sets_resource_p (rtx, struct resources *, int);
175 static rtx find_end_label (void);
176 static rtx emit_delay_sequence (rtx, rtx, int);
177 static rtx add_to_delay_list (rtx, rtx);
178 static rtx delete_from_delay_slot (rtx);
179 static void delete_scheduled_jump (rtx);
180 static void note_delay_statistics (int, int);
181 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
182 static rtx optimize_skip (rtx);
183 #endif
184 static int get_jump_flags (rtx, rtx);
185 static int rare_destination (rtx);
186 static int mostly_true_jump (rtx, rtx);
187 static rtx get_branch_condition (rtx, rtx);
188 static int condition_dominates_p (rtx, rtx);
189 static int redirect_with_delay_slots_safe_p (rtx, rtx, rtx);
190 static int redirect_with_delay_list_safe_p (rtx, rtx, rtx);
191 static int check_annul_list_true_false (int, rtx);
192 static rtx steal_delay_list_from_target (rtx, rtx, rtx, rtx,
193 struct resources *,
194 struct resources *,
195 struct resources *,
196 int, int *, int *, rtx *);
197 static rtx steal_delay_list_from_fallthrough (rtx, rtx, rtx, rtx,
198 struct resources *,
199 struct resources *,
200 struct resources *,
201 int, int *, int *);
202 static void try_merge_delay_insns (rtx, rtx);
203 static rtx redundant_insn (rtx, rtx, rtx);
204 static int own_thread_p (rtx, rtx, int);
205 static void update_block (rtx, rtx);
206 static int reorg_redirect_jump (rtx, rtx);
207 static void update_reg_dead_notes (rtx, rtx);
208 static void fix_reg_dead_note (rtx, rtx);
209 static void update_reg_unused_notes (rtx, rtx);
210 static void fill_simple_delay_slots (int);
211 static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx, int, int, int, int,
212 int *, rtx);
213 static void fill_eager_delay_slots (void);
214 static void relax_delay_slots (rtx);
215 #ifdef HAVE_return
216 static void make_return_insns (rtx);
217 #endif
218 \f
219 /* Return TRUE if this insn should stop the search for insns to fill delay
220 slots. LABELS_P indicates that labels should terminate the search.
221 In all cases, jumps terminate the search. */
222
223 static int
224 stop_search_p (rtx insn, int labels_p)
225 {
226 if (insn == 0)
227 return 1;
228
229 /* If the insn can throw an exception that is caught within the function,
230 it may effectively perform a jump from the viewpoint of the function.
231 Therefore act like for a jump. */
232 if (can_throw_internal (insn))
233 return 1;
234
235 switch (GET_CODE (insn))
236 {
237 case NOTE:
238 case CALL_INSN:
239 return 0;
240
241 case CODE_LABEL:
242 return labels_p;
243
244 case JUMP_INSN:
245 case BARRIER:
246 return 1;
247
248 case INSN:
249 /* OK unless it contains a delay slot or is an `asm' insn of some type.
250 We don't know anything about these. */
251 return (GET_CODE (PATTERN (insn)) == SEQUENCE
252 || GET_CODE (PATTERN (insn)) == ASM_INPUT
253 || asm_noperands (PATTERN (insn)) >= 0);
254
255 default:
256 abort ();
257 }
258 }
259 \f
260 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
261 resource set contains a volatile memory reference. Otherwise, return FALSE. */
262
263 static int
264 resource_conflicts_p (struct resources *res1, struct resources *res2)
265 {
266 if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
267 || (res1->unch_memory && res2->unch_memory)
268 || res1->volatil || res2->volatil)
269 return 1;
270
271 #ifdef HARD_REG_SET
272 return (res1->regs & res2->regs) != HARD_CONST (0);
273 #else
274 {
275 int i;
276
277 for (i = 0; i < HARD_REG_SET_LONGS; i++)
278 if ((res1->regs[i] & res2->regs[i]) != 0)
279 return 1;
280 return 0;
281 }
282 #endif
283 }
284
285 /* Return TRUE if any resource marked in RES, a `struct resources', is
286 referenced by INSN. If INCLUDE_DELAYED_EFFECTS is set, return TRUE if
287 the called routine is using those resources.
288
289 We compute this by computing all the resources referenced by INSN and
290 seeing if this conflicts with RES. It might be faster to directly check
291 ourselves, and this is the way it used to work, but it means duplicating
292 a large block of complex code. */
293
294 static int
295 insn_references_resource_p (rtx insn, struct resources *res,
296 int include_delayed_effects)
297 {
298 struct resources insn_res;
299
300 CLEAR_RESOURCE (&insn_res);
301 mark_referenced_resources (insn, &insn_res, include_delayed_effects);
302 return resource_conflicts_p (&insn_res, res);
303 }
304
305 /* Return TRUE if INSN modifies resources that are marked in RES.
306 INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
307 included. CC0 is only modified if it is explicitly set; see comments
308 in front of mark_set_resources for details. */
309
310 static int
311 insn_sets_resource_p (rtx insn, struct resources *res,
312 int include_delayed_effects)
313 {
314 struct resources insn_sets;
315
316 CLEAR_RESOURCE (&insn_sets);
317 mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
318 return resource_conflicts_p (&insn_sets, res);
319 }
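
/* Example of how the two predicates above are typically combined by the
   filling code below (a sketch of the pattern, not a quotation): a
   candidate TRIAL may move into a delay slot only if, among other tests,

	! insn_references_resource_p (trial, &set, 1)
	&& ! insn_sets_resource_p (trial, &set, 1)
	&& ! insn_sets_resource_p (trial, &needed, 1)

   i.e. TRIAL neither reads anything already set nor clobbers anything
   set or still needed on the path being searched. */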
320 \f
321 /* Find a label at the end of the function or before a RETURN. If there
322 is none, try to make one. If that fails, return 0.
323
324 The property of such a label is that it is placed just before the
325 epilogue or a bare RETURN insn, so that another bare RETURN can be
326 turned into a jump to the label unconditionally. In particular, the
327 label cannot be placed before a RETURN insn with a filled delay slot.
328
329 ??? There may be a problem with the current implementation. Suppose
330 we start with a bare RETURN insn and call find_end_label. It may set
331 end_of_function_label just before the RETURN. Suppose the machinery
332 is able to fill the delay slot of the RETURN insn afterwards. Then
333 end_of_function_label is no longer valid according to the property
334 described above and find_end_label will still return it unmodified.
335 Note that this is probably mitigated by the following observation:
336 once end_of_function_label is made, it is very likely the target of
337 a jump, so filling the delay slot of the RETURN will be much more
338 difficult. */
339
340 static rtx
341 find_end_label (void)
342 {
343 rtx insn;
344
345 /* If we found one previously, return it. */
346 if (end_of_function_label)
347 return end_of_function_label;
348
349 /* Otherwise, see if there is a label at the end of the function. If there
350 is, it must be that RETURN insns aren't needed, so that is our return
351 label and we don't have to do anything else. */
352
353 insn = get_last_insn ();
354 while (NOTE_P (insn)
355 || (NONJUMP_INSN_P (insn)
356 && (GET_CODE (PATTERN (insn)) == USE
357 || GET_CODE (PATTERN (insn)) == CLOBBER)))
358 insn = PREV_INSN (insn);
359
360 /* When a target threads its epilogue we might already have a
361 suitable return insn. If so, put a label before it for the
362 end_of_function_label. */
363 if (BARRIER_P (insn)
364 && JUMP_P (PREV_INSN (insn))
365 && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
366 {
367 rtx temp = PREV_INSN (PREV_INSN (insn));
368 end_of_function_label = gen_label_rtx ();
369 LABEL_NUSES (end_of_function_label) = 0;
370
371 /* Put the label before any USE insns that may precede the RETURN insn. */
372 while (NONJUMP_INSN_P (temp) && GET_CODE (PATTERN (temp)) == USE)
373 temp = PREV_INSN (temp);
374
375 emit_label_after (end_of_function_label, temp);
376 }
377
378 else if (LABEL_P (insn))
379 end_of_function_label = insn;
380 else
381 {
382 end_of_function_label = gen_label_rtx ();
383 LABEL_NUSES (end_of_function_label) = 0;
384 /* If the basic block reorder pass moves the return insn to
385 some other place try to locate it again and put our
386 end_of_function_label there. */
387 while (insn && ! (GET_CODE (insn) == JUMP_INSN
388 && (GET_CODE (PATTERN (insn)) == RETURN)))
389 insn = PREV_INSN (insn);
390 if (insn)
391 {
392 insn = PREV_INSN (insn);
393
394 /* Put the label before any USE insns that may precede the
395 RETURN insn. */
396 while (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE)
397 insn = PREV_INSN (insn);
398
399 emit_label_after (end_of_function_label, insn);
400 }
401 else
402 {
403 #ifdef HAVE_epilogue
404 if (HAVE_epilogue
405 #ifdef HAVE_return
406 && ! HAVE_return
407 #endif
408 )
409 {
410 /* The RETURN insn has its delay slot filled so we cannot
411 emit the label just before it. Since we already have
412 an epilogue and cannot emit a new RETURN, we cannot
413 emit the label at all. */
414 end_of_function_label = NULL_RTX;
415 return end_of_function_label;
416 }
417 #endif /* HAVE_epilogue */
418
419 /* Otherwise, make a new label and emit a RETURN and BARRIER,
420 if needed. */
421 emit_label (end_of_function_label);
422 #ifdef HAVE_return
423 if (HAVE_return)
424 {
425 /* The return we make may have delay slots too. */
426 rtx insn = gen_return ();
427 insn = emit_jump_insn (insn);
428 emit_barrier ();
429 if (num_delay_slots (insn) > 0)
430 obstack_ptr_grow (&unfilled_slots_obstack, insn);
431 }
432 #endif
433 }
434 }
435
436 /* Show one additional use for this label so it won't go away until
437 we are done. */
438 ++LABEL_NUSES (end_of_function_label);
439
440 return end_of_function_label;
441 }
442 \f
443 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
444 the pattern of INSN with the SEQUENCE.
445
446 Chain the insns so that NEXT_INSN of each insn in the sequence points to
447 the next and NEXT_INSN of the last insn in the sequence points to
448 the first insn after the sequence. Similarly for PREV_INSN. This makes
449 it easier to scan all insns.
450
451 Returns the SEQUENCE that replaces INSN. */
452
453 static rtx
454 emit_delay_sequence (rtx insn, rtx list, int length)
455 {
456 int i = 1;
457 rtx li;
458 int had_barrier = 0;
459
460 /* Allocate the rtvec to hold the insns and the SEQUENCE. */
461 rtvec seqv = rtvec_alloc (length + 1);
462 rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
463 rtx seq_insn = make_insn_raw (seq);
464 rtx first = get_insns ();
465 rtx last = get_last_insn ();
466
467 /* Make a copy of the insn having delay slots. */
468 rtx delay_insn = copy_rtx (insn);
469
470 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
471 confuse further processing. Update LAST in case it was the last insn.
472 We will put the BARRIER back in later. */
473 if (NEXT_INSN (insn) && BARRIER_P (NEXT_INSN (insn)))
474 {
475 delete_related_insns (NEXT_INSN (insn));
476 last = get_last_insn ();
477 had_barrier = 1;
478 }
479
480 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
481 NEXT_INSN (seq_insn) = NEXT_INSN (insn);
482 PREV_INSN (seq_insn) = PREV_INSN (insn);
483
484 if (insn != last)
485 PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;
486
487 if (insn != first)
488 NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;
489
490 /* Note the calls to set_new_first_and_last_insn must occur after
491 SEQ_INSN has been completely spliced into the insn stream.
492
493 Otherwise CUR_INSN_UID will get set to an incorrect value because
494 set_new_first_and_last_insn will not find SEQ_INSN in the chain. */
495 if (insn == last)
496 set_new_first_and_last_insn (first, seq_insn);
497
498 if (insn == first)
499 set_new_first_and_last_insn (seq_insn, last);
500
501 /* Build our SEQUENCE and rebuild the insn chain. */
502 XVECEXP (seq, 0, 0) = delay_insn;
503 INSN_DELETED_P (delay_insn) = 0;
504 PREV_INSN (delay_insn) = PREV_INSN (seq_insn);
505
506 for (li = list; li; li = XEXP (li, 1), i++)
507 {
508 rtx tem = XEXP (li, 0);
509 rtx note, next;
510
511 /* Show that this copy of the insn isn't deleted. */
512 INSN_DELETED_P (tem) = 0;
513
514 XVECEXP (seq, 0, i) = tem;
515 PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
516 NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;
517
518 /* The SPARC assembler, for instance, emits a warning when debug info
519 is output into a delay slot. */
520 if (INSN_LOCATOR (tem) && !INSN_LOCATOR (seq_insn))
521 INSN_LOCATOR (seq_insn) = INSN_LOCATOR (tem);
522 INSN_LOCATOR (tem) = 0;
523
524 for (note = REG_NOTES (tem); note; note = next)
525 {
526 next = XEXP (note, 1);
527 switch (REG_NOTE_KIND (note))
528 {
529 case REG_DEAD:
530 /* Remove any REG_DEAD notes because we can't rely on them now
531 that the insn has been moved. */
532 remove_note (tem, note);
533 break;
534
535 case REG_LABEL:
536 /* Keep the label reference count up to date. */
537 if (LABEL_P (XEXP (note, 0)))
538 LABEL_NUSES (XEXP (note, 0)) ++;
539 break;
540
541 default:
542 break;
543 }
544 }
545 }
546
547 NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);
548
549 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
550 last insn in that SEQUENCE to point to us. Similarly for the first
551 insn in the following insn if it is a SEQUENCE. */
552
553 if (PREV_INSN (seq_insn) && NONJUMP_INSN_P (PREV_INSN (seq_insn))
554 && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
555 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
556 XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
557 = seq_insn;
558
559 if (NEXT_INSN (seq_insn) && NONJUMP_INSN_P (NEXT_INSN (seq_insn))
560 && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
561 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
562
563 /* If there used to be a BARRIER, put it back. */
564 if (had_barrier)
565 emit_barrier_after (seq_insn);
566
567 if (i != length + 1)
568 abort ();
569
570 return seq_insn;
571 }
572
573 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
574 be in the order in which the insns are to be executed. */
575
576 static rtx
577 add_to_delay_list (rtx insn, rtx delay_list)
578 {
579 /* If we have an empty list, just make a new list element. If
580 INSN has its block number recorded, clear it since we may
581 be moving the insn to a new block. */
582
583 if (delay_list == 0)
584 {
585 clear_hashed_info_for_insn (insn);
586 return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
587 }
588
589 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
590 list. */
591 XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));
592
593 return delay_list;
594 }
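
/* For example, starting from an empty list,

	list = add_to_delay_list (insn1, NULL_RTX);
	list = add_to_delay_list (insn2, list);

   yields the INSN_LIST (insn1 insn2), i.e. the insns appear in execution
   order, which is the order emit_delay_sequence expects. */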
595 \f
596 /* Delete INSN from the delay slot of the insn that it is in, which may
597 produce an insn with no delay slots. Return the new insn. */
598
599 static rtx
600 delete_from_delay_slot (rtx insn)
601 {
602 rtx trial, seq_insn, seq, prev;
603 rtx delay_list = 0;
604 int i;
605 int had_barrier = 0;
606
607 /* We first must find the insn containing the SEQUENCE with INSN in its
608 delay slot. Do this by finding an insn, TRIAL, where
609 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
610
611 for (trial = insn;
612 PREV_INSN (NEXT_INSN (trial)) == trial;
613 trial = NEXT_INSN (trial))
614 ;
615
616 seq_insn = PREV_INSN (NEXT_INSN (trial));
617 seq = PATTERN (seq_insn);
618
619 if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
620 had_barrier = 1;
621
622 /* Create a delay list consisting of all the insns other than the one
623 we are deleting (unless we were the only one). */
624 if (XVECLEN (seq, 0) > 2)
625 for (i = 1; i < XVECLEN (seq, 0); i++)
626 if (XVECEXP (seq, 0, i) != insn)
627 delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);
628
629 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
630 list, and rebuild the delay list if non-empty. */
631 prev = PREV_INSN (seq_insn);
632 trial = XVECEXP (seq, 0, 0);
633 delete_related_insns (seq_insn);
634 add_insn_after (trial, prev);
635
636 /* If there was a barrier after the old SEQUENCE, re-emit it. */
637 if (had_barrier)
638 emit_barrier_after (trial);
639
640 /* If there are any delay insns, re-emit them. Otherwise clear the
641 annul flag. */
642 if (delay_list)
643 trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
644 else if (INSN_P (trial))
645 INSN_ANNULLED_BRANCH_P (trial) = 0;
646
647 INSN_FROM_TARGET_P (insn) = 0;
648
649 /* Show we need to fill this insn again. */
650 obstack_ptr_grow (&unfilled_slots_obstack, trial);
651
652 return trial;
653 }
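
/* Example (names are illustrative): deleting INSN_A from an insn whose
   pattern is

	(sequence [branch INSN_A INSN_B])

   re-emits the branch with the one-element delay list (INSN_B), while
   deleting it from (sequence [branch INSN_A]) leaves a bare branch with
   its annul flag cleared. In both cases the result goes back on the
   unfilled-slots obstack so its empty slots can be filled again. */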
654 \f
655 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
656 the insn that sets CC0 for it and delete it too. */
657
658 static void
659 delete_scheduled_jump (rtx insn)
660 {
661 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
662 delete the insn that sets the condition code, but it is hard to find it.
663 Since this case is rare anyway, don't bother trying; there would likely
664 be other insns that became dead anyway, which we wouldn't know to
665 delete. */
666
667 #ifdef HAVE_cc0
668 if (reg_mentioned_p (cc0_rtx, insn))
669 {
670 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
671
672 /* If a reg-note was found, it points to an insn to set CC0. This
673 insn is in the delay list of some other insn. So delete it from
674 the delay list it was in. */
675 if (note)
676 {
677 if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
678 && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
679 delete_from_delay_slot (XEXP (note, 0));
680 }
681 else
682 {
683 /* The insn setting CC0 is our previous insn, but it may be in
684 a delay slot. It will be the last insn in the delay slot, if
685 it is. */
686 rtx trial = previous_insn (insn);
687 if (NOTE_P (trial))
688 trial = prev_nonnote_insn (trial);
689 if (sets_cc0_p (PATTERN (trial)) != 1
690 || FIND_REG_INC_NOTE (trial, NULL_RTX))
691 return;
692 if (PREV_INSN (NEXT_INSN (trial)) == trial)
693 delete_related_insns (trial);
694 else
695 delete_from_delay_slot (trial);
696 }
697 }
698 #endif
699
700 delete_related_insns (insn);
701 }
702 \f
703 /* Counters for delay-slot filling. */
704
705 #define NUM_REORG_FUNCTIONS 2
706 #define MAX_DELAY_HISTOGRAM 3
707 #define MAX_REORG_PASSES 2
708
709 static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];
710
711 static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];
712
713 static int reorg_pass_number;
714
715 static void
716 note_delay_statistics (int slots_filled, int index)
717 {
718 num_insns_needing_delays[index][reorg_pass_number]++;
719 if (slots_filled > MAX_DELAY_HISTOGRAM)
720 slots_filled = MAX_DELAY_HISTOGRAM;
721 num_filled_delays[index][slots_filled][reorg_pass_number]++;
722 }
723 \f
724 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
725
726 /* Optimize the following cases:
727
728 1. When a conditional branch skips over only one instruction,
729 use an annulling branch and put that insn in the delay slot.
730 Use either a branch that annuls when the condition is true or
731 invert the test with a branch that annuls when the condition is
732 false. This saves insns, since otherwise we must copy an insn
733 from the L1 target.
734
735 (orig) (skip) (otherwise)
736 Bcc.n L1 Bcc',a L1 Bcc,a L1'
737 insn insn insn2
738 L1: L1: L1:
739 insn2 insn2 insn2
740 insn3 insn3 L1':
741 insn3
742
743 2. When a conditional branch skips over only one instruction,
744 and after that, it unconditionally branches somewhere else,
745 perform a similar optimization. This saves executing the
746 second branch in the case where the inverted condition is true.
747
748 Bcc.n L1 Bcc',a L2
749 insn insn
750 L1: L1:
751 Bra L2 Bra L2
752
753 INSN is a JUMP_INSN.
754
755 This should be expanded to skip over N insns, where N is the number
756 of delay slots required. */
757
758 static rtx
759 optimize_skip (rtx insn)
760 {
761 rtx trial = next_nonnote_insn (insn);
762 rtx next_trial = next_active_insn (trial);
763 rtx delay_list = 0;
764 int flags;
765
766 flags = get_jump_flags (insn, JUMP_LABEL (insn));
767
768 if (trial == 0
769 || !NONJUMP_INSN_P (trial)
770 || GET_CODE (PATTERN (trial)) == SEQUENCE
771 || recog_memoized (trial) < 0
772 || (! eligible_for_annul_false (insn, 0, trial, flags)
773 && ! eligible_for_annul_true (insn, 0, trial, flags))
774 || can_throw_internal (trial))
775 return 0;
776
777 /* There are two cases where we are just executing one insn (we assume
778 here that a branch requires only one insn; this should be generalized
779 at some point): Where the branch goes around a single insn or where
780 we have one insn followed by a branch to the same label we branch to.
781 In both of these cases, inverting the jump and annulling the delay
782 slot give the same effect in fewer insns. */
783 if ((next_trial == next_active_insn (JUMP_LABEL (insn))
784 && ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
785 || (next_trial != 0
786 && JUMP_P (next_trial)
787 && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
788 && (simplejump_p (next_trial)
789 || GET_CODE (PATTERN (next_trial)) == RETURN)))
790 {
791 if (eligible_for_annul_false (insn, 0, trial, flags))
792 {
793 if (invert_jump (insn, JUMP_LABEL (insn), 1))
794 INSN_FROM_TARGET_P (trial) = 1;
795 else if (! eligible_for_annul_true (insn, 0, trial, flags))
796 return 0;
797 }
798
799 delay_list = add_to_delay_list (trial, NULL_RTX);
800 next_trial = next_active_insn (trial);
801 update_block (trial, trial);
802 delete_related_insns (trial);
803
804 /* Also, if we are targeting an unconditional
805 branch, thread our jump to the target of that branch. Don't
806 change this into a RETURN here, because it may not accept what
807 we have in the delay slot. We'll fix this up later. */
808 if (next_trial && JUMP_P (next_trial)
809 && (simplejump_p (next_trial)
810 || GET_CODE (PATTERN (next_trial)) == RETURN))
811 {
812 rtx target_label = JUMP_LABEL (next_trial);
813 if (target_label == 0)
814 target_label = find_end_label ();
815
816 if (target_label)
817 {
818 /* Recompute the flags based on TARGET_LABEL since threading
819 the jump to TARGET_LABEL may change the direction of the
820 jump (which may change the circumstances in which the
821 delay slot is nullified). */
822 flags = get_jump_flags (insn, target_label);
823 if (eligible_for_annul_true (insn, 0, trial, flags))
824 reorg_redirect_jump (insn, target_label);
825 }
826 }
827
828 INSN_ANNULLED_BRANCH_P (insn) = 1;
829 }
830
831 return delay_list;
832 }
833 #endif
834 \f
835 /* Encode and return branch direction and prediction information for
836 INSN assuming it will jump to LABEL.
837
838 Unconditional branches return no direction information and
839 are predicted as very likely taken. */
840
841 static int
842 get_jump_flags (rtx insn, rtx label)
843 {
844 int flags;
845
846 /* get_jump_flags can be passed any insn with delay slots; these may
847 be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
848 direction information, and only if they are conditional jumps.
849
850 If LABEL is zero, then there is no way to determine the branch
851 direction. */
852 if (JUMP_P (insn)
853 && (condjump_p (insn) || condjump_in_parallel_p (insn))
854 && INSN_UID (insn) <= max_uid
855 && label != 0
856 && INSN_UID (label) <= max_uid)
857 flags
858 = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
859 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
860 /* No valid direction information. */
861 else
862 flags = 0;
863
864 /* If INSN is a conditional branch, call mostly_true_jump to
865 determine the branch prediction.
866
867 Unconditional branches are predicted as very likely taken. */
868 if (JUMP_P (insn)
869 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
870 {
871 int prediction;
872
873 prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
874 switch (prediction)
875 {
876 case 2:
877 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
878 break;
879 case 1:
880 flags |= ATTR_FLAG_likely;
881 break;
882 case 0:
883 flags |= ATTR_FLAG_unlikely;
884 break;
885 case -1:
886 flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
887 break;
888
889 default:
890 abort ();
891 }
892 }
893 else
894 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
895
896 return flags;
897 }
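
/* For instance (reading off the code above): a conditional jump to a
   label later in the insn stream that is predicted taken yields
   (ATTR_FLAG_forward | ATTR_FLAG_likely), while an unconditional jump
   carries no direction bit and yields
   (ATTR_FLAG_very_likely | ATTR_FLAG_likely). */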
898
899 /* Return 1 if INSN is a destination that will be branched to rarely (the
900 return point of a function); return 2 if DEST will be branched to very
901 rarely (a call to a function that doesn't return). Otherwise,
902 return 0. */
903
904 static int
905 rare_destination (rtx insn)
906 {
907 int jump_count = 0;
908 rtx next;
909
910 for (; insn; insn = next)
911 {
912 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
913 insn = XVECEXP (PATTERN (insn), 0, 0);
914
915 next = NEXT_INSN (insn);
916
917 switch (GET_CODE (insn))
918 {
919 case CODE_LABEL:
920 return 0;
921 case BARRIER:
922 /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN. We
923 don't scan past JUMP_INSNs, so any barrier we find here must
924 have been after a CALL_INSN and hence mean the call doesn't
925 return. */
926 return 2;
927 case JUMP_INSN:
928 if (GET_CODE (PATTERN (insn)) == RETURN)
929 return 1;
930 else if (simplejump_p (insn)
931 && jump_count++ < 10)
932 next = JUMP_LABEL (insn);
933 else
934 return 0;
935
936 default:
937 break;
938 }
939 }
940
941 /* If we got here it means we hit the end of the function. So this
942 is an unlikely destination. */
943
944 return 1;
945 }
946
947 /* Return truth value of the statement that this branch
948 is mostly taken. If we think that the branch is extremely likely
949 to be taken, we return 2. If the branch is slightly more likely to be
950 taken, return 1. If the branch is slightly less likely to be taken,
951 return 0 and if the branch is highly unlikely to be taken, return -1.
952
953 CONDITION, if nonzero, is the condition that JUMP_INSN is testing. */
954
955 static int
956 mostly_true_jump (rtx jump_insn, rtx condition)
957 {
958 rtx target_label = JUMP_LABEL (jump_insn);
959 rtx insn, note;
960 int rare_dest = rare_destination (target_label);
961 int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
962
963 /* If branch probabilities are available, then use that number since it
964 always gives a correct answer. */
965 note = find_reg_note (jump_insn, REG_BR_PROB, 0);
966 if (note)
967 {
968 int prob = INTVAL (XEXP (note, 0));
969
970 if (prob >= REG_BR_PROB_BASE * 9 / 10)
971 return 2;
972 else if (prob >= REG_BR_PROB_BASE / 2)
973 return 1;
974 else if (prob >= REG_BR_PROB_BASE / 10)
975 return 0;
976 else
977 return -1;
978 }
979
980 /* ??? Ought to use estimate_probability instead. */
981
982 /* If this is a branch outside a loop, it is highly unlikely. */
983 if (GET_CODE (PATTERN (jump_insn)) == SET
984 && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
985 && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
986 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
987 || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
988 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
989 return -1;
990
991 if (target_label)
992 {
993 /* If this is the test of a loop, it is very likely true. We scan
994 backwards from the target label. If we find a NOTE_INSN_LOOP_BEG
995 before the next real insn, we assume the branch is to the top of
996 the loop. */
997 for (insn = PREV_INSN (target_label);
998 insn && NOTE_P (insn);
999 insn = PREV_INSN (insn))
1000 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1001 return 2;
1002
1003 /* If this is a jump to the test of a loop, it is likely true. We scan
1004 forwards from the target label. If we find a NOTE_INSN_LOOP_VTOP
1005 before the next real insn, we assume the branch is to the loop branch
1006 test. */
1007 for (insn = NEXT_INSN (target_label);
1008 insn && NOTE_P (insn);
1009 insn = NEXT_INSN (insn))
1010 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
1011 return 1;
1012 }
1013
1014 /* Look at the relative rarities of the fallthrough and destination. If
1015 they differ, we can predict the branch that way. */
1016
1017 switch (rare_fallthrough - rare_dest)
1018 {
1019 case -2:
1020 return -1;
1021 case -1:
1022 return 0;
1023 case 0:
1024 break;
1025 case 1:
1026 return 1;
1027 case 2:
1028 return 2;
1029 }
1030
1031 /* If we couldn't figure out what this jump was, assume it won't be
1032 taken. This should be rare. */
1033 if (condition == 0)
1034 return 0;
1035
1036 /* EQ tests are usually false and NE tests are usually true. Also,
1037 most quantities are positive, so we can make the appropriate guesses
1038 about signed comparisons against zero. */
1039 switch (GET_CODE (condition))
1040 {
1041 case CONST_INT:
1042 /* Unconditional branch. */
1043 return 1;
1044 case EQ:
1045 return 0;
1046 case NE:
1047 return 1;
1048 case LE:
1049 case LT:
1050 if (XEXP (condition, 1) == const0_rtx)
1051 return 0;
1052 break;
1053 case GE:
1054 case GT:
1055 if (XEXP (condition, 1) == const0_rtx)
1056 return 1;
1057 break;
1058
1059 default:
1060 break;
1061 }
1062
1063 /* Predict that backward branches are usually taken, forward branches usually not. If
1064 we don't know whether this is forward or backward, assume the branch
1065 will be taken, since most are. */
1066 return (target_label == 0 || INSN_UID (jump_insn) > max_uid
1067 || INSN_UID (target_label) > max_uid
1068 || (uid_to_ruid[INSN_UID (jump_insn)]
1069 > uid_to_ruid[INSN_UID (target_label)]));
1070 }
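
/* A worked example of the REG_BR_PROB mapping above, assuming the usual
   REG_BR_PROB_BASE of 10000 (the note value is the taken probability
   scaled by that base): a note of 9500 returns 2 (>= 9000), 6000
   returns 1 (>= 5000), 2000 returns 0 (>= 1000), and 500 returns -1. */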
1071
1072 /* Return the condition under which INSN will branch to TARGET. If TARGET
1073 is zero, return the condition under which INSN will return. If INSN is
1074 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1075 type of jump, or it doesn't go to TARGET, return 0. */
1076
1077 static rtx
1078 get_branch_condition (rtx insn, rtx target)
1079 {
1080 rtx pat = PATTERN (insn);
1081 rtx src;
1082
1083 if (condjump_in_parallel_p (insn))
1084 pat = XVECEXP (pat, 0, 0);
1085
1086 if (GET_CODE (pat) == RETURN)
1087 return target == 0 ? const_true_rtx : 0;
1088
1089 else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
1090 return 0;
1091
1092 src = SET_SRC (pat);
1093 if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
1094 return const_true_rtx;
1095
1096 else if (GET_CODE (src) == IF_THEN_ELSE
1097 && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
1098 || (GET_CODE (XEXP (src, 1)) == LABEL_REF
1099 && XEXP (XEXP (src, 1), 0) == target))
1100 && XEXP (src, 2) == pc_rtx)
1101 return XEXP (src, 0);
1102
1103 else if (GET_CODE (src) == IF_THEN_ELSE
1104 && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
1105 || (GET_CODE (XEXP (src, 2)) == LABEL_REF
1106 && XEXP (XEXP (src, 2), 0) == target))
1107 && XEXP (src, 1) == pc_rtx)
1108 {
1109 enum rtx_code rev;
1110 rev = reversed_comparison_code (XEXP (src, 0), insn);
1111 if (rev != UNKNOWN)
1112 return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
1113 XEXP (XEXP (src, 0), 0),
1114 XEXP (XEXP (src, 0), 1));
1115 }
1116
1117 return 0;
1118 }
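
/* An illustrative example (register and label names are made up): for a
   jump whose pattern is

	(set (pc) (if_then_else (eq (reg r1) (const_int 0))
				(label_ref L42)
				(pc)))

   get_branch_condition (insn, L42) returns (eq (reg r1) (const_int 0));
   for the inverted form, with (pc) in the "then" arm and the label in
   the "else" arm, it returns the reversed comparison
   (ne (reg r1) (const_int 0)). */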
1119
1120 /* Return nonzero if CONDITION is more strict than the condition of
1121 INSN, i.e., if INSN will always branch if CONDITION is true. */
1122
1123 static int
1124 condition_dominates_p (rtx condition, rtx insn)
1125 {
1126 rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
1127 enum rtx_code code = GET_CODE (condition);
1128 enum rtx_code other_code;
1129
1130 if (rtx_equal_p (condition, other_condition)
1131 || other_condition == const_true_rtx)
1132 return 1;
1133
1134 else if (condition == const_true_rtx || other_condition == 0)
1135 return 0;
1136
1137 other_code = GET_CODE (other_condition);
1138 if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
1139 || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
1140 || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
1141 return 0;
1142
1143 return comparison_dominates_p (code, other_code);
1144 }
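
/* For example, if CONDITION is (eq (reg r1) (const_int 0)) and INSN
   branches on (le (reg r1) (const_int 0)) with the same operands, then
   whenever CONDITION is true INSN certainly branches, so this returns
   nonzero; comparison_dominates_p (EQ, LE) encodes that implication.
   (r1 is an illustrative register.) */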
1145
1146 /* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
1147 any insns already in the delay slot of JUMP. */
1148
1149 static int
1150 redirect_with_delay_slots_safe_p (rtx jump, rtx newlabel, rtx seq)
1151 {
1152 int flags, i;
1153 rtx pat = PATTERN (seq);
1154
1155 /* Make sure all the delay slots of this jump would still
1156 be valid after threading the jump. If they are still
1157 valid, then return nonzero. */
1158
1159 flags = get_jump_flags (jump, newlabel);
1160 for (i = 1; i < XVECLEN (pat, 0); i++)
1161 if (! (
1162 #ifdef ANNUL_IFFALSE_SLOTS
1163 (INSN_ANNULLED_BRANCH_P (jump)
1164 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1165 ? eligible_for_annul_false (jump, i - 1,
1166 XVECEXP (pat, 0, i), flags) :
1167 #endif
1168 #ifdef ANNUL_IFTRUE_SLOTS
1169 (INSN_ANNULLED_BRANCH_P (jump)
1170 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1171 ? eligible_for_annul_true (jump, i - 1,
1172 XVECEXP (pat, 0, i), flags) :
1173 #endif
1174 eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
1175 break;
1176
1177 return (i == XVECLEN (pat, 0));
1178 }
1179
1180 /* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
1181 any insns we wish to place in the delay slot of JUMP. */
1182
1183 static int
1184 redirect_with_delay_list_safe_p (rtx jump, rtx newlabel, rtx delay_list)
1185 {
1186 int flags, i;
1187 rtx li;
1188
1189 /* Make sure all the insns in DELAY_LIST would still be
1190 valid after threading the jump. If they are still
1191 valid, then return nonzero. */
1192
1193 flags = get_jump_flags (jump, newlabel);
1194 for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
1195 if (! (
1196 #ifdef ANNUL_IFFALSE_SLOTS
1197 (INSN_ANNULLED_BRANCH_P (jump)
1198 && INSN_FROM_TARGET_P (XEXP (li, 0)))
1199 ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
1200 #endif
1201 #ifdef ANNUL_IFTRUE_SLOTS
1202 (INSN_ANNULLED_BRANCH_P (jump)
1203 && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
1204 ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
1205 #endif
1206 eligible_for_delay (jump, i, XEXP (li, 0), flags)))
1207 break;
1208
1209 return (li == NULL);
1210 }
1211
1212 /* DELAY_LIST is a list of insns that have already been placed into delay
1213 slots. See if all of them have the same annulling status as ANNUL_TRUE_P.
1214 If not, return 0; otherwise return 1. */
1215
1216 static int
1217 check_annul_list_true_false (int annul_true_p, rtx delay_list)
1218 {
1219 rtx temp;
1220
1221 if (delay_list)
1222 {
1223 for (temp = delay_list; temp; temp = XEXP (temp, 1))
1224 {
1225 rtx trial = XEXP (temp, 0);
1226
1227 if ((annul_true_p && INSN_FROM_TARGET_P (trial))
1228 || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
1229 return 0;
1230 }
1231 }
1232
1233 return 1;
1234 }
1235 \f
1236 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1237 the condition tested by INSN is CONDITION and the resources shown in
1238 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1239 from SEQ's delay list, in addition to whatever insns it may execute
1240 (in DELAY_LIST). SETS and NEEDED denote resources already set and
1241 needed while searching for delay slot insns. Return the concatenated
1242 delay list if possible, otherwise, return 0.
1243
1244 SLOTS_TO_FILL is the total number of slots required by INSN, and
1245 PSLOTS_FILLED points to the number filled so far (also the number of
1246 insns in DELAY_LIST). It is updated with the number that have been
1247 filled from the SEQUENCE, if any.
1248
1249 PANNUL_P points to a nonzero value if we already know that we need
1250 to annul INSN. If this routine determines that annulling is needed,
1251 it may set that value nonzero.
1252
1253 PNEW_THREAD points to a location that is to receive the place at which
1254 execution should continue. */
1255
1256 static rtx
1257 steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
1258 rtx delay_list, struct resources *sets,
1259 struct resources *needed,
1260 struct resources *other_needed,
1261 int slots_to_fill, int *pslots_filled,
1262 int *pannul_p, rtx *pnew_thread)
1263 {
1264 rtx temp;
1265 int slots_remaining = slots_to_fill - *pslots_filled;
1266 int total_slots_filled = *pslots_filled;
1267 rtx new_delay_list = 0;
1268 int must_annul = *pannul_p;
1269 int used_annul = 0;
1270 int i;
1271 struct resources cc_set;
1272
1273 /* We can't do anything if there are more delay slots in SEQ than we
1274 can handle, or if we don't know that it will be a taken branch.
1275 We know that it will be a taken branch if it is either an unconditional
1276 branch or a conditional branch with a stricter branch condition.
1277
1278 Also, exit if the branch has more than one set, since then it is computing
1279 other results that can't be ignored, e.g. the HPPA mov&branch instruction.
1280 ??? It may be possible to move other sets into INSN in addition to
1281 moving the instructions in the delay slots.
1282
1283 We cannot steal the delay list if one of the instructions in the
1284 current delay_list modifies the condition codes and the jump in the
1285 sequence is a conditional jump. In that case we cannot change the
1286 direction of the jump, because the condition codes would affect the
1287 direction of the jump in the sequence. */
1288
1289 CLEAR_RESOURCE (&cc_set);
1290 for (temp = delay_list; temp; temp = XEXP (temp, 1))
1291 {
1292 rtx trial = XEXP (temp, 0);
1293
1294 mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
1295 if (insn_references_resource_p (XVECEXP (seq , 0, 0), &cc_set, 0))
1296 return delay_list;
1297 }
1298
1299 if (XVECLEN (seq, 0) - 1 > slots_remaining
1300 || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
1301 || ! single_set (XVECEXP (seq, 0, 0)))
1302 return delay_list;
1303
1304 #ifdef MD_CAN_REDIRECT_BRANCH
1305 /* On some targets, branches with delay slots can have a limited
1306 displacement. Give the back end a chance to tell us we can't do
1307 this. */
1308 if (! MD_CAN_REDIRECT_BRANCH (insn, XVECEXP (seq, 0, 0)))
1309 return delay_list;
1310 #endif
1311
1312 for (i = 1; i < XVECLEN (seq, 0); i++)
1313 {
1314 rtx trial = XVECEXP (seq, 0, i);
1315 int flags;
1316
1317 if (insn_references_resource_p (trial, sets, 0)
1318 || insn_sets_resource_p (trial, needed, 0)
1319 || insn_sets_resource_p (trial, sets, 0)
1320 #ifdef HAVE_cc0
1321 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1322 delay list. */
1323 || find_reg_note (trial, REG_CC_USER, NULL_RTX)
1324 #endif
1325 /* If TRIAL is from the fallthrough code of an annulled branch insn
1326 in SEQ, we cannot use it. */
1327 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
1328 && ! INSN_FROM_TARGET_P (trial)))
1329 return delay_list;
1330
1331 /* If this insn was already done (usually in a previous delay slot),
1332 pretend we put it in our delay slot. */
1333 if (redundant_insn (trial, insn, new_delay_list))
1334 continue;
1335
1336 /* We will end up re-vectoring this branch, so compute flags
1337 based on jumping to the new label. */
1338 flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));
1339
1340 if (! must_annul
1341 && ((condition == const_true_rtx
1342 || (! insn_sets_resource_p (trial, other_needed, 0)
1343 && ! may_trap_p (PATTERN (trial)))))
1344 ? eligible_for_delay (insn, total_slots_filled, trial, flags)
1345 : (must_annul || (delay_list == NULL && new_delay_list == NULL))
1346 && (must_annul = 1,
1347 check_annul_list_true_false (0, delay_list)
1348 && check_annul_list_true_false (0, new_delay_list)
1349 && eligible_for_annul_false (insn, total_slots_filled,
1350 trial, flags)))
1351 {
1352 if (must_annul)
1353 used_annul = 1;
1354 temp = copy_rtx (trial);
1355 INSN_FROM_TARGET_P (temp) = 1;
1356 new_delay_list = add_to_delay_list (temp, new_delay_list);
1357 total_slots_filled++;
1358
1359 if (--slots_remaining == 0)
1360 break;
1361 }
1362 else
1363 return delay_list;
1364 }
1365
1366 /* Show the place to which we will be branching. */
1367 *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
1368
1369 /* Add any new insns to the delay list and update the count of the
1370 number of slots filled. */
1371 *pslots_filled = total_slots_filled;
1372 if (used_annul)
1373 *pannul_p = 1;
1374
1375 if (delay_list == 0)
1376 return new_delay_list;
1377
1378 for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
1379 delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);
1380
1381 return delay_list;
1382 }
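
/* A sketch of the transformation above (labels are illustrative): if

	INSN: Bcc L1	...	L1: (sequence [Bra L2; INSN_A])

   and INSN_A passes all the tests, a copy of INSN_A marked
   INSN_FROM_TARGET_P is appended to INSN's delay list and *PNEW_THREAD
   is set to the first active insn after L2, so that INSN can later be
   redirected past the branch whose slot was stolen. */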
1383 \f
1384 /* Similar to steal_delay_list_from_target except that SEQ is on the
1385 fallthrough path of INSN. Here we only do something if the delay insn
1386 of SEQ is an unconditional branch. In that case we steal its delay slot
1387 for INSN since unconditional branches are much easier to fill. */
1388
1389 static rtx
1390 steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx seq,
1391 rtx delay_list, struct resources *sets,
1392 struct resources *needed,
1393 struct resources *other_needed,
1394 int slots_to_fill, int *pslots_filled,
1395 int *pannul_p)
1396 {
1397 int i;
1398 int flags;
1399 int must_annul = *pannul_p;
1400 int used_annul = 0;
1401
1402 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1403
1404 /* We can't do anything if SEQ's delay insn isn't an
1405 unconditional branch. */
1406
1407 if (! simplejump_p (XVECEXP (seq, 0, 0))
1408 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
1409 return delay_list;
1410
1411 for (i = 1; i < XVECLEN (seq, 0); i++)
1412 {
1413 rtx trial = XVECEXP (seq, 0, i);
1414
1415 /* If TRIAL sets CC0, stealing it will move it too far from the use
1416 of CC0. */
1417 if (insn_references_resource_p (trial, sets, 0)
1418 || insn_sets_resource_p (trial, needed, 0)
1419 || insn_sets_resource_p (trial, sets, 0)
1420 #ifdef HAVE_cc0
1421 || sets_cc0_p (PATTERN (trial))
1422 #endif
1423 )
1424
1425 break;
1426
1427 /* If this insn was already done, we don't need it. */
1428 if (redundant_insn (trial, insn, delay_list))
1429 {
1430 delete_from_delay_slot (trial);
1431 continue;
1432 }
1433
1434 if (! must_annul
1435 && ((condition == const_true_rtx
1436 || (! insn_sets_resource_p (trial, other_needed, 0)
1437 && ! may_trap_p (PATTERN (trial)))))
1438 ? eligible_for_delay (insn, *pslots_filled, trial, flags)
1439 : (must_annul || delay_list == NULL) && (must_annul = 1,
1440 check_annul_list_true_false (1, delay_list)
1441 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
1442 {
1443 if (must_annul)
1444 used_annul = 1;
1445 delete_from_delay_slot (trial);
1446 delay_list = add_to_delay_list (trial, delay_list);
1447
1448 if (++(*pslots_filled) == slots_to_fill)
1449 break;
1450 }
1451 else
1452 break;
1453 }
1454
1455 if (used_annul)
1456 *pannul_p = 1;
1457 return delay_list;
1458 }
1459 \f
1460 /* Try merging insns starting at THREAD which match exactly the insns in
1461 INSN's delay list.
1462
1463 If all insns were matched and the insn was previously annulling, the
1464 annul bit will be cleared.
1465
1466 For each insn that is merged, if the branch is or will be non-annulling,
1467 we delete the merged insn. */
1468
1469 static void
1470 try_merge_delay_insns (rtx insn, rtx thread)
1471 {
1472 rtx trial, next_trial;
1473 rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
1474 int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
1475 int slot_number = 1;
1476 int num_slots = XVECLEN (PATTERN (insn), 0);
1477 rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1478 struct resources set, needed;
1479 rtx merged_insns = 0;
1480 int i;
1481 int flags;
1482
1483 flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
1484
1485 CLEAR_RESOURCE (&needed);
1486 CLEAR_RESOURCE (&set);
1487
1488 /* If this is not an annulling branch, take into account anything needed in
1489 INSN's delay slot. This prevents two increments from being incorrectly
1490 folded into one. If we are annulling, this would be the correct
1491 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH
1492 will essentially disable this optimization. This method is somewhat of
1493 a kludge, but I don't see a better way.) */
1494 if (! annul_p)
1495 for (i = 1 ; i < num_slots; i++)
1496 if (XVECEXP (PATTERN (insn), 0, i))
1497 mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);
1498
1499 for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
1500 {
1501 rtx pat = PATTERN (trial);
1502 rtx oldtrial = trial;
1503
1504 next_trial = next_nonnote_insn (trial);
1505
1506 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1507 if (NONJUMP_INSN_P (trial)
1508 && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
1509 continue;
1510
1511 if (GET_CODE (next_to_match) == GET_CODE (trial)
1512 #ifdef HAVE_cc0
1513 /* We can't share an insn that sets cc0. */
1514 && ! sets_cc0_p (pat)
1515 #endif
1516 && ! insn_references_resource_p (trial, &set, 1)
1517 && ! insn_sets_resource_p (trial, &set, 1)
1518 && ! insn_sets_resource_p (trial, &needed, 1)
1519 && (trial = try_split (pat, trial, 0)) != 0
1520 /* Update next_trial, in case try_split succeeded. */
1521 && (next_trial = next_nonnote_insn (trial))
1522 /* Likewise THREAD. */
1523 && (thread = oldtrial == thread ? trial : thread)
1524 && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
1525 /* Have to test this condition if annul condition is different
1526 from (and less restrictive than) non-annulling one. */
1527 && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
1528 {
1529
1530 if (! annul_p)
1531 {
1532 update_block (trial, thread);
1533 if (trial == thread)
1534 thread = next_active_insn (thread);
1535
1536 delete_related_insns (trial);
1537 INSN_FROM_TARGET_P (next_to_match) = 0;
1538 }
1539 else
1540 merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);
1541
1542 if (++slot_number == num_slots)
1543 break;
1544
1545 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1546 }
1547
1548 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
1549 mark_referenced_resources (trial, &needed, 1);
1550 }
1551
1552 /* See if we stopped on a filled insn. If we did, try to see if its
1553 delay slots match. */
1554 if (slot_number != num_slots
1555 && trial && NONJUMP_INSN_P (trial)
1556 && GET_CODE (PATTERN (trial)) == SEQUENCE
1557 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
1558 {
1559 rtx pat = PATTERN (trial);
1560 rtx filled_insn = XVECEXP (pat, 0, 0);
1561
1562 /* Account for resources set/needed by the filled insn. */
1563 mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
1564 mark_referenced_resources (filled_insn, &needed, 1);
1565
1566 for (i = 1; i < XVECLEN (pat, 0); i++)
1567 {
1568 rtx dtrial = XVECEXP (pat, 0, i);
1569
1570 if (! insn_references_resource_p (dtrial, &set, 1)
1571 && ! insn_sets_resource_p (dtrial, &set, 1)
1572 && ! insn_sets_resource_p (dtrial, &needed, 1)
1573 #ifdef HAVE_cc0
1574 && ! sets_cc0_p (PATTERN (dtrial))
1575 #endif
1576 && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
1577 && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
1578 {
1579 if (! annul_p)
1580 {
1581 rtx new;
1582
1583 update_block (dtrial, thread);
1584 new = delete_from_delay_slot (dtrial);
1585 if (INSN_DELETED_P (thread))
1586 thread = new;
1587 INSN_FROM_TARGET_P (next_to_match) = 0;
1588 }
1589 else
1590 merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
1591 merged_insns);
1592
1593 if (++slot_number == num_slots)
1594 break;
1595
1596 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1597 }
1598 else
1599 {
1600 /* Keep track of the set/referenced resources for the delay
1601 slots of any trial insns we encounter. */
1602 mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
1603 mark_referenced_resources (dtrial, &needed, 1);
1604 }
1605 }
1606 }
1607
1608 /* If all insns in the delay slot have been matched and we were previously
1609 annulling the branch, we need not do so any more. In that case delete all the
1610 merged insns. Also clear the INSN_FROM_TARGET_P bit of each insn in
1611 the delay list so that we know that it isn't only being used at the
1612 target. */
1613 if (slot_number == num_slots && annul_p)
1614 {
1615 for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
1616 {
1617 if (GET_MODE (merged_insns) == SImode)
1618 {
1619 rtx new;
1620
1621 update_block (XEXP (merged_insns, 0), thread);
1622 new = delete_from_delay_slot (XEXP (merged_insns, 0));
1623 if (INSN_DELETED_P (thread))
1624 thread = new;
1625 }
1626 else
1627 {
1628 update_block (XEXP (merged_insns, 0), thread);
1629 delete_related_insns (XEXP (merged_insns, 0));
1630 }
1631 }
1632
1633 INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
1634
1635 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1636 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
1637 }
1638 }
1639 \f
1640 /* See if INSN is redundant with an insn in front of TARGET. Often this
1641 is called when INSN is a candidate for a delay slot of TARGET.
1642 DELAY_LIST is a list of insns that will be placed in delay slots of TARGET in front
1643 of INSN. Often INSN will be redundant with an insn in a delay slot of
1644 some previous insn. This happens when we have a series of branches to the
1645 same label; in that case the first insn at the target might want to go
1646 into each of the delay slots.
1647
1648 If we are not careful, this routine can take up a significant fraction
1649 of the total compilation time (4%), but only wins rarely. Hence we
1650 speed this routine up by making two passes. The first pass goes back
1651 until it hits a label and sees if it finds an insn with an identical
1652 pattern. Only in this (relatively rare) event does it check for
1653 data conflicts.
1654
1655 We do not split insns we encounter. This could cause us not to find a
1656 redundant insn, but the cost of splitting seems greater than the possible
1657 gain in rare cases. */
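
/* As an illustration only (hypothetical pseudo-assembly, not taken from
   any particular port): with two filled branches to the same label,

	b	L1			b	L1
	 lw	%r2,0(%r4)		 lw	%r2,0(%r4)
	...
     L1: lw	%r2,0(%r4)

   an attempt to fill yet another slot with the load at L1 finds an
   identical insn in a delay slot before the target, which is exactly
   the redundancy this routine is meant to detect.  */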
1658
1659 static rtx
1660 redundant_insn (rtx insn, rtx target, rtx delay_list)
1661 {
1662 rtx target_main = target;
1663 rtx ipat = PATTERN (insn);
1664 rtx trial, pat;
1665 struct resources needed, set;
1666 int i;
1667 unsigned insns_to_search;
1668
1669 /* If INSN has any REG_UNUSED notes, it can't match anything since we
1670 are allowed to not actually assign to such a register. */
1671 if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
1672 return 0;
1673
1674 /* Scan backwards looking for a match. */
1675 for (trial = PREV_INSN (target),
1676 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1677 trial && insns_to_search > 0;
1678 trial = PREV_INSN (trial), --insns_to_search)
1679 {
1680 if (LABEL_P (trial))
1681 return 0;
1682
1683 if (! INSN_P (trial))
1684 continue;
1685
1686 pat = PATTERN (trial);
1687 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1688 continue;
1689
1690 if (GET_CODE (pat) == SEQUENCE)
1691 {
1692 /* Stop for a CALL and its delay slots because it is difficult to
1693 track its resource needs correctly. */
1694 if (CALL_P (XVECEXP (pat, 0, 0)))
1695 return 0;
1696
1697 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1698 slots because it is difficult to track its resource needs
1699 correctly. */
1700
1701 #ifdef INSN_SETS_ARE_DELAYED
1702 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1703 return 0;
1704 #endif
1705
1706 #ifdef INSN_REFERENCES_ARE_DELAYED
1707 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1708 return 0;
1709 #endif
1710
1711 /* See if any of the insns in the delay slot match, updating
1712 resource requirements as we go. */
1713 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1714 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
1715 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
1716 && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
1717 break;
1718
1719 /* If found a match, exit this loop early. */
1720 if (i > 0)
1721 break;
1722 }
1723
1724 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
1725 && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
1726 break;
1727 }
1728
1729 /* If we didn't find an insn that matches, return 0. */
1730 if (trial == 0)
1731 return 0;
1732
1733 /* See what resources this insn sets and needs. If they overlap, or
1734 if this insn references CC0, it can't be redundant. */
1735
1736 CLEAR_RESOURCE (&needed);
1737 CLEAR_RESOURCE (&set);
1738 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
1739 mark_referenced_resources (insn, &needed, 1);
1740
1741 /* If TARGET is a SEQUENCE, get the main insn. */
1742 if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
1743 target_main = XVECEXP (PATTERN (target), 0, 0);
1744
1745 if (resource_conflicts_p (&needed, &set)
1746 #ifdef HAVE_cc0
1747 || reg_mentioned_p (cc0_rtx, ipat)
1748 #endif
1749 /* The insn requiring the delay may not set anything needed or set by
1750 INSN. */
1751 || insn_sets_resource_p (target_main, &needed, 1)
1752 || insn_sets_resource_p (target_main, &set, 1))
1753 return 0;
1754
1755 /* Insns we pass may not set either NEEDED or SET, so merge them for
1756 simpler tests. */
1757 needed.memory |= set.memory;
1758 needed.unch_memory |= set.unch_memory;
1759 IOR_HARD_REG_SET (needed.regs, set.regs);
1760
1761 /* This insn isn't redundant if it conflicts with an insn that either is
1762 or will be in a delay slot of TARGET. */
1763
1764 while (delay_list)
1765 {
1766 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
1767 return 0;
1768 delay_list = XEXP (delay_list, 1);
1769 }
1770
1771 if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
1772 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
1773 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
1774 return 0;
1775
1776 /* Scan backwards until we reach a label or an insn that uses something
1777 INSN sets or sets something INSN uses or sets. */
1778
1779 for (trial = PREV_INSN (target),
1780 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1781 trial && !LABEL_P (trial) && insns_to_search > 0;
1782 trial = PREV_INSN (trial), --insns_to_search)
1783 {
1784 if (!INSN_P (trial))
1785 continue;
1786
1787 pat = PATTERN (trial);
1788 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1789 continue;
1790
1791 if (GET_CODE (pat) == SEQUENCE)
1792 {
1793 /* If this is a CALL_INSN and its delay slots, it is hard to track
1794 the resource needs properly, so give up. */
1795 if (CALL_P (XVECEXP (pat, 0, 0)))
1796 return 0;
1797
1798 /* If this is an INSN or JUMP_INSN with delayed effects, it
1799 is hard to track the resource needs properly, so give up. */
1800
1801 #ifdef INSN_SETS_ARE_DELAYED
1802 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1803 return 0;
1804 #endif
1805
1806 #ifdef INSN_REFERENCES_ARE_DELAYED
1807 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1808 return 0;
1809 #endif
1810
1811 /* See if any of the insns in the delay slot match, updating
1812 resource requirements as we go. */
1813 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1814 {
1815 rtx candidate = XVECEXP (pat, 0, i);
1816
1817 /* If an insn will be annulled if the branch is false, it isn't
1818 considered as a possible duplicate insn. */
1819 if (rtx_equal_p (PATTERN (candidate), ipat)
1820 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1821 && INSN_FROM_TARGET_P (candidate)))
1822 {
1823 /* Show that this insn will be used in the sequel. */
1824 INSN_FROM_TARGET_P (candidate) = 0;
1825 return candidate;
1826 }
1827
1828 /* Unless this is an annulled insn from the target of a branch,
1829 we must stop if it sets anything needed or set by INSN. */
1830 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1831 || ! INSN_FROM_TARGET_P (candidate))
1832 && insn_sets_resource_p (candidate, &needed, 1))
1833 return 0;
1834 }
1835
1836 /* If the insn requiring the delay slot conflicts with INSN, we
1837 must stop. */
1838 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
1839 return 0;
1840 }
1841 else
1842 {
1843 /* See if TRIAL is the same as INSN. */
1844 pat = PATTERN (trial);
1845 if (rtx_equal_p (pat, ipat))
1846 return trial;
1847
1848 /* Can't go any further if TRIAL conflicts with INSN. */
1849 if (insn_sets_resource_p (trial, &needed, 1))
1850 return 0;
1851 }
1852 }
1853
1854 return 0;
1855 }
1856 \f
1857 /* Return 1 if THREAD can only be executed in one way. If LABEL is nonzero,
1858 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
1859 is nonzero, we are allowed to fall into this thread; otherwise, we are
1860 not.
1861
1862 If LABEL is used more than once, or we pass a label other than LABEL
1863 before finding an active insn, we do not own this thread. */
1864
1865 static int
1866 own_thread_p (rtx thread, rtx label, int allow_fallthrough)
1867 {
1868 rtx active_insn;
1869 rtx insn;
1870
1871 /* We don't own the function end. */
1872 if (thread == 0)
1873 return 0;
1874
1875 /* Get the first active insn, or THREAD, if it is an active insn. */
1876 active_insn = next_active_insn (PREV_INSN (thread));
1877
1878 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
1879 if (LABEL_P (insn)
1880 && (insn != label || LABEL_NUSES (insn) != 1))
1881 return 0;
1882
1883 if (allow_fallthrough)
1884 return 1;
1885
1886 /* Ensure that we reach a BARRIER before any insn or label. */
1887 for (insn = prev_nonnote_insn (thread);
1888 insn == 0 || !BARRIER_P (insn);
1889 insn = prev_nonnote_insn (insn))
1890 if (insn == 0
1891 || LABEL_P (insn)
1892 || (NONJUMP_INSN_P (insn)
1893 && GET_CODE (PATTERN (insn)) != USE
1894 && GET_CODE (PATTERN (insn)) != CLOBBER))
1895 return 0;
1896
1897 return 1;
1898 }
1899 \f
1900 /* Called when INSN is being moved from a location near the target of a jump.
1901 We leave a marker of the form (use (INSN)) immediately in front
1902 of WHERE for mark_target_live_regs. These markers will be deleted when
1903 reorg finishes.
1904
1905 We used to try to update the live status of registers if WHERE is at
1906 the start of a basic block, but that can't work since we may remove a
1907 BARRIER in relax_delay_slots. */
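
/* A sketch of the marker (illustrative RTL, invented insn numbers):
   moving (insn 10 ... (set (reg:SI 2) (reg:SI 3))) away leaves

	(use (insn 10 ... (set (reg:SI 2) (reg:SI 3))))

   in front of WHERE, so mark_target_live_regs can still see which
   resources the moved insn touched at this point.  */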
1908
1909 static void
1910 update_block (rtx insn, rtx where)
1911 {
1912 /* Ignore if this was in a delay slot and it came from the target of
1913 a branch. */
1914 if (INSN_FROM_TARGET_P (insn))
1915 return;
1916
1917 emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
1918
1919 /* INSN might be making a value live in a block where it didn't use to
1920 be. So recompute liveness information for this block. */
1921
1922 incr_ticks_for_insn (insn);
1923 }
1924
1925 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
1926 the basic block containing the jump. */
1927
1928 static int
1929 reorg_redirect_jump (rtx jump, rtx nlabel)
1930 {
1931 incr_ticks_for_insn (jump);
1932 return redirect_jump (jump, nlabel, 1);
1933 }
1934
1935 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1936 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1937 that reference values used in INSN. If we find one, then we move the
1938 REG_DEAD note to INSN.
1939
1940 This is needed to handle the case where a later insn (after INSN) has a
1941 REG_DEAD note for a register used by INSN, and this later insn subsequently
1942 gets moved before a CODE_LABEL because it is a redundant insn. In this
1943 case, mark_target_live_regs may be confused into thinking the register
1944 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
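
/* A hypothetical illustration (invented RTL and insn numbers):

	INSN:	(insn 10 ... (set (reg 5) (plus (reg 4) (const_int 1))))
	P:	(insn 12 ...)  carrying  (expr_list:REG_DEAD (reg 4) ...)

   Since insn 10 is about to be moved past insn 12 into the delay slot,
   the REG_DEAD note for (reg 4) is transferred from insn 12 to insn 10,
   so that the recorded death still follows the last use.  */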
1945
1946 static void
1947 update_reg_dead_notes (rtx insn, rtx delayed_insn)
1948 {
1949 rtx p, link, next;
1950
1951 for (p = next_nonnote_insn (insn); p != delayed_insn;
1952 p = next_nonnote_insn (p))
1953 for (link = REG_NOTES (p); link; link = next)
1954 {
1955 next = XEXP (link, 1);
1956
1957 if (REG_NOTE_KIND (link) != REG_DEAD
1958 || !REG_P (XEXP (link, 0)))
1959 continue;
1960
1961 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1962 {
1963 /* Move the REG_DEAD note from P to INSN. */
1964 remove_note (p, link);
1965 XEXP (link, 1) = REG_NOTES (insn);
1966 REG_NOTES (insn) = link;
1967 }
1968 }
1969 }
1970
1971 /* Called when an insn redundant with START_INSN is deleted. If there
1972 is a REG_DEAD note for the target of START_INSN between START_INSN
1973 and STOP_INSN, then the REG_DEAD note needs to be deleted since the
1974 value no longer dies there.
1975
1976 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1977 confused into thinking the register is dead. */
1978
1979 static void
1980 fix_reg_dead_note (rtx start_insn, rtx stop_insn)
1981 {
1982 rtx p, link, next;
1983
1984 for (p = next_nonnote_insn (start_insn); p != stop_insn;
1985 p = next_nonnote_insn (p))
1986 for (link = REG_NOTES (p); link; link = next)
1987 {
1988 next = XEXP (link, 1);
1989
1990 if (REG_NOTE_KIND (link) != REG_DEAD
1991 || !REG_P (XEXP (link, 0)))
1992 continue;
1993
1994 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
1995 {
1996 remove_note (p, link);
1997 return;
1998 }
1999 }
2000 }
2001
2002 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
2003
2004 This handles the case of udivmodXi4 instructions which optimize their
2005 output depending on whether any REG_UNUSED notes are present.
2006 We must make sure that INSN calculates as many results as REDUNDANT_INSN
2007 does. */
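
/* For instance (hypothetical): if INSN and REDUNDANT_INSN are both
   udivmodsi4 insns producing quotient (reg 2) and remainder (reg 3),
   and only INSN carries a REG_UNUSED note for (reg 3), that note is
   removed so that INSN is not optimized into computing fewer results
   than the insn it now stands in for.  */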
2008
2009 static void
2010 update_reg_unused_notes (rtx insn, rtx redundant_insn)
2011 {
2012 rtx link, next;
2013
2014 for (link = REG_NOTES (insn); link; link = next)
2015 {
2016 next = XEXP (link, 1);
2017
2018 if (REG_NOTE_KIND (link) != REG_UNUSED
2019 || !REG_P (XEXP (link, 0)))
2020 continue;
2021
2022 if (! find_regno_note (redundant_insn, REG_UNUSED,
2023 REGNO (XEXP (link, 0))))
2024 remove_note (insn, link);
2025 }
2026 }
2027 \f
2028 /* Scan a function looking for insns that need a delay slot and find insns to
2029 put into the delay slot.
2030
2031 NON_JUMPS_P is nonzero if we are to only try to fill non-jump insns (such
2032 as calls). We do these first since we don't want jump insns (that are
2033 easier to fill) to get the only insns that could be used for non-jump insns.
2034 When it is zero, only try to fill JUMP_INSNs.
2035
2036 When slots are filled in this manner, the insns (including the
2037 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2038 it is possible to tell whether a delay slot has really been filled
2039 or not. `final' knows how to deal with this, by communicating
2040 through FINAL_SEQUENCE. */
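
/* A filled slot ends up looking roughly like this (illustrative RTL;
   the insn numbers and the single slot are invented):

	(insn 30 29 31 (sequence [
		(call_insn 27 ...)	;; the insn needing the slot
		(insn 26 ...)		;; the insn filling it
	    ]) ...)  */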
2041
2042 static void
2043 fill_simple_delay_slots (int non_jumps_p)
2044 {
2045 rtx insn, pat, trial, next_trial;
2046 int i;
2047 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2048 struct resources needed, set;
2049 int slots_to_fill, slots_filled;
2050 rtx delay_list;
2051
2052 for (i = 0; i < num_unfilled_slots; i++)
2053 {
2054 int flags;
2055 /* Get the next insn to fill. If it has already had any slots assigned,
2056 we can't do anything with it. Maybe we'll improve this later. */
2057
2058 insn = unfilled_slots_base[i];
2059 if (insn == 0
2060 || INSN_DELETED_P (insn)
2061 || (NONJUMP_INSN_P (insn)
2062 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2063 || (JUMP_P (insn) && non_jumps_p)
2064 || (!JUMP_P (insn) && ! non_jumps_p))
2065 continue;
2066
2067 /* It may have been that this insn used to need delay slots, but
2068 now doesn't; ignore in that case. This can happen, for example,
2069 on the HP PA RISC, where the number of delay slots depends on
2070 what insns are nearby. */
2071 slots_to_fill = num_delay_slots (insn);
2072
2073 /* Some machine descriptions define instructions to have
2074 delay slots only in certain circumstances which may depend on
2075 nearby insns (which change due to reorg's actions).
2076
2077 For example, the PA port normally has delay slots for unconditional
2078 jumps.
2079
2080 However, the PA port claims such jumps do not have a delay slot
2081 if they are immediate successors of certain CALL_INSNs. This
2082 allows the port to favor filling the delay slot of the call with
2083 the unconditional jump. */
2084 if (slots_to_fill == 0)
2085 continue;
2086
2087 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2088 says how many. After initialization, first try optimizing
2089
2090 call _foo call _foo
2091 nop add %o7,.-L1,%o7
2092 b,a L1
2093 nop
2094
2095 If this case applies, the delay slot of the call is filled with
2096 the unconditional jump. This is done first to avoid having the
2097 delay slot of the call filled in the backward scan. Also, since
2098 the unconditional jump is likely to also have a delay slot, that
2099 insn must exist when it is subsequently scanned.
2100
2101 This is tried on each insn with delay slots as some machines
2102 have insns which perform calls, but are not represented as
2103 CALL_INSNs. */
2104
2105 slots_filled = 0;
2106 delay_list = 0;
2107
2108 if (JUMP_P (insn))
2109 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2110 else
2111 flags = get_jump_flags (insn, NULL_RTX);
2112
2113 if ((trial = next_active_insn (insn))
2114 && JUMP_P (trial)
2115 && simplejump_p (trial)
2116 && eligible_for_delay (insn, slots_filled, trial, flags)
2117 && no_labels_between_p (insn, trial)
2118 && ! can_throw_internal (trial))
2119 {
2120 rtx *tmp;
2121 slots_filled++;
2122 delay_list = add_to_delay_list (trial, delay_list);
2123
2124 /* TRIAL may have had its delay slot filled, then unfilled. When
2125 the delay slot is unfilled, TRIAL is placed back on the unfilled
2126 slots obstack. Unfortunately, it is placed on the end of the
2127 obstack, not in its original location. Therefore, we must search
2128 from entry i + 1 to the end of the unfilled slots obstack to
2129 try and find TRIAL. */
2130 tmp = &unfilled_slots_base[i + 1];
2131 while (*tmp != trial && tmp != unfilled_slots_next)
2132 tmp++;
2133
2134 /* Remove the unconditional jump from consideration for delay slot
2135 filling and unthread it. */
2136 if (*tmp == trial)
2137 *tmp = 0;
2138 {
2139 rtx next = NEXT_INSN (trial);
2140 rtx prev = PREV_INSN (trial);
2141 if (prev)
2142 NEXT_INSN (prev) = next;
2143 if (next)
2144 PREV_INSN (next) = prev;
2145 }
2146 }
2147
2148 /* Now, scan backwards from the insn to search for a potential
2149 delay-slot candidate. Stop searching when a label or jump is hit.
2150
2151 For each candidate, if it is to go into the delay slot (moved
2152 forward in execution sequence), it must not need or set any resources
2153 that were set by later insns and must not set any resources that
2154 are needed for those insns.
2155
2156 The delay slot insn itself sets resources unless it is a call
2157 (in which case the called routine, not the insn itself, is doing
2158 the setting). */
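
	  /* E.g. (hypothetical pseudo-assembly):

		add	%r2,%r3,%r4
		cmp	%r5,%r6
		bne	L1		<- needs a delay slot

	     The cmp cannot move (the bne needs the condition it sets),
	     but the add conflicts with nothing that follows it and can
	     be pulled down into the bne's delay slot.  */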
2159
2160 if (slots_filled < slots_to_fill)
2161 {
2162 CLEAR_RESOURCE (&needed);
2163 CLEAR_RESOURCE (&set);
2164 mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
2165 mark_referenced_resources (insn, &needed, 0);
2166
2167 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2168 trial = next_trial)
2169 {
2170 next_trial = prev_nonnote_insn (trial);
2171
2172 /* This must be an INSN or CALL_INSN. */
2173 pat = PATTERN (trial);
2174
2175 /* USE and CLOBBER at this level are just for flow; ignore them. */
2176 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2177 continue;
2178
2179 /* Check for resource conflict first, to avoid unnecessary
2180 splitting. */
2181 if (! insn_references_resource_p (trial, &set, 1)
2182 && ! insn_sets_resource_p (trial, &set, 1)
2183 && ! insn_sets_resource_p (trial, &needed, 1)
2184 #ifdef HAVE_cc0
2185 /* Can't separate set of cc0 from its use. */
2186 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2187 #endif
2188 && ! can_throw_internal (trial))
2189 {
2190 trial = try_split (pat, trial, 1);
2191 next_trial = prev_nonnote_insn (trial);
2192 if (eligible_for_delay (insn, slots_filled, trial, flags))
2193 {
2194 /* In this case, we are searching backward, so if we
2195 find insns to put on the delay list, we want
2196 to put them at the head, rather than the
2197 tail, of the list. */
2198
2199 update_reg_dead_notes (trial, insn);
2200 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2201 trial, delay_list);
2202 update_block (trial, trial);
2203 delete_related_insns (trial);
2204 if (slots_to_fill == ++slots_filled)
2205 break;
2206 continue;
2207 }
2208 }
2209
2210 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2211 mark_referenced_resources (trial, &needed, 1);
2212 }
2213 }
2214
2215 /* If all needed slots haven't been filled, we come here. */
2216
2217 /* Try to optimize case of jumping around a single insn. */
2218 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2219 if (slots_filled != slots_to_fill
2220 && delay_list == 0
2221 && JUMP_P (insn)
2222 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2223 {
2224 delay_list = optimize_skip (insn);
2225 if (delay_list)
2226 slots_filled += 1;
2227 }
2228 #endif
2229
2230 /* Try to get insns from beyond the insn needing the delay slot.
2231 These insns can neither set nor reference resources set in insns being
2232 skipped, cannot set resources in the insn being skipped, and, if this
2233 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2234 call might not return).
2235
2236 There used to be code which continued past the target label if
2237 we saw all uses of the target label. This code did not work,
2238 because it failed to account for some instructions which were
2239 both annulled and marked as from the target. This can happen as a
2240 result of optimize_skip. Since this code was redundant with
2241 fill_eager_delay_slots anyway, it was just deleted. */
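
	  /* E.g. (hypothetical):

		call	_foo		<- needs a delay slot
		add	%r2,%r3,%r4

	     The add may be hoisted into the call's delay slot provided it
	     neither touches resources the call sets or needs nor can trap,
	     since the call might not return.  */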
2242
2243 if (slots_filled != slots_to_fill
2244 /* If this instruction could throw an exception which is
2245 caught in the same function, then it's not safe to fill
2246 the delay slot with an instruction from beyond this
2247 point. For example, consider:
2248
2249 int i = 2;
2250
2251 try {
2252 f();
2253 i = 3;
2254 } catch (...) {}
2255
2256 return i;
2257
2258 Even though `i' is a local variable, we must be sure not
2259 to put `i = 3' in the delay slot if `f' might throw an
2260 exception.
2261
2262 Presumably, we should also check to see if we could get
2263 back to this function via `setjmp'. */
2264 && ! can_throw_internal (insn)
2265 && (!JUMP_P (insn)
2266 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2267 && ! simplejump_p (insn)
2268 && JUMP_LABEL (insn) != 0)))
2269 {
2270 /* Invariant: If INSN is a JUMP_INSN, TARGET is the insn's
2271 jump label. Otherwise, TARGET is zero. */
2272 rtx target = 0;
2273 int maybe_never = 0;
2274 rtx pat, trial_delay;
2275
2276 CLEAR_RESOURCE (&needed);
2277 CLEAR_RESOURCE (&set);
2278
2279 if (CALL_P (insn))
2280 {
2281 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2282 mark_referenced_resources (insn, &needed, 1);
2283 maybe_never = 1;
2284 }
2285 else
2286 {
2287 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2288 mark_referenced_resources (insn, &needed, 1);
2289 if (JUMP_P (insn))
2290 target = JUMP_LABEL (insn);
2291 }
2292
2293 if (target == 0)
2294 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2295 {
2296 next_trial = next_nonnote_insn (trial);
2297
2298 if (LABEL_P (trial)
2299 || BARRIER_P (trial))
2300 break;
2301
2302 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2303 pat = PATTERN (trial);
2304
2305 /* Stand-alone USE and CLOBBER are just for flow. */
2306 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2307 continue;
2308
2309 /* If this already has filled delay slots, get the insn needing
2310 the delay slots. */
2311 if (GET_CODE (pat) == SEQUENCE)
2312 trial_delay = XVECEXP (pat, 0, 0);
2313 else
2314 trial_delay = trial;
2315
2316 /* Stop our search when we reach any jump insn. */
2317 if (JUMP_P (trial_delay))
2318 break;
2319
2320 /* See if we have a resource problem before we try to
2321 split. */
2322 if (GET_CODE (pat) != SEQUENCE
2323 && ! insn_references_resource_p (trial, &set, 1)
2324 && ! insn_sets_resource_p (trial, &set, 1)
2325 && ! insn_sets_resource_p (trial, &needed, 1)
2326 #ifdef HAVE_cc0
2327 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2328 #endif
2329 && ! (maybe_never && may_trap_p (pat))
2330 && (trial = try_split (pat, trial, 0))
2331 && eligible_for_delay (insn, slots_filled, trial, flags)
2332 && ! can_throw_internal(trial))
2333 {
2334 next_trial = next_nonnote_insn (trial);
2335 delay_list = add_to_delay_list (trial, delay_list);
2336
2337 #ifdef HAVE_cc0
2338 if (reg_mentioned_p (cc0_rtx, pat))
2339 link_cc0_insns (trial);
2340 #endif
2341
2342 delete_related_insns (trial);
2343 if (slots_to_fill == ++slots_filled)
2344 break;
2345 continue;
2346 }
2347
2348 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2349 mark_referenced_resources (trial, &needed, 1);
2350
2351 /* Ensure we don't put insns between the setting of cc and the
2352 comparison by moving a setting of cc into an earlier delay
2353 slot since these insns could clobber the condition code. */
2354 set.cc = 1;
2355
2356 /* If this is a call or jump, we might not get here. */
2357 if (CALL_P (trial_delay)
2358 || JUMP_P (trial_delay))
2359 maybe_never = 1;
2360 }
2361
2362 /* If there are slots left to fill and our search was stopped by an
2363 unconditional branch, try the insn at the branch target. We can
2364 redirect the branch if it works.
2365
2366 Don't do this if the insn at the branch target is a branch. */
2367 if (slots_to_fill != slots_filled
2368 && trial
2369 && JUMP_P (trial)
2370 && simplejump_p (trial)
2371 && (target == 0 || JUMP_LABEL (trial) == target)
2372 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2373 && ! (NONJUMP_INSN_P (next_trial)
2374 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2375 && !JUMP_P (next_trial)
2376 && ! insn_references_resource_p (next_trial, &set, 1)
2377 && ! insn_sets_resource_p (next_trial, &set, 1)
2378 && ! insn_sets_resource_p (next_trial, &needed, 1)
2379 #ifdef HAVE_cc0
2380 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2381 #endif
2382 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
2383 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2384 && eligible_for_delay (insn, slots_filled, next_trial, flags)
2385 && ! can_throw_internal (trial))
2386 {
2387 /* See comment in relax_delay_slots about necessity of using
2388 next_real_insn here. */
2389 rtx new_label = next_real_insn (next_trial);
2390
2391 if (new_label != 0)
2392 new_label = get_label_before (new_label);
2393 else
2394 new_label = find_end_label ();
2395
2396 if (new_label)
2397 {
2398 delay_list
2399 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2400 slots_filled++;
2401 reorg_redirect_jump (trial, new_label);
2402
2403 /* If we merged because we both jumped to the same place,
2404 redirect the original insn also. */
2405 if (target)
2406 reorg_redirect_jump (insn, new_label);
2407 }
2408 }
2409 }
2410
2411 /* If this is an unconditional jump, then try to get insns from the
2412 target of the jump. */
2413 if (JUMP_P (insn)
2414 && simplejump_p (insn)
2415 && slots_filled != slots_to_fill)
2416 delay_list
2417 = fill_slots_from_thread (insn, const_true_rtx,
2418 next_active_insn (JUMP_LABEL (insn)),
2419 NULL, 1, 1,
2420 own_thread_p (JUMP_LABEL (insn),
2421 JUMP_LABEL (insn), 0),
2422 slots_to_fill, &slots_filled,
2423 delay_list);
2424
2425 if (delay_list)
2426 unfilled_slots_base[i]
2427 = emit_delay_sequence (insn, delay_list, slots_filled);
2428
2429 if (slots_to_fill == slots_filled)
2430 unfilled_slots_base[i] = 0;
2431
2432 note_delay_statistics (slots_filled, 0);
2433 }
2434
2435 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2436 /* See if the epilogue needs any delay slots. Try to fill them if so.
2437 The only thing we can do is scan backwards from the end of the
2438 function. If we did this in a previous pass, it is incorrect to do it
2439 again. */
2440 if (current_function_epilogue_delay_list)
2441 return;
2442
2443 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2444 if (slots_to_fill == 0)
2445 return;
2446
2447 slots_filled = 0;
2448 CLEAR_RESOURCE (&set);
2449
2450 /* The frame pointer and stack pointer are needed at the beginning of
2451 the epilogue, so instructions setting them cannot be put in the
2452 epilogue delay slot. However, everything else needed at function
2453 end is safe, so we don't want to use end_of_function_needs here. */
2454 CLEAR_RESOURCE (&needed);
2455 if (frame_pointer_needed)
2456 {
2457 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2458 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2459 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2460 #endif
2461 if (! EXIT_IGNORE_STACK
2462 || current_function_sp_is_unchanging)
2463 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2464 }
2465 else
2466 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2467
2468 #ifdef EPILOGUE_USES
2469 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2470 {
2471 if (EPILOGUE_USES (i))
2472 SET_HARD_REG_BIT (needed.regs, i);
2473 }
2474 #endif
2475
2476 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2477 trial = PREV_INSN (trial))
2478 {
2479 if (NOTE_P (trial))
2480 continue;
2481 pat = PATTERN (trial);
2482 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2483 continue;
2484
2485 if (! insn_references_resource_p (trial, &set, 1)
2486 && ! insn_sets_resource_p (trial, &needed, 1)
2487 && ! insn_sets_resource_p (trial, &set, 1)
2488 #ifdef HAVE_cc0
2489 /* Don't want to mess with cc0 here. */
2490 && ! reg_mentioned_p (cc0_rtx, pat)
2491 #endif
2492 && ! can_throw_internal (trial))
2493 {
2494 trial = try_split (pat, trial, 1);
2495 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2496 {
2497 /* Here as well we are searching backward, so put the
2498 insns we find on the head of the list. */
2499
2500 current_function_epilogue_delay_list
2501 = gen_rtx_INSN_LIST (VOIDmode, trial,
2502 current_function_epilogue_delay_list);
2503 mark_end_of_function_resources (trial, 1);
2504 update_block (trial, trial);
2505 delete_related_insns (trial);
2506
2507 /* Clear deleted bit so final.c will output the insn. */
2508 INSN_DELETED_P (trial) = 0;
2509
2510 if (slots_to_fill == ++slots_filled)
2511 break;
2512 continue;
2513 }
2514 }
2515
2516 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2517 mark_referenced_resources (trial, &needed, 1);
2518 }
2519
2520 note_delay_statistics (slots_filled, 0);
2521 #endif
2522 }
2523 \f
2524 /* Try to find insns to place in delay slots.
2525
2526 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2527 or is an unconditional branch if CONDITION is const_true_rtx.
2528 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2529
2530 THREAD is a flow of control: either the insns to be executed if the
2531 branch is true or those executed if it is false; THREAD_IF_TRUE says which.
2532
2533 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2534 to see if any potential delay slot insns set things needed there.
2535
2536 LIKELY is nonzero if it is extremely likely that the branch will be
2537 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2538 end of a loop back up to the top.
2539
2540 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
2541 thread. I.e., it is the fallthrough code of our jump or the target of the
2542 jump when we are the only jump going there.
2543
2544 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2545 case, we can only take insns from the head of the thread for our delay
2546 slot. We then adjust the jump to point after the insns we have taken. */
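
/* A sketch of the two threads for a conditional branch (hypothetical):

	INSN:	beq	%r1,L1
		insn_f			<- the "false" (fallthrough) thread
		...
     L1:	insn_t			<- the "true" (target) thread

   With THREAD_IF_TRUE nonzero, THREAD starts at insn_t and
   OPPOSITE_THREAD at insn_f; a candidate taken from the target must not
   set anything live at insn_f unless it can go in an annulled slot.  */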
2547
2548 static rtx
2549 fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
2550 rtx opposite_thread, int likely, int thread_if_true,
2551 int own_thread, int slots_to_fill,
2552 int *pslots_filled, rtx delay_list)
2553 {
2554 rtx new_thread;
2555 struct resources opposite_needed, set, needed;
2556 rtx trial;
2557 int lose = 0;
2558 int must_annul = 0;
2559 int flags;
2560
2561 /* Validate our arguments. */
2562 if ((condition == const_true_rtx && ! thread_if_true)
2563 || (! own_thread && ! thread_if_true))
2564 abort ();
2565
2566 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2567
2568 /* If our thread is the end of the subroutine, we can't get any delay
2569 insns from it. */
2570 if (thread == 0)
2571 return delay_list;
2572
2573 /* If this is an unconditional branch, nothing is needed at the
2574 opposite thread. Otherwise, compute what is needed there. */
2575 if (condition == const_true_rtx)
2576 CLEAR_RESOURCE (&opposite_needed);
2577 else
2578 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2579
2580 /* If the insn at THREAD can be split, do it here to avoid having to
2581 update THREAD and NEW_THREAD if it is done in the loop below. Also
2582 initialize NEW_THREAD. */
2583
2584 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2585
2586 /* Scan insns at THREAD. We are looking for an insn that can be removed
2587 from THREAD (it neither sets nor references resources that were set
2588 ahead of it and it doesn't set anything needed by the insns ahead of
2589 it) and that either can be placed in an annulling insn or isn't
2590 needed at OPPOSITE_THREAD. */
2591
2592 CLEAR_RESOURCE (&needed);
2593 CLEAR_RESOURCE (&set);
2594
2595 /* If we do not own this thread, we must stop as soon as we find
2596 something that we can't put in a delay slot, since all we can do
2597 is branch into THREAD at a later point. Therefore, labels stop
2598 the search if this is not the `true' thread. */
2599
2600 for (trial = thread;
2601 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2602 trial = next_nonnote_insn (trial))
2603 {
2604 rtx pat, old_trial;
2605
2606 /* If we have passed a label, we no longer own this thread. */
2607 if (LABEL_P (trial))
2608 {
2609 own_thread = 0;
2610 continue;
2611 }
2612
2613 pat = PATTERN (trial);
2614 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2615 continue;
2616
2617 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2618 don't separate or copy insns that set and use CC0. */
2619 if (! insn_references_resource_p (trial, &set, 1)
2620 && ! insn_sets_resource_p (trial, &set, 1)
2621 && ! insn_sets_resource_p (trial, &needed, 1)
2622 #ifdef HAVE_cc0
2623 && ! (reg_mentioned_p (cc0_rtx, pat)
2624 && (! own_thread || ! sets_cc0_p (pat)))
2625 #endif
2626 && ! can_throw_internal (trial))
2627 {
2628 rtx prior_insn;
2629
2630 /* If TRIAL is redundant with some insn before INSN, we don't
2631 actually need to add it to the delay list; we can merely pretend
2632 we did. */
2633 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2634 {
2635 fix_reg_dead_note (prior_insn, insn);
2636 if (own_thread)
2637 {
2638 update_block (trial, thread);
2639 if (trial == thread)
2640 {
2641 thread = next_active_insn (thread);
2642 if (new_thread == trial)
2643 new_thread = thread;
2644 }
2645
2646 delete_related_insns (trial);
2647 }
2648 else
2649 {
2650 update_reg_unused_notes (prior_insn, trial);
2651 new_thread = next_active_insn (trial);
2652 }
2653
2654 continue;
2655 }
2656
2657 /* There are two ways we can win: If TRIAL doesn't set anything
2658 needed at the opposite thread and can't trap, or if it can
2659 go into an annulled delay slot. */
2660 if (!must_annul
2661 && (condition == const_true_rtx
2662 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2663 && ! may_trap_p (pat))))
2664 {
2665 old_trial = trial;
2666 trial = try_split (pat, trial, 0);
2667 if (new_thread == old_trial)
2668 new_thread = trial;
2669 if (thread == old_trial)
2670 thread = trial;
2671 pat = PATTERN (trial);
2672 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2673 goto winner;
2674 }
2675 else if (0
2676 #ifdef ANNUL_IFTRUE_SLOTS
2677 || ! thread_if_true
2678 #endif
2679 #ifdef ANNUL_IFFALSE_SLOTS
2680 || thread_if_true
2681 #endif
2682 )
2683 {
2684 old_trial = trial;
2685 trial = try_split (pat, trial, 0);
2686 if (new_thread == old_trial)
2687 new_thread = trial;
2688 if (thread == old_trial)
2689 thread = trial;
2690 pat = PATTERN (trial);
2691 if ((must_annul || delay_list == NULL) && (thread_if_true
2692 ? check_annul_list_true_false (0, delay_list)
2693 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2694 : check_annul_list_true_false (1, delay_list)
2695 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2696 {
2697 rtx temp;
2698
2699 must_annul = 1;
2700 winner:
2701
2702 #ifdef HAVE_cc0
2703 if (reg_mentioned_p (cc0_rtx, pat))
2704 link_cc0_insns (trial);
2705 #endif
2706
2707 /* If we own this thread, delete the insn. If this is the
2708 destination of a branch, show that a basic block status
2709 may have been updated. In any case, mark the new
2710 starting point of this thread. */
2711 if (own_thread)
2712 {
2713 rtx note;
2714
2715 update_block (trial, thread);
2716 if (trial == thread)
2717 {
2718 thread = next_active_insn (thread);
2719 if (new_thread == trial)
2720 new_thread = thread;
2721 }
2722
2723 /* We are moving this insn, not deleting it. We must
2724 temporarily increment the use count on any referenced
2725 label lest it be deleted by delete_related_insns. */
2726 note = find_reg_note (trial, REG_LABEL, 0);
2727 /* REG_LABEL could be NOTE_INSN_DELETED_LABEL too. */
2728 if (note && LABEL_P (XEXP (note, 0)))
2729 LABEL_NUSES (XEXP (note, 0))++;
2730
2731 delete_related_insns (trial);
2732
2733 if (note && LABEL_P (XEXP (note, 0)))
2734 LABEL_NUSES (XEXP (note, 0))--;
2735 }
2736 else
2737 new_thread = next_active_insn (trial);
2738
2739 temp = own_thread ? trial : copy_rtx (trial);
2740 if (thread_if_true)
2741 INSN_FROM_TARGET_P (temp) = 1;
2742
2743 delay_list = add_to_delay_list (temp, delay_list);
2744
2745 if (slots_to_fill == ++(*pslots_filled))
2746 {
2747 /* Even though we have filled all the slots, we
2748 may be branching to a location that has a
2749 redundant insn. Skip any if so. */
2750 while (new_thread && ! own_thread
2751 && ! insn_sets_resource_p (new_thread, &set, 1)
2752 && ! insn_sets_resource_p (new_thread, &needed, 1)
2753 && ! insn_references_resource_p (new_thread,
2754 &set, 1)
2755 && (prior_insn
2756 = redundant_insn (new_thread, insn,
2757 delay_list)))
2758 {
2759 /* We know we do not own the thread, so no need
2760 to call update_block and delete_insn. */
2761 fix_reg_dead_note (prior_insn, insn);
2762 update_reg_unused_notes (prior_insn, new_thread);
2763 new_thread = next_active_insn (new_thread);
2764 }
2765 break;
2766 }
2767
2768 continue;
2769 }
2770 }
2771 }
2772
2773 /* This insn can't go into a delay slot. */
2774 lose = 1;
2775 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2776 mark_referenced_resources (trial, &needed, 1);
2777
2778 /* Ensure we don't put insns between the setting of cc and the comparison
2779 by moving a setting of cc into an earlier delay slot since these insns
2780 could clobber the condition code. */
2781 set.cc = 1;
2782
2783 /* If this insn is a register-register copy and the next insn has
2784 a use of our destination, change it to use our source. That way,
2785 it will become a candidate for our delay slot the next time
2786 through this loop. This case occurs commonly in loops that
2787 scan a list.
2788
2789 We could check for more complex cases than those tested below,
2790 but it doesn't seem worth it. It might also be a good idea to try
2791 to swap the two insns. That might do better.
2792
2793 We can't do this if the next insn modifies our destination, because
2794 that would make the replacement into the insn invalid. We also can't
2795 do this if it modifies our source, because it might be an earlyclobber
2796 operand. This latter test also prevents updating the contents of
2797 a PRE_INC. We also can't do this if there's overlap of source and
2798 destination. Overlap may happen for larger-than-register-size modes. */
2799
2800 if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
2801 && REG_P (SET_SRC (pat))
2802 && REG_P (SET_DEST (pat))
2803 && !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
2804 {
2805 rtx next = next_nonnote_insn (trial);
2806
2807 if (next && NONJUMP_INSN_P (next)
2808 && GET_CODE (PATTERN (next)) != USE
2809 && ! reg_set_p (SET_DEST (pat), next)
2810 && ! reg_set_p (SET_SRC (pat), next)
2811 && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2812 && ! modified_in_p (SET_DEST (pat), next))
2813 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2814 }
2815 }
2816
2817 /* If we stopped on a branch insn that has delay slots, see if we can
2818 steal some of the insns in those slots. */
2819 if (trial && NONJUMP_INSN_P (trial)
2820 && GET_CODE (PATTERN (trial)) == SEQUENCE
2821 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
2822 {
2823 /* If this is the `true' thread, we will want to follow the jump,
2824 so we can only do this if we have taken everything up to here. */
2825 if (thread_if_true && trial == new_thread)
2826 {
2827 delay_list
2828 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2829 delay_list, &set, &needed,
2830 &opposite_needed, slots_to_fill,
2831 pslots_filled, &must_annul,
2832 &new_thread);
2833 /* If we owned the thread and are told that it branched
2834 elsewhere, make sure we own the thread at the new location. */
2835 if (own_thread && trial != new_thread)
2836 own_thread = own_thread_p (new_thread, new_thread, 0);
2837 }
2838 else if (! thread_if_true)
2839 delay_list
2840 = steal_delay_list_from_fallthrough (insn, condition,
2841 PATTERN (trial),
2842 delay_list, &set, &needed,
2843 &opposite_needed, slots_to_fill,
2844 pslots_filled, &must_annul);
2845 }
2846
2847 /* If we haven't found anything for this delay slot and it is very
2848 likely that the branch will be taken, see if the insn at our target
2849 increments or decrements a register with an increment that does not
2850 depend on the destination register. If so, try to place the opposite
2851 arithmetic insn after the jump insn and put the arithmetic insn in the
2852 delay slot. If we can't do this, return. */
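
  /* Illustration (hypothetical pseudo-assembly):

	   beq	...,L1			beq	...,L1'
	    nop			==>	 add	%r2,%r2,1	;; delay slot
	   ...				sub	%r2,%r2,1
	   ...				...
       L1: add	%r2,%r2,1	   L1':	<insn after the add>

     The increment executes unconditionally from the delay slot; the
     compensating insn emitted after the branch undoes it whenever the
     branch falls through.  */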
2853 if (delay_list == 0 && likely && new_thread
2854 && NONJUMP_INSN_P (new_thread)
2855 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2856 && asm_noperands (PATTERN (new_thread)) < 0)
2857 {
2858 rtx pat = PATTERN (new_thread);
2859 rtx dest;
2860 rtx src;
2861
2862 trial = new_thread;
2863 pat = PATTERN (trial);
2864
2865 if (!NONJUMP_INSN_P (trial)
2866 || GET_CODE (pat) != SET
2867 || ! eligible_for_delay (insn, 0, trial, flags)
2868 || can_throw_internal (trial))
2869 return 0;
2870
2871 dest = SET_DEST (pat), src = SET_SRC (pat);
2872 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2873 && rtx_equal_p (XEXP (src, 0), dest)
2874 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
2875 && ! side_effects_p (pat))
2876 {
2877 rtx other = XEXP (src, 1);
2878 rtx new_arith;
2879 rtx ninsn;
2880
2881 /* If this is a constant adjustment, use the same code with
2882 the negated constant. Otherwise, reverse the sense of the
2883 arithmetic. */
2884 if (GET_CODE (other) == CONST_INT)
2885 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2886 negate_rtx (GET_MODE (src), other));
2887 else
2888 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2889 GET_MODE (src), dest, other);
2890
2891 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2892 insn);
2893
2894 if (recog_memoized (ninsn) < 0
2895 || (extract_insn (ninsn), ! constrain_operands (1)))
2896 {
2897 delete_related_insns (ninsn);
2898 return 0;
2899 }
2900
2901 if (own_thread)
2902 {
2903 update_block (trial, thread);
2904 if (trial == thread)
2905 {
2906 thread = next_active_insn (thread);
2907 if (new_thread == trial)
2908 new_thread = thread;
2909 }
2910 delete_related_insns (trial);
2911 }
2912 else
2913 new_thread = next_active_insn (trial);
2914
2915 ninsn = own_thread ? trial : copy_rtx (trial);
2916 if (thread_if_true)
2917 INSN_FROM_TARGET_P (ninsn) = 1;
2918
2919 delay_list = add_to_delay_list (ninsn, NULL_RTX);
2920 (*pslots_filled)++;
2921 }
2922 }
2923
2924 if (delay_list && must_annul)
2925 INSN_ANNULLED_BRANCH_P (insn) = 1;
2926
2927 /* If we are to branch into the middle of this thread, find an appropriate
2928 label or make a new one if none, and redirect INSN to it. If we hit the
2929 end of the function, use the end-of-function label. */
2930 if (new_thread != thread)
2931 {
2932 rtx label;
2933
2934 if (! thread_if_true)
2935 abort ();
2936
2937 if (new_thread && JUMP_P (new_thread)
2938 && (simplejump_p (new_thread)
2939 || GET_CODE (PATTERN (new_thread)) == RETURN)
2940 && redirect_with_delay_list_safe_p (insn,
2941 JUMP_LABEL (new_thread),
2942 delay_list))
2943 new_thread = follow_jumps (JUMP_LABEL (new_thread));
2944
2945 if (new_thread == 0)
2946 label = find_end_label ();
2947 else if (LABEL_P (new_thread))
2948 label = new_thread;
2949 else
2950 label = get_label_before (new_thread);
2951
2952 if (label)
2953 reorg_redirect_jump (insn, label);
2954 }
2955
2956 return delay_list;
2957 }
2958 \f
2959 /* Make another attempt to find insns to place in delay slots.
2960
2961 We previously looked for insns located in front of the delay insn
2962 and, for non-jump delay insns, located behind the delay insn.
2963
2964 Here we try to schedule only jump insns, moving insns from either
2965 the target or the following insns into the delay slot. If annulling is
2966 supported, we are likely to be able to do this. Otherwise, we can do
2967 this only if safe. */
2968
2969 static void
2970 fill_eager_delay_slots (void)
2971 {
2972 rtx insn;
2973 int i;
2974 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2975
2976 for (i = 0; i < num_unfilled_slots; i++)
2977 {
2978 rtx condition;
2979 rtx target_label, insn_at_target, fallthrough_insn;
2980 rtx delay_list = 0;
2981 int own_target;
2982 int own_fallthrough;
2983 int prediction, slots_to_fill, slots_filled;
2984
2985 insn = unfilled_slots_base[i];
2986 if (insn == 0
2987 || INSN_DELETED_P (insn)
2988 || !JUMP_P (insn)
2989 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
2990 continue;
2991
2992 slots_to_fill = num_delay_slots (insn);
2993 /* Some machine descriptions define instructions to have
2994 delay slots only in certain circumstances which may depend on
2995 nearby insns (which change due to reorg's actions).
2996
2997 For example, the PA port normally has delay slots for unconditional
2998 jumps.
2999
3000 However, the PA port claims such jumps do not have a delay slot
3001 if they are immediate successors of certain CALL_INSNs. This
3002 allows the port to favor filling the delay slot of the call with
3003 the unconditional jump. */
3004 if (slots_to_fill == 0)
3005 continue;
3006
3007 slots_filled = 0;
3008 target_label = JUMP_LABEL (insn);
3009 condition = get_branch_condition (insn, target_label);
3010
3011 if (condition == 0)
3012 continue;
3013
3014 /* Get the next active fallthrough and target insns and see if we own
3015 them. Then see whether the branch is likely true. We don't need
3016 to do a lot of this for unconditional branches. */
3017
3018 insn_at_target = next_active_insn (target_label);
3019 own_target = own_thread_p (target_label, target_label, 0);
3020
3021 if (condition == const_true_rtx)
3022 {
3023 own_fallthrough = 0;
3024 fallthrough_insn = 0;
3025 prediction = 2;
3026 }
3027 else
3028 {
3029 fallthrough_insn = next_active_insn (insn);
3030 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3031 prediction = mostly_true_jump (insn, condition);
3032 }
3033
3034 /* If this insn is expected to branch, first try to get insns from our
3035 target, then our fallthrough insns. If it is not expected to branch,
3036 try the other order. */
3037
3038 if (prediction > 0)
3039 {
3040 delay_list
3041 = fill_slots_from_thread (insn, condition, insn_at_target,
3042 fallthrough_insn, prediction == 2, 1,
3043 own_target,
3044 slots_to_fill, &slots_filled, delay_list);
3045
3046 if (delay_list == 0 && own_fallthrough)
3047 {
3048 /* Even though we didn't find anything for delay slots,
3049 we might have found a redundant insn which we deleted
3050 from the thread that was filled. So we have to recompute
3051 the next insn at the target. */
3052 target_label = JUMP_LABEL (insn);
3053 insn_at_target = next_active_insn (target_label);
3054
3055 delay_list
3056 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3057 insn_at_target, 0, 0,
3058 own_fallthrough,
3059 slots_to_fill, &slots_filled,
3060 delay_list);
3061 }
3062 }
3063 else
3064 {
3065 if (own_fallthrough)
3066 delay_list
3067 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3068 insn_at_target, 0, 0,
3069 own_fallthrough,
3070 slots_to_fill, &slots_filled,
3071 delay_list);
3072
3073 if (delay_list == 0)
3074 delay_list
3075 = fill_slots_from_thread (insn, condition, insn_at_target,
3076 next_active_insn (insn), 0, 1,
3077 own_target,
3078 slots_to_fill, &slots_filled,
3079 delay_list);
3080 }
3081
3082 if (delay_list)
3083 unfilled_slots_base[i]
3084 = emit_delay_sequence (insn, delay_list, slots_filled);
3085
3086 if (slots_to_fill == slots_filled)
3087 unfilled_slots_base[i] = 0;
3088
3089 note_delay_statistics (slots_filled, 1);
3090 }
3091 }
3092 \f
3093 /* Once we have tried two ways to fill a delay slot, make a pass over the
3094 code to try to improve the results and to do such things as more jump
3095 threading. */
3096
3097 static void
3098 relax_delay_slots (rtx first)
3099 {
3100 rtx insn, next, pat;
3101 rtx trial, delay_insn, target_label;
3102
3103 /* Look at every JUMP_INSN and see if we can improve it. */
3104 for (insn = first; insn; insn = next)
3105 {
3106 rtx other;
3107
3108 next = next_active_insn (insn);
3109
3110 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3111 the next insn, or jumps to a label that is not the last of a
3112 group of consecutive labels. */
3113 if (JUMP_P (insn)
3114 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3115 && (target_label = JUMP_LABEL (insn)) != 0)
3116 {
3117 target_label = skip_consecutive_labels (follow_jumps (target_label));
3118 if (target_label == 0)
3119 target_label = find_end_label ();
3120
3121 if (target_label && next_active_insn (target_label) == next
3122 && ! condjump_in_parallel_p (insn))
3123 {
3124 delete_jump (insn);
3125 continue;
3126 }
3127
3128 if (target_label && target_label != JUMP_LABEL (insn))
3129 reorg_redirect_jump (insn, target_label);
3130
3131 /* See if this jump branches around an unconditional jump.
3132 If so, invert this jump and point it to the target of the
3133 second jump. */
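
	  /* E.g. (hypothetical): invert the conditional and retarget it,
	     making the unconditional jump dead:

		beq	%r1,L1			bne	%r1,L2
		b	L2		==>
	     L1: ...			    L1: ...  */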
3134 if (next && JUMP_P (next)
3135 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3136 && target_label
3137 && next_active_insn (target_label) == next_active_insn (next)
3138 && no_labels_between_p (insn, next))
3139 {
3140 rtx label = JUMP_LABEL (next);
3141
3142 /* Be careful how we do this to avoid deleting code or
3143 labels that are momentarily dead. See similar optimization
3144 in jump.c.
3145
3146 We also need to ensure we properly handle the case when
3147 invert_jump fails. */
3148
3149 ++LABEL_NUSES (target_label);
3150 if (label)
3151 ++LABEL_NUSES (label);
3152
3153 if (invert_jump (insn, label, 1))
3154 {
3155 delete_related_insns (next);
3156 next = insn;
3157 }
3158
3159 if (label)
3160 --LABEL_NUSES (label);
3161
3162 if (--LABEL_NUSES (target_label) == 0)
3163 delete_related_insns (target_label);
3164
3165 continue;
3166 }
3167 }
3168
3169 /* If this is an unconditional jump and the previous insn is a
3170 conditional jump, try reversing the condition of the previous
3171 insn and swapping our targets. The next pass might be able to
3172 fill the slots.
3173
3174 Don't do this if we expect the conditional branch to be true, because
3175 we would then be making the more common case longer. */
3176
3177 if (JUMP_P (insn)
3178 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3179 && (other = prev_active_insn (insn)) != 0
3180 && (condjump_p (other) || condjump_in_parallel_p (other))
3181 && no_labels_between_p (other, insn)
3182 && 0 > mostly_true_jump (other,
3183 get_branch_condition (other,
3184 JUMP_LABEL (other))))
3185 {
3186 rtx other_target = JUMP_LABEL (other);
3187 target_label = JUMP_LABEL (insn);
3188
3189 if (invert_jump (other, target_label, 0))
3190 reorg_redirect_jump (insn, other_target);
3191 }
3192
3193 /* Now look only at cases where we have filled a delay slot. */
3194 if (!NONJUMP_INSN_P (insn)
3195 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3196 continue;
3197
3198 pat = PATTERN (insn);
3199 delay_insn = XVECEXP (pat, 0, 0);
3200
3201 /* See if the first insn in the delay slot is redundant with some
3202 previous insn. Remove it from the delay slot if so; then set up
3203 to reprocess this insn. */
3204 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3205 {
3206 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3207 next = prev_active_insn (next);
3208 continue;
3209 }
3210
3211 /* See if we have a RETURN insn with a filled delay slot followed
3212 by a RETURN insn with an unfilled delay slot. If so, we can delete
3213 the first RETURN (but not its delay insn). This gives the same
3214 effect in fewer instructions.
3215
3216 Only do so if optimizing for size since this results in slower, but
3217 smaller code. */
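
	  /* Sketch (hypothetical, with X in the first return's slot):

		(sequence [return ; X])		X
		return			==>	return

	     X is re-emitted by itself and falls into the remaining
	     return, so one RETURN insn is saved.  */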
3218 if (optimize_size
3219 && GET_CODE (PATTERN (delay_insn)) == RETURN
3220 && next
3221 && JUMP_P (next)
3222 && GET_CODE (PATTERN (next)) == RETURN)
3223 {
3224 rtx after;
3225 int i;
3226
3227 /* Delete the RETURN and just execute the delay list insns.
3228
3229 We do this by deleting the INSN containing the SEQUENCE, then
3230 re-emitting the insns separately, and then deleting the RETURN.
3231 This allows the count of the jump target to be properly
3232 decremented. */
3233
3234 /* Clear the from target bit, since these insns are no longer
3235 in delay slots. */
3236 for (i = 0; i < XVECLEN (pat, 0); i++)
3237 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3238
3239 trial = PREV_INSN (insn);
3240 delete_related_insns (insn);
3241 if (GET_CODE (pat) != SEQUENCE)
3242 abort ();
3243 after = trial;
3244 for (i = 0; i < XVECLEN (pat, 0); i++)
3245 {
3246 rtx this_insn = XVECEXP (pat, 0, i);
3247 add_insn_after (this_insn, after);
3248 after = this_insn;
3249 }
3250 delete_scheduled_jump (delay_insn);
3251 continue;
3252 }
3253
3254 /* Now look only at the cases where we have a filled JUMP_INSN. */
3255 if (!JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
3256 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3257 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3258 continue;
3259
3260 target_label = JUMP_LABEL (delay_insn);
3261
3262 if (target_label)
3263 {
3264 /* If this jump goes to another unconditional jump, thread it, but
3265 don't convert a jump into a RETURN here. */
3266 trial = skip_consecutive_labels (follow_jumps (target_label));
3267 if (trial == 0)
3268 trial = find_end_label ();
3269
3270 if (trial && trial != target_label
3271 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3272 {
3273 reorg_redirect_jump (delay_insn, trial);
3274 target_label = trial;
3275 }
3276
3277 /* If the first insn at TARGET_LABEL is redundant with a previous
3278 insn, redirect the jump to the following insn and process it again. */
3279 trial = next_active_insn (target_label);
3280 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3281 && redundant_insn (trial, insn, 0)
3282 && ! can_throw_internal (trial))
3283 {
3284 /* Figure out where to emit the special USE insn so we don't
3285 later incorrectly compute register live/death info. */
3286 rtx tmp = next_active_insn (trial);
3287 if (tmp == 0)
3288 tmp = find_end_label ();
3289
3290 if (tmp)
3291 {
3292 /* Insert the special USE insn and update dataflow info. */
3293 update_block (trial, tmp);
3294
3295 /* Now emit a label before the special USE insn, and
3296 redirect our jump to the new label. */
3297 target_label = get_label_before (PREV_INSN (tmp));
3298 reorg_redirect_jump (delay_insn, target_label);
3299 next = insn;
3300 continue;
3301 }
3302 }
3303
3304 /* Similarly, if it is an unconditional jump with one insn in its
3305 delay list and that insn is redundant, thread the jump. */
3306 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3307 && XVECLEN (PATTERN (trial), 0) == 2
3308 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
3309 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3310 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3311 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3312 {
3313 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3314 if (target_label == 0)
3315 target_label = find_end_label ();
3316
3317 if (target_label
3318 && redirect_with_delay_slots_safe_p (delay_insn, target_label,
3319 insn))
3320 {
3321 reorg_redirect_jump (delay_insn, target_label);
3322 next = insn;
3323 continue;
3324 }
3325 }
3326 }
3327
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
          && prev_active_insn (target_label) == insn
          && ! condjump_in_parallel_p (delay_insn)
#ifdef HAVE_cc0
          /* If the last insn in the delay slot sets CC0 for some insn,
             various code assumes that it is in a delay slot.  We could
             put it back where it belonged and delete the register notes,
             but it doesn't seem worthwhile in this uncommon case.  */
          && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
                              REG_CC_USER, NULL_RTX)
#endif
          )
        {
          rtx after;
          int i;

          /* All this insn does is execute its delay list and jump to the
             following insn.  So delete the jump and just execute the delay
             list insns.

             We do this by deleting the INSN containing the SEQUENCE, then
             re-emitting the insns separately, and then deleting the jump.
             This allows the count of the jump target to be properly
             decremented.  */

          /* Clear the from target bit, since these insns are no longer
             in delay slots.  */
          for (i = 0; i < XVECLEN (pat, 0); i++)
            INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

          trial = PREV_INSN (insn);
          delete_related_insns (insn);
          if (GET_CODE (pat) != SEQUENCE)
            abort ();
          after = trial;
          for (i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx this_insn = XVECEXP (pat, 0, i);
              add_insn_after (this_insn, after);
              after = this_insn;
            }
          delete_scheduled_jump (delay_insn);
          continue;
        }
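      /* Sketch of the case above, in made-up assembly for a machine with
         one delay slot:

             before:              after:
                 b   L1               add r1,r2,r3
                 add r1,r2,r3     L1: ...
             L1: ...

         The branch target is the very next active insn, so the jump is
         useless; only its delay list needs to survive.  */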

      /* See if this is an unconditional jump around a single insn which is
         identical to the one in its delay slot.  In this case, we can just
         delete the branch and the insn in its delay slot.  */
      if (next && NONJUMP_INSN_P (next)
          && prev_label (next_active_insn (next)) == target_label
          && simplejump_p (insn)
          && XVECLEN (pat, 0) == 2
          && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
        {
          delete_related_insns (insn);
          continue;
        }
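      /* Sketch of the case above (made-up assembly):

             b   L1            ; INSN, a simple jump with one filled slot
             add r1,r1,1       ; its delay slot insn
             add r1,r1,1       ; NEXT, identical to the slot insn
           L1: ...

         Either way exactly one copy of the add executes before control
         reaches L1, so the branch and its slot copy can be deleted and the
         fall-through copy kept.  */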

      /* See if this jump (with its delay slots) branches around another
         jump (without delay slots).  If so, invert this jump and point
         it to the target of the second jump.  We cannot do this for
         annulled jumps, though.  Again, don't convert a jump to a RETURN
         here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
          && next && JUMP_P (next)
          && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
          && next_active_insn (target_label) == next_active_insn (next)
          && no_labels_between_p (insn, next))
        {
          rtx label = JUMP_LABEL (next);
          rtx old_label = JUMP_LABEL (delay_insn);

          if (label == 0)
            label = find_end_label ();

          /* find_end_label can generate a new label.  Check this first.  */
          if (label
              && no_labels_between_p (insn, next)
              && redirect_with_delay_slots_safe_p (delay_insn, label, insn))
            {
              /* Be careful how we do this to avoid deleting code or labels
                 that are momentarily dead.  See similar optimization in
                 jump.c.  */
              if (old_label)
                ++LABEL_NUSES (old_label);

              if (invert_jump (delay_insn, label, 1))
                {
                  int i;

                  /* Must update the INSN_FROM_TARGET_P bits now that
                     the branch is reversed, so that mark_target_live_regs
                     will handle the delay slot insn correctly.  */
                  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                    {
                      rtx slot = XVECEXP (PATTERN (insn), 0, i);
                      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
                    }

                  delete_related_insns (next);
                  next = insn;
                }

              if (old_label && --LABEL_NUSES (old_label) == 0)
                delete_related_insns (old_label);
              continue;
            }
        }
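      /* Schematically, the inversion above turns

             beq ...,L1                  bne ...,L2
             <slot insn>        into     <slot insn>
             b   L2                    L1: ...
           L1: ...

         saving the unconditional jump.  The INSN_FROM_TARGET_P bits are
         flipped because the former fall-through path is now the branch
         target and vice versa.  */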

      /* If we own the thread opposite the way this insn branches, see if we
         can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
          && own_thread_p (NEXT_INSN (insn), 0, 1))
        try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
               && own_thread_p (target_label, target_label, 0))
        try_merge_delay_insns (insn, next_active_insn (target_label));
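      /* own_thread_p is what makes the merging safe: it verifies that the
         code on the chosen path cannot be reached from anywhere else, so
         rewriting it affects no other path.  When the slot insn came from
         the branch target (INSN_FROM_TARGET_P), the fall-through code is
         the thread we might own; otherwise it is the target.  */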

      /* If we get here, we haven't deleted INSN.  But we may have deleted
         NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}
\f
#ifdef HAVE_return

/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */

static void
make_return_insns (rtx first)
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;
  int slots, i;

#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* If a previous pass filled delay slots in the epilogue, things get a
     bit more complicated, as those filler insns would generally (without
     data flow analysis) have to be executed after any existing branch
     delay slot filler insns.  It is also unknown whether such a
     transformation would actually be profitable.  Note that the existing
     code only cares for branches with (some) filled delay slots.  */
  if (current_function_epilogue_delay_list != NULL)
    return;
#endif

  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
      {
        real_return_label = get_label_before (insn);
        break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;

      /* Only look at filled JUMP_INSNs that go to the end of function
         label.  */
      if (!NONJUMP_INSN_P (insn)
          || GET_CODE (PATTERN (insn)) != SEQUENCE
          || !JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
          || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
        continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);

      /* If we can't make the jump into a RETURN, try to redirect it to the
         best RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
        {
          /* Make sure redirecting the jump will not invalidate the delay
             slot insns.  */
          if (redirect_with_delay_slots_safe_p (jump_insn,
                                                real_return_label,
                                                insn))
            reorg_redirect_jump (jump_insn, real_return_label);
          continue;
        }
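      /* Reaching here means reorg_redirect_jump accepted the null label,
         i.e. JUMP_INSN has been rewritten as a RETURN; it remains to check
         that its old delay slot contents are still valid for the RETURN's
         slots.  */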

      /* See if this RETURN can accept the insns currently in its delay
         slots.  It can if it has at least as many slots and the contents
         of each are valid.  */

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
        {
          for (i = 1; i < XVECLEN (pat, 0); i++)
            if (! (
#ifdef ANNUL_IFFALSE_SLOTS
                   (INSN_ANNULLED_BRANCH_P (jump_insn)
                    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
                   ? eligible_for_annul_false (jump_insn, i - 1,
                                               XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
                   (INSN_ANNULLED_BRANCH_P (jump_insn)
                    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
                   ? eligible_for_annul_true (jump_insn, i - 1,
                                              XVECEXP (pat, 0, i), flags) :
#endif
                   eligible_for_delay (jump_insn, i - 1,
                                       XVECEXP (pat, 0, i), flags)))
              break;
        }
      else
        i = 0;

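      /* If the loop above ran to completion, I equals XVECLEN (pat, 0) and
         every slot insn was acceptable, so the RETURN keeps its slots.
         Any smaller I (including the 0 set when there are too few slots)
         falls through to the repair code below.  */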
      if (i == XVECLEN (pat, 0))
        continue;

      /* We have to do something with this insn.  If it is an unconditional
         RETURN, delete the SEQUENCE and output the individual insns,
         followed by the RETURN.  Then set things up so we try to find
         insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
        {
          rtx prev = PREV_INSN (insn);

          delete_related_insns (insn);
          for (i = 1; i < XVECLEN (pat, 0); i++)
            prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

          insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
          emit_barrier_after (insn);

          if (slots)
            obstack_ptr_grow (&unfilled_slots_obstack, insn);
        }
      else
        /* It is probably more efficient to keep this with its current
           delay slot as a branch to a RETURN.  */
        reorg_redirect_jump (jump_insn, real_return_label);
    }

  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_related_insns (real_return_label);

  fill_simple_delay_slots (1);
  fill_simple_delay_slots (0);
}
#endif
\f
/* Try to find insns to place in delay slots.  */

void
dbr_schedule (rtx first, FILE *file)
{
  rtx insn, next, epilogue_insn = 0;
  int i;
#if 0
  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;
#endif

  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
        max_uid = INSN_UID (insn);
      if (NOTE_P (insn)
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
        epilogue_insn = insn;
    }

  uid_to_ruid = xmalloc ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
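  /* uid_to_ruid maps an insn's UID to its ordinal position in the insn
     stream (its "relative UID"), giving a cheap way to decide which of
     two insns comes first; e.g. uid_to_ruid[INSN_UID (first)] is 0.  */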

  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = obstack_alloc (&unfilled_slots_obstack, 0);
    }
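  /* unfilled_firstobj marks the start of a growing vector of pointers to
     insns whose delay slots need filling; freeing back to this marker, as
     is done at the end of the pass, discards the whole vector at once.  */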

  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (JUMP_P (insn)
          && (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
        continue;

      if (num_delay_slots (insn) > 0)
        obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (JUMP_P (insn)
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
          && JUMP_LABEL (insn) != 0
          && ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
              != JUMP_LABEL (insn)))
        redirect_jump (insn, target, 1);
    }

  init_resource_info (epilogue_insn);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Initialize the statistics for this function.  */
  memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
  memset (num_filled_delays, 0, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (1);
      fill_simple_delay_slots (0);
      fill_eager_delay_slots ();
      relax_delay_slots (first);
    }

  /* Delete any USE insns made by update_block; subsequent passes neither
     need them nor know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
          && INSN_P (XEXP (PATTERN (insn), 0)))
        next = delete_related_insns (insn);
    }

  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_related_insns (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = obstack_alloc (&unfilled_slots_obstack, 0);

  if (file)
    {
      int i, j, need_comma;
      int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
      int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];

      for (reorg_pass_number = 0;
           reorg_pass_number < MAX_REORG_PASSES;
           reorg_pass_number++)
        {
          fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
          for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
            {
              need_comma = 0;
              fprintf (file, ";; Reorg function #%d\n", i);

              fprintf (file, ";; %d insns needing delay slots\n;; ",
                       num_insns_needing_delays[i][reorg_pass_number]);

              for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
                if (num_filled_delays[i][j][reorg_pass_number])
                  {
                    if (need_comma)
                      fprintf (file, ", ");
                    need_comma = 1;
                    fprintf (file, "%d got %d delays",
                             num_filled_delays[i][j][reorg_pass_number], j);
                  }
              fprintf (file, "\n");
            }
        }
      memset (total_delay_slots, 0, sizeof total_delay_slots);
      memset (total_annul_slots, 0, sizeof total_annul_slots);
      for (insn = first; insn; insn = NEXT_INSN (insn))
        {
          if (! INSN_DELETED_P (insn)
              && NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) != USE
              && GET_CODE (PATTERN (insn)) != CLOBBER)
            {
              if (GET_CODE (PATTERN (insn)) == SEQUENCE)
                {
                  j = XVECLEN (PATTERN (insn), 0) - 1;
                  if (j > MAX_DELAY_HISTOGRAM)
                    j = MAX_DELAY_HISTOGRAM;
                  if (INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (insn), 0, 0)))
                    total_annul_slots[j]++;
                  else
                    total_delay_slots[j]++;
                }
              else if (num_delay_slots (insn) > 0)
                total_delay_slots[0]++;
            }
        }
      fprintf (file, ";; Reorg totals: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
        {
          if (total_delay_slots[j])
            {
              if (need_comma)
                fprintf (file, ", ");
              need_comma = 1;
              fprintf (file, "%d got %d delays", total_delay_slots[j], j);
            }
        }
      fprintf (file, "\n");
#if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
      fprintf (file, ";; Reorg annuls: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
        {
          if (total_annul_slots[j])
            {
              if (need_comma)
                fprintf (file, ", ");
              need_comma = 1;
              fprintf (file, "%d got %d delays", total_annul_slots[j], j);
            }
        }
      fprintf (file, "\n");
#endif
      fprintf (file, "\n");
    }

  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now, as up until this point the destinations of
     jumps may still be moved around and changed, but past this point that
     can no longer happen.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (NONJUMP_INSN_P (insn))
        {
          rtx pat = PATTERN (insn);

          if (GET_CODE (pat) == SEQUENCE)
            insn = XVECEXP (pat, 0, 0);
        }
      if (!JUMP_P (insn))
        continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
                                            GEN_INT (pred_flags),
                                            REG_NOTES (insn));
    }
  free_resource_info ();
  free (uid_to_ruid);
#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* The SPARC assembler, for instance, emits a warning when debug info is
     output into a delay slot.  */
  {
    rtx link;

    for (link = current_function_epilogue_delay_list;
         link;
         link = XEXP (link, 1))
      INSN_LOCATOR (XEXP (link, 0)) = 0;
  }
#endif
}
#endif /* DELAY_SLOTS */